diff --git a/MODULES b/MODULES index ff2a2a3441302fbac39213fadb212a7cfda964de..20d77dcf0ad3e61255357634944cde81aa339ba0 100644 --- a/MODULES +++ b/MODULES @@ -3,15 +3,7 @@ # WARNING: CDO HAS TO BE ON VERSION 1.9.4 # (If not, conflicts with weekly means computation could appear) -if [[ $BSC_MACHINE == "power" ]]; then - - module unuse /apps/modules/modulefiles/applications - module use /gpfs/projects/bsc32/software/rhel/7.4/ppc64le/POWER9/modules/all/ - - module load CDO/1.9.4-foss-2018b - module load R/3.6.1-foss-2018b - -elif [[ $BSC_MACHINE == "nord3v2" ]]; then +if [[ $BSC_MACHINE == "nord3v2" ]]; then module use /gpfs/projects/bsc32/software/suselinux/11/modules/all module unuse /apps/modules/modulefiles/applications /apps/modules/modulefiles/compilers /apps/modules/modulefiles/tools /apps/modules/modulefiles/libraries /apps/modules/modulefiles/environment @@ -19,11 +11,17 @@ elif [[ $BSC_MACHINE == "nord3v2" ]]; then module load CDO/1.9.8-foss-2019b module load R/4.1.2-foss-2019b module load OpenMPI/4.0.5-GCC-8.3.0-nord3-v2 + module load GEOS + module load GDAL + module load PROJ else module load CDO/1.9.8-foss-2015a module load R/4.1.2-foss-2015a-bare + module load GEOS + module load GDAL + module load PROJ fi diff --git a/tas-tos_scorecards_data_loading.R b/example_scripts/tas-tos_scorecards_data_loading.R similarity index 100% rename from tas-tos_scorecards_data_loading.R rename to example_scripts/tas-tos_scorecards_data_loading.R diff --git a/modules/test_decadal.R b/example_scripts/test_decadal.R similarity index 100% rename from modules/test_decadal.R rename to example_scripts/test_decadal.R diff --git a/modules/test_seasonal.R b/example_scripts/test_seasonal.R similarity index 89% rename from modules/test_seasonal.R rename to example_scripts/test_seasonal.R index 575585dbd01667a7477cd8186e6d140990e23624..1afb9549d6d520103592d4a82c240f54a1220d62 100644 --- a/modules/test_seasonal.R +++ b/example_scripts/test_seasonal.R @@ -22,5 +22,4 @@ probabilities <- compute_probabilities(recipe, data) ## TODO: Fix plotting # save_data(recipe, data, skill_metrics, probabilities) # Plot data -# plot_data(recipe, calibrated_data, skill_metrics, probabilities, -# significance = T) +plot_data(recipe, data, skill_metrics, probabilities, significance = T) diff --git a/modules/Visualization/R/get_proj_code.R b/modules/Visualization/R/get_proj_code.R new file mode 100644 index 0000000000000000000000000000000000000000..ba040c0089b87dc6295751445871a1d97eafaf62 --- /dev/null +++ b/modules/Visualization/R/get_proj_code.R @@ -0,0 +1,22 @@ +# This function returns the crs code for the projection indicated in +# 'proj_name' depending on the version of GDAL and proj.4, to be used +# in the PlotRobinson() function + +get_proj_code <- function(proj_name) { + # Define list with the grs codes for each projection + code_list <- list(robinson = c(54030, "ESRI:53030"), + stereographic = c(3995, "EPSG:3995"), + lambert_europe = c(102014, paste("+proj=lcc +lat_0=30", + "+lon_0=10 +lat_1=43", + "+lat_2=62 +x_0=0 +y_0=0", + "+ellps=intl +units=m", + "+no_defs +type=crs"))) + # Get crs code version depending on GDAL and PROJ version + if ((sf_extSoftVersion()[['GDAL']] < "3.0.0") && + (sf_extSoftVersion()[['PROJ']] < "6.0.0")) { + proj_code <- as.integer(code_list[[proj_name]][1]) + } else { + proj_code <- code_list[[proj_name]][2] + } + return(proj_code) +} diff --git a/modules/Visualization/R/plot_ensemble_mean.R b/modules/Visualization/R/plot_ensemble_mean.R index 
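## Illustrative sketch (not part of the patch): expected use of get_proj_code() defined above.
## get_proj_code() calls sf_extSoftVersion() without a namespace prefix, so sf must be attached
## first; the returned code (integer on older GDAL/PROJ stacks, character string otherwise) is
## what PlotRobinson() receives as 'target_proj'. The file path and printed values below are
## assumptions for illustration only.
library(sf)
source("modules/Visualization/R/get_proj_code.R")
code <- get_proj_code("robinson")
## GDAL >= 3.0.0 and PROJ >= 6.0.0: the character entry of code_list (e.g. "ESRI:53030")
## Older GDAL/PROJ stacks: the integer entry (54030)
print(code)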
e3d75138973205eaf77425618227c90c929e88fa..3263076d5977932169f27e5c94a8230b944acd13 100644 --- a/modules/Visualization/R/plot_ensemble_mean.R +++ b/modules/Visualization/R/plot_ensemble_mean.R @@ -12,12 +12,22 @@ plot_ensemble_mean <- function(recipe, fcst, outdir) { system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name start_date <- paste0(recipe$Analysis$Time$fcst_year, recipe$Analysis$Time$sdate) + init_month <- as.numeric(substr(recipe$Analysis$Time$sdate, + start = 1, stop = 2)) + if (!is.null(recipe$Analysis$Workflow$Visualization$projection)) { + projection <- tolower(recipe$Analysis$Workflow$Visualization$projection) + } else { + projection <- "cylindrical_equidistant" + } + # Compute ensemble mean ensemble_mean <- s2dv::MeanDims(fcst$data, 'ensemble') # Loop over variable dimension for (var in 1:fcst$dims[['var']]) { variable <- fcst$attrs$Variable$varName[[var]] + var_long_name <- fcst$attrs$Variable$metadata[[variable]]$long_name units <- fcst$attrs$Variable$metadata[[variable]]$units + # Subset ensemble mean by variable var_ens_mean <- ClimProjDiags::Subset(ensemble_mean, along = c("dat", "var", "sday", "sweek"), @@ -36,10 +46,8 @@ plot_ensemble_mean <- function(recipe, fcst, outdir) { palette = "RdBu" rev = T } - # Define brks, centered on in the case of anomalies - ## - if (grepl("anomaly", - fcst$attrs$Variable$metadata[[variable]]$long_name)) { + # Define brks, centered around zero in the case of anomalies + if (grepl("anomaly", var_long_name)) { variable <- paste(variable, "anomaly") max_value <- max(abs(var_ens_mean)) ugly_intervals <- seq(-max_value, max_value, max_value/20) @@ -51,31 +59,94 @@ plot_ensemble_mean <- function(recipe, fcst, outdir) { options(bitmapType = "cairo") for (i_syear in start_date) { - # Define name of output file and titles - i_var_ens_mean <- var_ens_mean[which(start_date == i_syear), , , ] - outfile <- paste0(outdir[[var]], "forecast_ensemble_mean-", i_syear, ".png") - toptitle <- paste("Forecast Ensemble Mean -", variable, "-", system_name, - "- Initialization:", i_syear) + if (length(start_date) == 1) { + i_var_ens_mean <- var_ens_mean[which(start_date == i_syear), , , ] + outfile <- paste0(outdir[[var]], "forecast_ensemble_mean-", start_date) + } else { + i_var_ens_mean <- var_ens_mean[which(start_date == i_syear), , , ] + outfile <- paste0(outdir[[var]], "forecast_ensemble_mean-", i_syear) + } + toptitle <- paste0(system_name, " / ", str_to_title(var_long_name), + "\n", "Forecast Ensemble Mean / ", "Init.: ", i_syear) months <- lubridate::month(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ], label = T, abb = F) - titles <- as.vector(months) - # Plots - PlotLayout(PlotEquiMap, c('longitude', 'latitude'), - i_var_ens_mean, longitude, latitude, - filled.continents = F, - toptitle = toptitle, - title_scale = 0.6, - titles = titles, - units = units, - cols = cols, - brks = brks, - fileout = outfile, - bar_label_digits = 4, - bar_extra_margin = rep(0.7, 4), - bar_label_scale = 1.5, - axes_label_scale = 1.3) + + if (recipe$Analysis$Workflow$Visualization$multi_panel) { + # Define name of output file and titles + titles <- as.vector(months) + # Plots + PlotLayout(PlotEquiMap, c('longitude', 'latitude'), + i_var_ens_mean, longitude, latitude, + filled.continents = F, + toptitle = toptitle, + title_scale = 0.7, + subtitle_scale = 0.8, + subtitle_margin_scale = 2, + titles = titles, + units = units, + cols = cols, + brks = brks, + fileout = paste0(outfile, ".png"), + bar_label_digits = 4, + extra_margin = rep(1, 4), + 
bar_label_scale = 1.5, + axes_label_scale = 1.1) + } else { + # Define function and parameters depending on projection + if (projection == 'cylindrical_equidistant') { + fun <- PlotEquiMap + base_args <- list(var = NULL, dots = NULL, + lon = longitude, lat = latitude, + dot_symbol = 20, title_scale = 0.6, + filled.continents = F, brks = brks, cols = cols, + bar_label_digits = 4, bar_label_scale = 1.5, + axes_label_scale = 1) + } else { + fun <- PlotRobinson + common_projections <- c("robinson", "stereographic", "lambert_europe") + if (projection %in% common_projections) { + target_proj <- get_proj_code(projection) + } else { + target_proj <- projection + } + base_args <- list(data = NULL, mask = NULL, + lon = longitude, lat = latitude, + lon_dim = 'longitude', lat_dim = 'latitude', + target_proj = target_proj, legend = 's2dv', + style = 'point', brks = brks, cols = cols) + } + # Loop over forecast times + for (i in 1:length(months)) { + # Get forecast time label + forecast_time <- match(months[i], month.name) - init_month + 1 + if (forecast_time < 1) { + forecast_time <- forecast_time + 12 + } + forecast_time <- sprintf("%02d", forecast_time) + # Define plot title + toptitle <- paste(system_name, "/", + str_to_title(var_long_name), + "\n", "Forecast Ensemble Mean /", months[i]) + # Define caption + if (identical(fun, PlotRobinson)) { + ## TODO: Customize technical details + base_args[['caption']] <- + paste0("Nominal start date: ", start_date, "\n", + "Forecast month: ", sprintf("%02d", i), "\n", + "Reference: ", recipe$Analysis$Datasets$Reference) + } + # Modify base arguments + base_args[[1]] <- i_var_ens_mean[i, , ] + fileout <- paste0(outfile, "_ft", sprintf("%02d", i), ".png") + # Plot + do.call(fun, + args = c(base_args, + list(toptitle = toptitle, + fileout = fileout))) + } + } } - info(recipe$Run$logger, - "##### FCST ENSEMBLE MEAN PLOT SAVED TO OUTPUT DIRECTORY #####") } + info(recipe$Run$logger, + "##### FCST ENSEMBLE MEAN PLOTS SAVED TO OUTPUT DIRECTORY #####") } diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index f912e24968a917142cb2270fce9d680f1c2c2e9c..ec3fb3920bff7c5e8a01613ddaccebbc2a1b8ae5 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -14,9 +14,10 @@ plot_most_likely_terciles <- function(recipe, longitude <- fcst$coords$lon archive <- get_archive(recipe) system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name - variable <- recipe$Analysis$Variables$name start_date <- paste0(recipe$Analysis$Time$fcst_year, recipe$Analysis$Time$sdate) + init_month <- as.numeric(substr(recipe$Analysis$Time$sdate, + start = 1, stop = 2)) # Retrieve and rearrange probability bins for the forecast if (is.null(probabilities$probs_fcst$prob_b33) || @@ -37,6 +38,7 @@ plot_most_likely_terciles <- function(recipe, # Drop extra dims, add time dim if missing: for (var in 1:fcst$dims[['var']]) { variable <- fcst$attrs$Variable$varName[[var]] + var_long_name <- fcst$attrs$Variable$metadata[[variable]]$long_name var_probs <- ClimProjDiags::Subset(probs_fcst, along = c("var"), indices = var, @@ -47,35 +49,72 @@ plot_most_likely_terciles <- function(recipe, for (i_syear in start_date) { # Define name of output file and titles i_var_probs <- var_probs[which(start_date == i_syear), , , , ] - outfile <- paste0(outdir[[var]], "forecast_most_likely_tercile-", - i_syear, ".png") - toptitle <- paste("Most Likely Tercile -", variable,
"-", system_name, "-", - "Initialization:", i_syear) + outfile <- paste0(outdir[[var]], "forecast_most_likely_tercile-", i_syear) + toptitle <- paste0(system_name, " / ", str_to_title(var_long_name), + "\n", "Most Likely Tercile / Initialization: ", + i_syear) months <- lubridate::month(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ], label = T, abb = F) - ## TODO: Ensure this works for daily and sub-daily cases - titles <- as.vector(months) - # Plots - ## NOTE: PlotLayout() and PlotMostLikelyQuantileMap() are still being worked - ## on. - suppressWarnings( - PlotLayout(PlotMostLikelyQuantileMap, c('bin', 'longitude', 'latitude'), - cat_dim = 'bin', - i_var_probs, longitude, latitude, - coast_width = 1.5, - title_scale = 0.6, - legend_scale = 0.8, #cex_bar_titles = 0.6, - toptitle = toptitle, - titles = titles, - fileout = outfile, - bar_label_digits = 2, - bar_scale = rep(0.7, 4), - bar_label_scale = 1.2, - axes_label_scale = 1.3, - triangle_ends = c(F, F), width = 11, height = 8) - ) + years <- lubridate::year(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ]) + if (recipe$Analysis$Workflow$Visualization$multi_panel) { + ## TODO: Ensure this works for daily and sub-daily cases + titles <- as.vector(months) + # Plots + ## NOTE: PlotLayout() and PlotMostLikelyQuantileMap() are still being worked + ## on. + suppressWarnings( + PlotLayout(PlotMostLikelyQuantileMap, c('bin', 'longitude', 'latitude'), + cat_dim = 'bin', + i_var_probs, longitude, latitude, + coast_width = 1.5, + title_scale = 0.6, + title_margin_scale = 0.7, + subtitle_scale = 1, + legend_scale = 0.8, #cex_bar_titles = 0.6, + toptitle = toptitle, + titles = titles, + fileout = paste0(outfile, ".png"), + bar_label_digits = 2, + bar_scale = rep(0.7, 4), + extra_margin = rep(1, 4), + bar_label_scale = 1.2, + axes_label_scale = 1.1, + triangle_ends = c(F, F)) # , width = 11, height = 8) + ) + } else { + for (i in 1:length(months)) { + # Get forecast time label + forecast_time <- match(months[i], month.name) - init_month + 1 + if (forecast_time < 1) { + forecast_time <- forecast_time + 12 + } + forecast_time <- sprintf("%02d", forecast_time) + # Define plot title + toptitle <- paste0(system_name, " / ", + str_to_title(var_long_name), + "\n", "Most Likely Tercile / ", + months[i], " ", years[i], + " (Init.: ", i_syear, ")") + # Plot + fileout <- paste0(outfile, "_ft", forecast_time, ".png") + PlotMostLikelyQuantileMap(cat_dim = 'bin', + probs = i_var_probs[i, , , ], + lon = longitude, lat = latitude, + coast_width = 1.5, + title_scale = 1, + legend_scale = 0.8, + cex_bar_titles = 0.9, + toptitle = toptitle, + fileout = fileout, + bar_label_digits = 2, + bar_label_scale = 0.7, + axes_label_scale = 1.1, + triangle_ends = c(F, F), width = 10, height = 8) + } + } } - info(recipe$Run$logger, - "##### MOST LIKELY TERCILE PLOT SAVED TO OUTPUT DIRECTORY #####") } + info(recipe$Run$logger, + "##### MOST LIKELY TERCILE PLOTS SAVED TO OUTPUT DIRECTORY #####") } diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index e62496f6d48cf5cc0c48615de671eda0d92c613e..834cc8cc05578df530afadf799db71383f464cb0 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -1,3 +1,5 @@ +library(stringr) + plot_skill_metrics <- function(recipe, data_cube, skill_metrics, outdir, significance = F) { # recipe: Auto-S2S recipe # data_cube: s2dv_cube object with the corresponding hindcast data # skill_metrics: list of named skill metrics arrays # outdir: output directory #
significance: T/F, whether to display the significance dots in the plots - ## TODO: OPTION for CERISE: Using PuOr # Abort if frequency is daily if (recipe$Analysis$Variables$freq == "daily_mean") { error(recipe$Run$logger, "Visualization functions not yet implemented @@ -29,9 +30,13 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, start = 1, stop = 2)) month_label <- tolower(month.name[init_month]) month_abbreviation <- month.abb[init_month] + if (!is.null(recipe$Analysis$Workflow$Visualization$projection)) { + projection <- tolower(recipe$Analysis$Workflow$Visualization$projection) + } else { + projection <- "cylindrical_equidistant" + } # Define color palette and number of breaks according to output format - ## TODO: Make separate function if (tolower(recipe$Analysis$Output_format) %in% c("scorecards", "cerise")) { diverging_palette <- "purpleorange" sequential_palette <- "Oranges" @@ -45,14 +50,12 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, "enscorr", "rpss_specs", "bss90_specs", "bss10_specs", "enscorr_specs", "rmsss") scores <- c("rps", "frps", "crps", "frps_specs") - # Assign colorbar to each metric type - ## TODO: Triangle ends + # Loop over variables and assign colorbar and plot parameters to each metric for (var in 1:data_cube$dims[['var']]) { var_skill <- lapply(skill_metrics, function(x) { ClimProjDiags::Subset(x, along = 'var', indices = var, drop = 'selected')}) - for (name in c(skill_scores, scores, "mean_bias", "enssprerr")) { if (name %in% names(skill_metrics)) { # Define plot characteristics and metric name to display in plot @@ -122,7 +125,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, # Split skill significance into list of lists, along the time dimension # This allows for plotting the significance dots correctly. 
skill_significance <- ClimProjDiags::ArrayToList(skill_significance, - dim = 'time', + dim = "time", level = "sublist", names = "dots") } else { @@ -130,40 +133,112 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, } # Define output file name and titles if (tolower(recipe$Analysis$Horizon) == "seasonal") { - outfile <- paste0(outdir[var], name, "-", month_label, ".png") + outfile <- paste0(outdir[var], name, "-", month_label) } else { - outfile <- paste0(outdir[var], name, ".png") + outfile <- paste0(outdir[var], name) } - toptitle <- paste(display_name, "-", data_cube$attrs$Variable$varName[var], - "-", system_name, "-", month_abbreviation, - hcst_period) + # Get months months <- unique(lubridate::month(data_cube$attrs$Dates, label = T, abb = F)) - titles <- as.vector(months) - # Plot - suppressWarnings( - PlotLayout(PlotEquiMap, c('longitude', 'latitude'), - asplit(skill, MARGIN=1), # Splitting array into a list - longitude, latitude, - special_args = skill_significance, - dot_symbol = 20, - toptitle = toptitle, - title_scale = 0.6, - titles = titles, - filled.continents=F, - brks = brks, - cols = cols, - col_inf = col_inf, - col_sup = col_sup, - fileout = outfile, - bar_label_digits = 3, - bar_extra_margin = rep(0.9, 4), - bar_label_scale = 1.5, - axes_label_scale = 1.3) - ) + # Get variable name and long name + var_name <- data_cube$attrs$Variable$varName[[var]] + var_long_name <- data_cube$attrs$Variable$metadata[[var_name]]$long_name + + # Multi-panel or single-panel plots + if (recipe$Analysis$Workflow$Visualization$multi_panel) { + # Define titles + toptitle <- paste0(system_name, " / ", str_to_title(var_long_name), + "\n", display_name, " / ", hcst_period) + titles <- as.vector(months) + ## TODO: Combine PlotLayout with PlotRobinson? 
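## Illustrative sketch (toy values, not taken from the recipe) of the single-panel dispatch
## pattern used below and in plot_ensemble_mean.R / plot_most_likely_terciles_map.R: the
## plotting function (PlotEquiMap or PlotRobinson) and its fixed arguments are stored in a
## list, the field for one forecast month is slotted into the first element, and do.call()
## writes one file tagged with the forecast time relative to the initialization month
## (wrapped into 1..12).
library(s2dv)
toy_field <- array(sin(seq(0, 2 * pi, length.out = 360 * 181)),
                   dim = c(longitude = 360, latitude = 181))
base_args <- list(var = NULL, lon = 0:359, lat = -90:90,
                  filled.continents = FALSE, brks = seq(-1, 1, 0.25))
init_month <- 11                                                 # November start date
forecast_time <- match("January", month.name) - init_month + 1   # 1 - 11 + 1 = -9
if (forecast_time < 1) forecast_time <- forecast_time + 12       # -> 3 (Nov = 01, Dec = 02, Jan = 03)
forecast_time <- sprintf("%02d", forecast_time)
base_args[[1]] <- toy_field
do.call(PlotEquiMap,
        c(base_args, list(toptitle = "Toy field / January",
                          fileout = paste0("toy_ft", forecast_time, ".png"))))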
+ suppressWarnings( + PlotLayout(PlotEquiMap, c('longitude', 'latitude'), + asplit(skill, MARGIN=1), # Splitting array into a list + longitude, latitude, + special_args = skill_significance, + dot_symbol = 20, + toptitle = toptitle, + title_scale = 0.6, + titles = titles, + filled.continents = F, + brks = brks, + cols = cols, + col_inf = col_inf, + col_sup = col_sup, + fileout = paste0(outfile, ".png"), + bar_label_digits = 3, + bar_extra_margin = rep(0.9, 4), + extra_margin = rep(1, 4), + bar_label_scale = 1.5, + axes_label_scale = 1.3) + ) + } else { + # Define function and parameters depending on projection + if (projection == 'cylindrical_equidistant') { + fun <- PlotEquiMap + base_args <- list(var = NULL, dots = NULL, + lon = longitude, lat = latitude, + dot_symbol = 20, title_scale = 0.6, + filled.continents = F, brks = brks, cols = cols, + col_inf = col_inf, col_sup = col_sup, + bar_label_digits = 3, bar_label_scale = 1.5, + axes_label_scale = 1) + } else { + fun <- PlotRobinson + common_projections <- c("robinson", "stereographic", "lambert_europe") + if (projection %in% common_projections) { + target_proj <- get_proj_code(projection) + } else { + target_proj <- projection + } + base_args <- list(data = NULL, mask = NULL, + lon = longitude, lat = latitude, + lon_dim = 'longitude', lat_dim = 'latitude', + target_proj = target_proj, legend = 's2dv', + style = 'point', brks = brks, cols = cols, + col_inf = col_inf, col_sup = col_sup) + } + # Loop over forecast times + for (i in 1:dim(skill)[['time']]) { + # Get forecast time label + forecast_time <- match(months[i], month.name) - init_month + 1 + if (forecast_time < 1) { + forecast_time <- forecast_time + 12 + } + forecast_time <- sprintf("%02d", forecast_time) + # Define plot title + toptitle <- paste(system_name, "/", + str_to_title(var_long_name), + "\n", display_name, "/", months[i], "/", + hcst_period) + # Modify base arguments + base_args[[1]] <- skill[i, , ] + if (!is.null(skill_significance)) { + base_args[[2]] <- skill_significance[[i]][[1]] + significance_caption <- "alpha = 0.05" + } else { + significance_caption <- NULL + } + if (identical(fun, PlotRobinson)) { + ## TODO: Customize alpha and other technical details depending on the metric + base_args[['caption']] <- + paste0("Nominal start date: ", + "1st of ", str_to_title(month_label), "\n", + "Forecast month: ", forecast_time, "\n", + "Reference: ", recipe$Analysis$Datasets$Reference, "\n", + significance_caption) + } + fileout <- paste0(outfile, "_ft", forecast_time, ".png") + # Plot + do.call(fun, + args = c(base_args, + list(toptitle = toptitle, + fileout = fileout))) + } + } } } - info(recipe$Run$logger, - "##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####") } + info(recipe$Run$logger, + "##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####") } diff --git a/modules/Visualization/R/tmp/PlotRobinson.R b/modules/Visualization/R/tmp/PlotRobinson.R new file mode 100644 index 0000000000000000000000000000000000000000..6a3412f53d8261748f6daa31171a0096dd99f614 --- /dev/null +++ b/modules/Visualization/R/tmp/PlotRobinson.R @@ -0,0 +1,524 @@ +#'Plot map in Robinson or other projections +#' +#'Transform a regular grid longitude-latitude data to a different projection and +#'plot the map. The target projection must be a valid CRS string, preferrably be +#'EPSG or ESRI code; check \link[sf]{st_crs} for more explanation. 
This function +#'is mainly tested for Robinson projection (ESRI:54030), but it can work with +#'other projection types in theory.\n +#'The map can be plotted by points or polygon. A legend can be plotted as either +#'a color bar or a discrete ggplot legend. Dots can be drawn on top of the data, +#'which can be used for significance test. A mask can be added to not plot the +#'data specified. A number of options is provided to adjust aesthetics, like +#'position, size, colors, etc. +#' +#'@param data A numeric array with longitude and latitude dimensions. The grid +#' should be regular grid. It can contain NA values. +#'@param lon A numeric vector of longitude locations of the cell centers of the +#' grid of 'data'. Expected to be regularly spaced, within the range of either +#' [-180, 180] or [0, 360]. +#'@param lat A numeric vector of latitude locations of the cell centers of the +#' grid of 'data'. Expected to be regularly spaced, within the range [-90, 90] +#' of ascending or descending order. +#'@param lon_dim A character string indicating the longitude dimension name in +#' 'data'. If it is NULL, the function tries to find the name in +#' \code{s2dv:::.KnownLonNames}. The default value is NULL. +#'@param lat_dim A character string indicating the latitude dimension name in +#' 'data'. If it is NULL, the function tries to find the name in +#' \code{s2dv:::.KnownLatNames}. The default value is NULL. +#'@param target_proj A character string indicating the target projection. It +#' should be a valid crs string. The default projection is Robinson +#' (ESRI:54030). Note that the character string may work differently depending +#' on PROJ and GDAL module version. +#'@param legend A character string indicating the legend style. It can be 's2dv' +#' (color bar by \code{ColorBar()}), 'ggplot2' (discrete legend by ggplot2), or +#' NULL (no legend), +#'@param style A character string indicating the plotting style. It can be +#' 'point' or 'polygon'. The default value is 'point'. Note that 'polygon' may +#' be time- and memory-consuming for global or high-resolution data. +#'@param dots An array with the same dimensions as 'data' of [0, 1] or logical +#' indicating the grids to plot dots. The value 0 or FALSE is the point to be +#' dotted. +#'@param mask An array with the same dimensions as 'data' of [0, 1] or logical +#' indicating the grids to not plot data. The value 0 or FALSE is the point not +#' to be plotted. +#'@param brks,cols,bar_limits,triangle_ends Usually only providing 'brks' is +#' enough to generate the desired color bar. These parameters allow to +#' define n breaks that define n - 1 intervals to classify each of the values +#' in 'data'. The corresponding grid cell of a given value in 'data' will be +#' colored in function of the interval it belongs to. These parameters are +#' sent to \code{ColorBar()} to generate the breaks and colours. Additional +#' colours for values beyond the limits of the colour bar are also generated +#' and applied to the plot if 'bar_limits' or 'brks' and 'triangle_ends' are +#' properly provided to do so. See ?ColorBar for a full explanation. +#'@param col_inf,col_sup,colNA Colour identifiers to color the values that +#' excess the extremes of the color bar and to color NAs, respectively. 'colNA' +#' takes attr(cols, 'na_color') if available by default, where cols is the +#' parameter 'cols' if provided or the vector of colors returned by +#' 'color_fun'. 'col_inf' and 'col_sup' will take the value of 'colNA' if not +#' specified. 
See ?ColorBar for a full explanation. +#'@param color_fun,bar_extra_margin Set of +#' parameters to control the visual aspect of the drawn colour bar +#' (1/3). See ?ColorBar for a full explanation. +#'@param vertical A logical value indicating the direction of colorbar if +#' parameter 'legend' is 's2dv'. The default value is TRUE. +#'@param toptitle A character string of the top title of the figure, scalable +#' with parameter 'title_size'. +#'@param caption A character string of the caption located at left-bottom of the +#' plot. +#'@param units A character string of the data units, which is the title of the +#' legend. +#'@param crop_coastlines A named numeric vector [lonmin, lonmax, latmin, latmax] +#' indicating the region to plot coastlines. Note that the longitude range +#' cannot exceed 180 degrees. +#'@param point_size A number of the size of the data points if "style = 'point'". +#' The default is 'auto' and the function tries to find the appropriate size. +#'@param title_size A number of the size of the top title. The default is 16. +#'@param dots_size A number of the size of the dots. The default is 0.5. +#'@param dots_shape A number indicating the dot shape recognized by parameter +#' 'shape' in \code{geom_point()}. +#'@param coastlines_width A number indicating the width of the coastlines. +#'@param fileout A character string of the path to save the plot. If not +#' specified (default), a graphic device will pop up. The extension should be +#' accepted by \code{ggsave()}. +#'@param width A number of the plot width, in the units specified in parameter +#' 'size_units'. The default is 8. +#'@param height A number of the plot height, in the units specified in parameter +#' 'size_units'. The default is 4. +#'@param size_units A character string of the units of the size of the device +#' (file or window) to plot in. The default is 'in' (inches). See ?ggsave and +#' ?Devices for details of the corresponding device. +#'@param res Resolution of the device (file or window) to plot in. The default +#' value is 300. See ?ggsave 'dpi' and ?Devices for details of the +#' corresponding device. +#' +#'@return A map plot with speficied projection, either in pop-up window or a +#' saved file. 
+#' +#'@examples +#'data <- array(rep(seq(-10, 10, length.out = 181), 360) + rnorm(360), +#' dim = c(lat = 181, lon = 360)) +#'dots <- data +#'dots[which(dots < 4 & dots > -4)] <- 0 +#'dots[which(dots != 0)] <- 1 +#'PlotRobinson(data, lon = 0:359, lat = -90:90, dots = dots, +#' brks = seq(-10, 10, length.out = 11), +#' toptitle = 'synthetic example', vertical = F, +#' caption = 'Robinson Global\ns2dv::PlotRobinson example', +#' bar_extra_margin = c(0, 1, 0, 1), width = 8, height = 6) +#'PlotRobinson(data, lon = 0:359, lat = -90:90, mask = dots, legend = 'ggplot2', +#' target_proj = "+proj=moll", brks = seq(-10, 10, length.out = 11), +#' color_fun = clim.palette("purpleorange"), colNA = 'green', +#' toptitle = 'synthetic example', +#' caption = 'Mollweide Global\ns2dv::PlotRobinson example', +#' width = 8, height = 6) +#' +#'@import sf ggplot2 rnaturalearth cowplot +#'@export +PlotRobinson <- function(data, lon, lat, lon_dim = NULL, lat_dim = NULL, + target_proj = 54030, legend = 's2dv', style = 'point', + dots = NULL, mask = NULL, brks = NULL, cols = NULL, bar_limits = NULL, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, colNA = NULL, + color_fun = clim.palette(), bar_extra_margin = rep(0, 4), vertical = TRUE, + toptitle = NULL, caption = NULL, units = NULL, crop_coastlines = NULL, + point_size = "auto", title_size = 16, dots_size = 0.5, + dots_shape = 47, coastlines_width = 0.3, + fileout = NULL, width = 8, height = 4, size_units = "in", + res = 300) { + + # Sanity check + # data + data <- drop(data) + if (length(dim(data)) != 2) { + stop("Parameter 'data' must have two dimensions.") + } + dims <- dim(data) + # lon, lon_dim + if (is.null(lon_dim)) { + lon_dim <- names(dims)[names(dims) %in% .KnownLonNames()] + if (identical(lon_dim, character(0))) { + stop("Cannot find known longitude name in data dimension. Please define parameter 'lon_dim'.") + } + } + if (is.unsorted(lon)) { + .warning("Parameter 'lon' should be sorted to guarantee the correct result.") + } + # lat, lat_dim + if (is.null(lat_dim)) { + lat_dim <- names(dims)[names(dims) %in% .KnownLatNames()] + if (identical(lat_dim, character(0))) { + stop("Cannot find known latitude name in data dimension. 
Please define parameter 'lat_dim'.") + } + } + if (!all(names(dims) %in% c(lat_dim, lon_dim))) { + stop("Dimensions names in paramter 'data' should match 'lat_dim' and 'lon_dim.") + } + if (length(lon) != dims[lon_dim]) { + stop("Length of parameter 'lon' should match longitude dimension in 'data'.") + } + if (length(lat) != dims[lat_dim]) { + stop("Length of parameter 'lat' should match latitude dimension in 'data'.") + } + data <- s2dv::Reorder(data, c(lon_dim, lat_dim)) + # Make lat always from 90 to -90 + sort_lat <- FALSE + if (!is.unsorted(lat)) { + lat <- rev(lat) + data <- ClimProjDiags::Subset(data, along = lat_dim, indices = seq(length(lat), 1, -1)) + sort_lat <- TRUE + } + + # original_proj: it can only be regular grid now + original_proj <- st_crs(4326) + # tartget_proj + if (is.null(target_proj)) { + stop("Parameter 'target_proj' cannot be NULL.") + } else { + target_proj_tmp <- st_crs(target_proj) + if (is.na(target_proj_tmp)) { + .warning(paste0("Try ESRI code: ESRI:", target_proj)) + target_proj <- st_crs(paste0("ESRI:", target_proj)) + } else { + target_proj <- target_proj_tmp + } + } + + # legend + if (!is.null(legend) && (!legend %in% c('s2dv', 'ggplot2'))) { + stop("Parameter 'legend' must be NULL, ggplot2 or s2dv.") + } + # style + if (!style %in% c('point', 'polygon') || length(style) != 1) { + stop("Parameter 'style' must be 'point' or 'polygon'.") + } + if (style == 'polygon') { + # polygon is slow for global map (and may be wrong) Confirm if users want to proceed + if ((abs(diff(range(lon))) > 350 & abs(diff(range(lat))) > 175) | + (prod(dim(data)) >= (180 * 360))) { + if (!isTRUE(utils::askYesNo("The region seems to be global and style 'polygon' is chosen. It may be time- and memory-consuming to plot the map. Are you sure that you want to continue?"))) { + return(invisible()) + } + } + } + # dots + if (!is.null(dots)) { + dots <- drop(dots) + if (any(!names(dim(dots)) %in% c(lon_dim, lat_dim))) { + stop("Parameter 'dots' must have two dimensions named as longitude and latitude dimensions in 'data'.") + } else { + dots <- Reorder(dots, c(lon_dim, lat_dim)) + } + if (!identical(dim(dots), dim(data))) { + stop("Parameter 'dots' must have the same dimensions as 'data'.") + } else if (is.numeric(dots)) { + if (all(dots %in% c(0, 1))) { + dots <- array(as.logical(dots), dim = dim(dots)) + } else { + stop("Parameter 'dots' must have only TRUE/FALSE or 0/1.") + } + } else if (is.logical(dots)) { + if (!all(dots %in% c(T, F))) { + stop("Parameter 'dots' must have only TRUE/FALSE or 0/1.") + } + } else { + stop("Parameter 'dots' must be a logical or numerical array.") + } + } + # mask + if (!is.null(mask)) { + mask <- drop(mask) + if (any(!names(dim(mask)) %in% c(lon_dim, lat_dim))) { + stop("Parameter 'mask' must have two dimensions named as longitude and latitude dimensions in 'data'.") + } else { + mask <- Reorder(mask, c(lon_dim, lat_dim)) + } + if (!identical(dim(mask), dim(data))) { + stop("Parameter 'mask' must have the same dimensions as 'data'.") + } else if (is.numeric(mask)) { + if (all(mask %in% c(0, 1))) { + mask <- array(as.logical(mask), dim = dim(mask)) + } else { + stop("Parameter 'mask' must have only TRUE/FALSE or 0/1.") + } + } else if (is.logical(mask)) { + if (!all(mask %in% c(T, F))) { + stop("Parameter 'mask' must have only TRUE/FALSE or 0/1.") + } + } else { + stop("Parameter 'mask' must be a logical or numerical array.") + } + } + + # Color bar + ## Check: brks, cols, bar_limits, color_fun, bar_extra_margin, units + ## Build: brks, cols, 
bar_limits, col_inf, col_sup + var_limits <- c(min(data, na.rm = TRUE), max(data, na.rm = TRUE)) + colorbar <- ColorBar(brks = brks, cols = cols, vertical = vertical, subsampleg = NULL, + bar_limits = bar_limits, var_limits = var_limits, triangle_ends = triangle_ends, + col_inf = col_inf, col_sup = col_sup, color_fun = color_fun, + plot = FALSE, draw_ticks = TRUE, draw_separators = FALSE, + triangle_ends_scale = 1, extra_labels = NULL, + title = units, title_scale = 1, # units_scale + label_scale = 1, tick_scale = 1, #bar_tick_scale + extra_margin = bar_extra_margin, label_digits = 4) + brks <- colorbar$brks + cols <- colorbar$cols + col_inf <- colorbar$col_inf + col_sup <- colorbar$col_sup + bar_limits <- c(head(brks, 1), tail(brks, 1)) + # colNA + if (is.null(colNA)) { + if ('na_color' %in% names(attributes(cols))) { + colNA <- attr(cols, 'na_color') + if (!.IsColor(colNA)) { + stop("The 'na_color' provided as attribute of the colour vector must be a valid colour identifier.") + } + } else { + colNA <- 'pink' + } + } else if (!.IsColor(colNA)) { + stop("Parameter 'colNA' must be a valid colour identifier.") + } + # toptitle + if (!is.null(toptitle) && !is.character(toptitle)) { + stop("Parameter 'toptitle' must be a character string.") + } + # caption + if (!is.null(caption) && !is.character(caption)) { + stop("Parameter 'caption' must be a character string.") + } + # crop_coastlines + if (!is.null(crop_coastlines)) { + # if crop_coastlines doesn't have name, [lonmin, lonmax, latmin, latmax] + if (is.null(names(crop_coastlines))) { + names(crop_coastlines) <- c("lonmin", "lonmax", "latmin", "latmax") + } else if (!identical(sort(names(crop_coastlines)), sort(c("latmax", "latmin", "lonmax", "lonmin")))) { + stop("Parameter 'crop_coastlines' needs to have names 'latmax', 'latmin', 'lonmax', 'lonmin'.") + } + } + + # point_size + if (point_size == 'auto') { + # 360x181 with default setting, 0.05 + point_size <- round(0.05 * (360 * 181) / (length(lon) * length(lat)), 2) + } else if (!is.numeric(point_size)) { + stop("Parameter 'point_size' must be a number.") + } + # + +#================================================================= + + # Adapt s2dv ColorBar parameters to ggplot plot + # If legend is NULL, still tune with s2dv legend way + if (is.null(legend) || legend == 's2dv') { + # the colorbar triangle color. If it is NULL (no triangle plotted), use colNA + col_inf_image <- ifelse(is.null(col_inf), colNA, col_inf) + col_sup_image <- ifelse(is.null(col_sup), colNA, col_sup) + cols_ggplot <- c(col_inf_image, cols, col_sup_image) + # Add brks to triangles + brks_ggplot <- c(min(data, na.rm = T), brks, max(data, na.rm = T)) + + } else { # ggplot2 legend + brks_ggplot <- brks + cols_ggplot <- cols + } + + # Build data dataframe + lonlat_df <- data.frame(lon = rep(as.vector(lon), length(lat)), + lat = sort(rep(as.vector(lat), length(lon)), decreasing = TRUE)) + data_df <- lonlat_df %>% + dplyr::mutate(dat = as.vector(data)) + + # Remove the points where mask = FALSE + if (!is.null(mask)) { + if (sort_lat) { + mask <- ClimProjDiags::Subset(mask, along = lat_dim, indices = seq(length(lat), 1, -1)) + } + mask_df <- data.frame(lon = rep(as.vector(lon), length(lat)), + lat = sort(rep(as.vector(lat), length(lon)), decreasing = TRUE), + mask = as.vector(mask)) + data_df <- data_df[mask_df$mask == TRUE, ] + lonlat_df <- data_df[, 1:2] + } + + #NOTE: if target_proj = "ESRI:54030", Nord3v2 has different behavior from hub and ws!! 
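## Illustrative sketch (toy values, not from the function) of the break/colour extension
## above: the data range is appended around 'brks' so values beyond the colour-bar limits
## land in the two triangle bins, and cut() assigns each point the matching colour from
## cols_ggplot.
brks <- seq(-10, 10, by = 5)                               # 5 breaks -> 4 colours
cols <- c("#2166AC", "#92C5DE", "#F4A582", "#B2182B")
dat  <- c(-12, -3, 0.5, 7, 25)                             # includes out-of-range values
brks_ggplot <- c(min(dat), brks, max(dat))                 # add triangle bins
cols_ggplot <- c("navy", cols, "darkred")                  # col_inf, cols, col_sup
bins <- cut(dat, breaks = brks_ggplot, include.lowest = TRUE)
data.frame(value = dat, bin = bins, colour = cols_ggplot[as.integer(bins)])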
+ data_df <- st_as_sf(data_df, coords = c("lon", "lat"), crs = original_proj) + data_df <- st_transform(data_df, crs = target_proj) + data_df <- data_df %>% + dplyr::mutate(long = st_coordinates(data_df)[, 1], + lat = st_coordinates(data_df)[, 2]) + + # Re-project dots + if (!is.null(dots)) { + if (sort_lat) { + dots <- ClimProjDiags::Subset(dots, along = lat_dim, indices = seq(length(lat), 1, -1)) + } + dots_df <- data.frame(lon = rep(as.vector(lon), length(lat)), + lat = sort(rep(as.vector(lat), length(lon)), decreasing = TRUE), + dot = as.vector(dots)) + + dots_df <- st_as_sf(dots_df, coords = c("lon", "lat"), crs = original_proj) + dots_df <- st_transform(dots_df, crs = target_proj) + dots_df <- dots_df %>% + dplyr::mutate(long = st_coordinates(dots_df)[, 1], + lat = st_coordinates(dots_df)[, 2]) + dots_df <- subset(dots_df, dot == FALSE) + } + + # coastlines + coastlines <- rnaturalearth::ne_coastline(scale = "medium", returnclass = "sf") + ## crop the coastlines to the desired range + if (!is.null(crop_coastlines)) { + suppressWarnings({ + coastlines <- st_crop(coastlines, + xmin = as.numeric(crop_coastlines['lonmin']), + xmax = as.numeric(crop_coastlines['lonmax']), + ymin = as.numeric(crop_coastlines['latmin']), + ymax = as.numeric(crop_coastlines['latmax'])) + }) + } + coastlines <- st_transform(coastlines, crs = target_proj) + + if (style == 'polygon') { + # Calculate polygon points from regular lat/lon + #NOTE: The original grid must be regular grid with same space + d_lon <- abs(lon[2] - lon[1]) / 2 + d_lat <- abs(lat[2] - lat[1]) / 2 + lon_poly <- lat_poly <- rep(NA, 4 * dim(lonlat_df)[1]) + for (ii in 1:dim(lonlat_df)[1]) { + lon_poly[(ii*4-3):(ii*4)] <- c(lonlat_df$lon[ii] - d_lon, lonlat_df$lon[ii] + d_lon, + lonlat_df$lon[ii] + d_lon, lonlat_df$lon[ii] - d_lon) + lat_poly[(ii*4-3):(ii*4)] <- c(lonlat_df$lat[ii] - d_lat, lonlat_df$lat[ii] - d_lat, + lonlat_df$lat[ii] + d_lat, lonlat_df$lat[ii] + d_lat) + } + # # To prevent out-of-global lon + # lon_poly[which(lon_poly >= 180)] <- 179.9 + # lon_poly[which(lon_poly < -180)] <- -180 + # To prevent out-of-global lat + lat_poly[which(lat_poly > 90)] <- 90 + lat_poly[which(lat_poly < -90)] <- -90 + + lonlat_df <- data.frame(lon = lon_poly, lat = lat_poly) + # Transfer lon/lat to projection + proj_lonlat <- st_as_sf(lonlat_df, coords = c("lon", "lat"), crs = original_proj) + #NOTE: if target_proj = "ESRI:54030", on Nord3v2, st_transform has lon and lat swapped! + proj_lonlat <- st_transform(proj_lonlat, crs = target_proj) + lonlat_df_proj <- st_coordinates(proj_lonlat) + + # Use id to create groups for each polygon + ids <- factor(paste0("id_", 1:dim(data_df)[1])) + values <- data.frame(id = ids, + value = data_df$dat) + positions <- data.frame(id = rep(ids, each = 4), + x = lonlat_df_proj[, 1], + y = lonlat_df_proj[, 2]) + datapoly <- merge(values, positions, by = "id") + datapoly <- st_as_sf(datapoly, coords = c("x", "y"), crs = target_proj) + datapoly <- datapoly %>% + dplyr::group_by(id) %>% + dplyr::summarise() %>% #NOTE: VERY SLOW if plot global + dplyr::mutate(value = values[order(values$id), ]$value) %>% + st_cast("POLYGON") %>% + st_convex_hull() # maintain outer polygen (no bowtie shape) + } + + # Plots + if (style == 'polygon') { + res_p <- ggplot(data = data_df) + #NOTE: must be data_df? 
+ geom_sf(data = datapoly, + aes(col = cut(value, breaks = brks_ggplot, include.lowest = T), + fill = cut(value, breaks = brks_ggplot, include.lowest = T))) + } else if (style == 'point') { + res_p <- ggplot(data = data_df) + + geom_point(aes(x = long, y = lat, + col = cut(dat, breaks = brks_ggplot, include.lowest = T)), + #NOTE: These two lines make point size vary with lat + #size = point_size / (data_df$lat / min(data_df$lat))) + + #size = (sort(rep(as.vector(lat), length(lon))) / max(lat)) * point_size) + + size = point_size) + } + res_p <- res_p + + geom_sf(data = coastlines, colour ='black', size = coastlines_width) + + # Remove background grid and lat/lon label; add white background + theme_void() + theme(plot.background = element_rect(fill = 'white', colour = 'white')) + + # crop the projection + coord_sf(xlim = range(data_df$long), ylim = range(data_df$lat), + expand = TRUE, datum = target_proj) + + if (!is.null(dots)) { + res_p <- res_p + geom_point(data = dots_df, aes(x = long, y = lat), + shape = dots_shape, size = dots_size) + #NOTE: This line makes point size vary with lat + #size = dots_size / (dots_df$lat / min(dots_df$lat))) + } + + if (identical(legend, 'ggplot2')) { + if (style == 'polygon') { + res_p <- res_p + scale_colour_manual(values = cols_ggplot, + aesthetics = c("colour", "fill"), + drop = FALSE, na.value = colNA) + + guides(fill = guide_legend(title = units, override.aes = list(size = 1)), + color = "none") + } else if (style == 'point') { + res_p <- res_p + scale_colour_manual(values = cols_ggplot, + drop = FALSE, na.value = colNA) + + guides(colour = guide_legend(title = units, override.aes = list(size = 1))) + } + + } else { # s2dv or NULL + if (style == 'polygon') { + res_p <- res_p + scale_colour_manual(values = cols_ggplot, + aesthetics = c("colour", "fill"), + drop = FALSE, na.value = colNA) + } else if (style == 'point') { + res_p <- res_p + scale_colour_manual(values = cols_ggplot, + drop = FALSE, na.value = colNA) + } + # Remove ggplot legend + res_p <- res_p + theme(legend.position = "none", plot.margin = margin(0.5, 0, 0, 0, 'cm')) + } + + if (!is.null(toptitle)) { + res_p <- res_p + ggtitle(toptitle) + + theme(plot.title = element_text(size = title_size, hjust = 0.5, vjust = 3)) + } + if (!is.null(caption)) { + res_p <- res_p + labs(caption = caption) + + theme(plot.caption = element_text(hjust = 0, vjust = 1, margin = margin(0, 0, 0, 0, 'cm'))) + } + + # s2dv legend fun to put in cowplot::plot_grid + if (identical(legend, 's2dv')) { + fun_legend <- function() { + if (vertical) { + par(mar = c(7.1, 2.2, 7.1, 3.1), mgp = c(3, 1, 0)) + } else { + par(mar = c(1.1, 1.2, 0.1, 1.1), mgp = c(3, 1, 0)) + } + ColorBar(brks = brks, cols = cols, vertical = vertical, subsampleg = NULL, + bar_limits = bar_limits, var_limits = var_limits, triangle_ends = triangle_ends, + col_inf = col_inf, col_sup = col_sup, color_fun = color_fun, + plot = TRUE, draw_ticks = TRUE, draw_separators = FALSE, + triangle_ends_scale = 1, extra_labels = NULL, + title = units, title_scale = 1, # units_scale + label_scale = 1, tick_scale = 1, #bar_tick_scale + extra_margin = bar_extra_margin, label_digits = 4) + } + if (vertical) { + res_p <- cowplot::plot_grid(res_p, fun_legend, rel_widths = c(6, 1)) + } else { + res_p <- cowplot::plot_grid(res_p, fun_legend, rel_heights = c(5, 1), ncol = 1) + } + res_p <- res_p + theme(plot.background = element_rect(fill = "white", colour = "white")) + } + + if (!is.null(fileout)) { + ext <- regmatches(fileout, regexpr("[a-zA-Z0-9]*$", fileout)) + 
ggsave(fileout, res_p, width = width, height = height, dpi = res, units = size_units, + device = ext) + } else { # pop-up window + dev.new(units = size_units, res = res, width = width, height = height) + res_p + } + +} + diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index 446400550cfac32aa8f3bdb918398245465ed10a..03a4d5a25926a21bdb1adc3b815eeb7fc539c226 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -5,6 +5,9 @@ ## TODO: Decadal plot names source("modules/Visualization/R/plot_skill_metrics.R") +source("modules/Visualization/R/get_proj_code.R") +## TODO: Remove after the next s2dv release +source("modules/Visualization/R/tmp/PlotRobinson.R") source("modules/Visualization/R/plot_most_likely_terciles_map.R") source("modules/Visualization/R/plot_ensemble_mean.R") diff --git a/recipes/atomic_recipes/recipe_system5c3s-tas.yml b/recipes/atomic_recipes/recipe_system5c3s-tas.yml index c4606d59c61edeae34fa4cdd677f6bff00e05e9e..b0f171e4ae6ac7f190f5f7e2547ae3a78e92faf1 100644 --- a/recipes/atomic_recipes/recipe_system5c3s-tas.yml +++ b/recipes/atomic_recipes/recipe_system5c3s-tas.yml @@ -13,11 +13,11 @@ Analysis: Reference: name: ERA5 Time: - sdate: '0601' + sdate: '0301' fcst_year: '2020' hcst_start: '1993' hcst_end: '2006' - ftime_min: 1 + ftime_min: 2 ftime_max: 3 Region: latmin: -10 @@ -36,19 +36,20 @@ Analysis: method: raw save: fcst_only Skill: - metric: RPSS_specs BSS90_specs EnsCorr_specs FRPS_specs FRPSS_specs BSS10_specs FRPS + metric: FRPS save: all Probabilities: percentiles: [[1/3, 2/3]] save: all Visualization: plots: skill_metrics forecast_ensemble_mean - + multi_panel: no + projection: robinson Indicators: index: no Output_format: S2S4E Run: Loglevel: INFO Terminal: yes - output_dir: /esarchive/scratch/vagudets/repos/auto-s2s/out-logs/ + output_dir: /esarchive/scratch/vagudets/auto-s2s-outputs/ code_dir: /esarchive/scratch/vagudets/repos/auto-s2s/ diff --git a/recipes/atomic_recipes/recipe_system7c3s-tas.yml b/recipes/atomic_recipes/recipe_system7c3s-tas.yml index e5cd2abaf0bb84decf9c5430743055a95f57c03b..c30ed71cca32f4c4cbf84373344e0f5526a352e0 100644 --- a/recipes/atomic_recipes/recipe_system7c3s-tas.yml +++ b/recipes/atomic_recipes/recipe_system7c3s-tas.yml @@ -15,15 +15,15 @@ Analysis: Time: sdate: '1101' fcst_year: '2020' - hcst_start: '1993' + hcst_start: '2000' hcst_end: '2010' ftime_min: 1 ftime_max: 2 Region: - latmin: -10 - latmax: 10 - lonmin: 0 - lonmax: 20 + latmin: 30 + latmax: 70 + lonmin: -20 + lonmax: 40 Regrid: method: bilinear type: to_system @@ -36,13 +36,15 @@ Analysis: method: mse_min save: 'none' # 'all'/'none'/'exp_only'/'fcst_only' Skill: - metric: RPS RPSS CRPS CRPSS FRPSS BSS10 BSS90 EnsCorr Corr mean_bias mean_bias_SS + metric: BSS10 BSS90 save: 'all' # 'all'/'none' Probabilities: - percentiles: [[1/3, 2/3], [1/10, 9/10], [1/4, 2/4, 3/4]] + percentiles: [[1/3, 2/3], [1/10, 9/10]] save: 'percentiles_only' # 'all'/'none'/'bins_only'/'percentiles_only' Visualization: plots: skill_metrics, forecast_ensemble_mean, most_likely_terciles + multi_panel: no + projection: lambert_europe Indicators: index: no ncores: 10 diff --git a/recipes/atomic_recipes/recipe_tas_global.yml b/recipes/atomic_recipes/recipe_tas_global.yml new file mode 100644 index 0000000000000000000000000000000000000000..969e150d4483932f7755112ca3e284e68907da99 --- /dev/null +++ b/recipes/atomic_recipes/recipe_tas_global.yml @@ -0,0 +1,57 @@ +Description: + Author: V. 
Agudetse + +Analysis: + Horizon: Seasonal + Variables: + name: tas + freq: monthly_mean + Datasets: + System: + name: Meteo-France-System7 + Multimodel: False + Reference: + name: ERA5 + Time: + sdate: '1101' + fcst_year: '2020' + hcst_start: '2000' + hcst_end: '2010' + ftime_min: 1 + ftime_max: 2 + Region: + latmin: -90 + latmax: 90 + lonmin: 0 + lonmax: 359.9 + Regrid: + method: bilinear + type: to_system + Workflow: + Anomalies: + compute: yes # yes/no, default yes + cross_validation: yes # yes/no, default yes + save: 'all' # 'all'/'none'/'exp_only'/'fcst_only' + Calibration: + method: mse_min + save: 'none' # 'all'/'none'/'exp_only'/'fcst_only' + Skill: + metric: RPSS BSS10 BSS90 EnsCorr mean_bias mean_bias_SS + save: 'all' # 'all'/'none' + Probabilities: + percentiles: [[1/3, 2/3], [1/10, 9/10]] + save: 'percentiles_only' # 'all'/'none'/'bins_only'/'percentiles_only' + Visualization: + plots: skill_metrics, forecast_ensemble_mean, most_likely_terciles + multi_panel: no + projection: robinson + Indicators: + index: no + ncores: 10 + remove_NAs: yes + Output_format: S2S4E +Run: + Loglevel: INFO + Terminal: yes + output_dir: /esarchive/scratch/vagudets/auto-s2s-outputs/ + code_dir: /esarchive/scratch/vagudets/repos/auto-s2s/ diff --git a/tests/recipes/recipe-decadal_monthly_1.yml b/tests/recipes/recipe-decadal_monthly_1.yml index a2849c272a0e0c0da75e2af5ecb83347af9a0ce8..ce44751d1589e5b8d016c532da20174b796adfd5 100644 --- a/tests/recipes/recipe-decadal_monthly_1.yml +++ b/tests/recipes/recipe-decadal_monthly_1.yml @@ -46,6 +46,8 @@ Analysis: index: FALSE Visualization: plots: skill_metrics most_likely_terciles forecast_ensemble_mean + multi_panel: yes + projection: cylindrical_equidistant ncores: # Optional, int: number of cores, defaults to 1 remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE Output_format: S2S4E diff --git a/tests/recipes/recipe-decadal_monthly_2.yml b/tests/recipes/recipe-decadal_monthly_2.yml index b0892a0c899da38bb605ec9127e2d78c22861d41..8f2e8111ff91588c0c38d75bfa26fa4ca87ec5f5 100644 --- a/tests/recipes/recipe-decadal_monthly_2.yml +++ b/tests/recipes/recipe-decadal_monthly_2.yml @@ -46,6 +46,8 @@ Analysis: index: FALSE Visualization: plots: most_likely_terciles skill_metrics forecast_ensemble_mean + multi_panel: yes + projection: cylindrical_equidistant ncores: # Optional, int: number of cores, defaults to 1 remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE Output_format: S2S4E diff --git a/tests/recipes/recipe-seasonal_monthly_1.yml b/tests/recipes/recipe-seasonal_monthly_1.yml index 21321fab7a0de8d347ce0b38d188979bba3897ac..0edf4795764bf101cd3bd67de17e66065fc8602a 100644 --- a/tests/recipes/recipe-seasonal_monthly_1.yml +++ b/tests/recipes/recipe-seasonal_monthly_1.yml @@ -45,6 +45,8 @@ Analysis: index: no Visualization: plots: skill_metrics most_likely_terciles forecast_ensemble_mean + multi_panel: yes + projection: cylindrical_equidistant Output_format: S2S4E Run: Loglevel: INFO diff --git a/tools/check_recipe.R b/tools/check_recipe.R index a9170707d0ce9b0ebc5ab7410737ce6b33af893e..abd0c8e2a24a9b311526a502103444557096f3d7 100644 --- a/tools/check_recipe.R +++ b/tools/check_recipe.R @@ -341,7 +341,7 @@ check_recipe <- function(recipe) { if ("Visualization" %in% names(recipe$Analysis$Workflow)) { PLOT_OPTIONS <- c("skill_metrics", "forecast_ensemble_mean", "most_likely_terciles") - ## Separate plots parameter and check if all elements are in PLOT_OPTIONS + # Separate plots parameter and check if all elements are in
PLOT_OPTIONS if (is.null(recipe$Analysis$Workflow$Visualization$plots)) { error(recipe$Run$logger, "The 'plots' element must be defined under 'Visualization'.") @@ -356,6 +356,23 @@ check_recipe <- function(recipe) { error_status <- T } } + # Check multi_panel option + if ((is.null(recipe$Analysis$Workflow$Visualization$multi_panel)) || + (!is.logical(recipe$Analysis$Workflow$Visualization$multi_panel))) { + error(recipe$Run$logger, + paste0("The 'multi_panel' element must be defined under ", + "Visualization, the options are 'yes' or 'no'.")) + error_status <- T + } + # Check projection + if (is.null(recipe$Analysis$Workflow$Visualization$projection)) { + warn(recipe$Run$logger, + paste0("No projection has been specified for the plots, the ", + "default projection is cylindrical equidistant.")) + ## TODO: This will not work + recipe$Analysis$Workflow$Visualization$projection <- "cylindrical_equidistant" + } + ## TODO: Add significance? } # --------------------------------------------------------------------- diff --git a/tools/libs.R b/tools/libs.R index 7a6b550f0eedb0c0146394bd7344ad11a6208bfe..e1179e8dc948372f18ba95a6909f48340f509df3 100644 --- a/tools/libs.R +++ b/tools/libs.R @@ -6,29 +6,20 @@ library(multiApply) library(yaml) library(s2dv) library(abind) -# library(s2dverification) -# library(ncdf4) -# library(easyVerification) library(easyNCDF) library(CSTools) library(lubridate) library(PCICt) library(RColorBrewer) library(configr) -# -# library(parallel) +library(sf) +library(ggplot2) +library(rnaturalearth) +library(cowplot) +library(stringr) library(pryr) # To check mem usage. -#setwd("/esarchive/scratch/nperez/git/S2S4E-backend-BSC/data-analysis/") -# source('export_2_nc.R') -# source('S2S/s2s.filefmt.R') -# source('s2s.calib.R') -# -# source("s2s_tools.R") -# source("Calibration_fcst4.R") -# source("R_Reorder.R") -# source("R_CST_MergeDims.R") -#setwd(recipe$Run$code_dir) -# # To be removed when new package is done by library(CSOperational) + +## To be removed when new package is done by library(CSOperational) source("tools/check_recipe.R") source("tools/prepare_outputs.R") source("tools/divide_recipe.R")
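## Illustrative note (an assumption, not the project's chosen fix) on the
## "## TODO: This will not work" default projection in check_recipe(): R passes 'recipe'
## by value, so assigning the default inside the function only changes the local copy.
## One minimal sketch is to return the modified recipe and reassign it at the call site;
## alternatively, the plotting functions can keep falling back to "cylindrical_equidistant"
## when the field is NULL, as plot_skill_metrics() and plot_ensemble_mean() already do.
set_default_projection <- function(recipe) {
  if (is.null(recipe$Analysis$Workflow$Visualization$projection)) {
    recipe$Analysis$Workflow$Visualization$projection <- "cylindrical_equidistant"
  }
  return(recipe)
}
## Example call site: recipe <- set_default_projection(recipe)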