diff --git a/modules/Calibration/Calibration.R b/modules/Calibration/Calibration.R
index ec4aabf25b211ed374ba4a840beb5fe042ca5c66..59e5451a437d2ed414c6c7fd9060f300ed9bd029 100644
--- a/modules/Calibration/Calibration.R
+++ b/modules/Calibration/Calibration.R
@@ -1,5 +1,5 @@
-calibrate_datasets <- function(data, recipe) {
+calibrate_datasets <- function(recipe, data) {
   # Function that calibrates the hindcast using the method stated in the
   # recipe. If the forecast is not null, it calibrates it as well.
   #
@@ -9,9 +9,9 @@ calibrate_datasets <- function(data, recipe) {
   method <- tolower(recipe$Analysis$Workflow$Calibration$method)
 
   if (method == "raw") {
-    warning("The Calibration module has been called, but the calibration ",
-            "method in the recipe is 'raw'. The hcst and fcst will not be ",
-            "calibrated.")
+    warn(recipe$Run$logger, "The Calibration module has been called,
+         but the calibration method in the recipe is 'raw'.
+         The hcst and fcst will not be calibrated.")
     fcst_calibrated <- data$fcst
     hcst_calibrated <- data$hcst
     CALIB_MSG <- "##### NO CALIBRATION PERFORMED #####"
@@ -49,8 +49,9 @@ calibrate_datasets <- function(data, recipe) {
     ## TODO: implement other calibration methods
     ## TODO: Restructure the code?
     if (!(method %in% CST_CALIB_METHODS)) {
-      stop("Calibration method in the recipe is not available for monthly",
-           " data.")
+      error(recipe$Run$logger, "Calibration method in the recipe is not
+            available for monthly data.")
+      stop()
     } else {
       ## Alba's version of CST_Calibration (pending merge) is being used
       # Calibrate the hindcast
@@ -85,8 +86,10 @@ calibrate_datasets <- function(data, recipe) {
   } else if (recipe$Analysis$Variables$freq == "daily_mean") {
     # Daily data calibration using Quantile Mapping
     if (!(method %in% c("qmap"))) {
-      stop("Calibration method in the recipe is not available at daily ",
-           "frequency. Only quantile mapping 'qmap' is implemented.")
+      error(recipe$Run$logger, "Calibration method in the recipe is not
+            available for daily data. Only quantile mapping 'qmap' is
+            implemented.")
+      stop()
     }
     # Calibrate the hindcast
     dim_order <- names(dim(data$hcst$data))
@@ -120,7 +123,6 @@ calibrate_datasets <- function(data, recipe) {
       }
     }
   }
-print(CALIB_MSG)
-  ## TODO: Return observations too?
+  info(recipe$Run$logger, CALIB_MSG)
   return(list(hcst = hcst_calibrated, fcst = fcst_calibrated))
 }
diff --git a/modules/Loading/Loading.R b/modules/Loading/Loading.R
index a93be8cad6c4406664e94ce8c13246b96e14eeef..f78bd144d23022e5be5a13e7c35851865156b724 100644
--- a/modules/Loading/Loading.R
+++ b/modules/Loading/Loading.R
@@ -5,24 +5,8 @@ source("modules/Loading/dates2load.R")
 source("modules/Loading/check_latlon.R")
 source("tools/libs.R")
 
-# RECIPE FOR TESTING
-# --------------------------------------------------------------------------------
-# recipe_file <- "modules/Loading/testing_recipes/recipe_3.yml"
-# recipe_file <- "modules/Loading/testing_recipes/recipe_2.yml"
-# recipe_file <- "modules/Loading/testing_recipes/recipe_1.yml"
-load_datasets <- function(recipe_file) {
-
-  recipe <- read_yaml(recipe_file)
-  recipe$filepath <- recipe_file
-  recipe$name <- tools::file_path_sans_ext(basename(recipe_file))
-
-  ## TODO: this should come from the main script
-  # Create output folder and log:
-  logger <- prepare_outputs(recipe = recipe)
-  folder <- logger$foldername
-  log_file <- logger$logname
-  logger <- logger$logger
+load_datasets <- function(recipe) {
 
   # -------------------------------------------
   # Set params -----------------------------------------
@@ -40,7 +24,8 @@ load_datasets <- function(recipe_file) {
   store.freq <- recipe$Analysis$Variables$freq
 
   # get sdates array
-  sdates <- dates2load(recipe, logger)
+  ## LOGGER: Change dates2load to extract logger from recipe?
+  sdates <- dates2load(recipe, recipe$Run$logger)
 
   idxs <- NULL
   idxs$hcst <- get_timeidx(sdates$hcst,
@@ -298,17 +283,39 @@ load_datasets <- function(recipe_file) {
   # Check for consistency between hcst and obs grid
   if (!(recipe$Analysis$Regrid$type == 'none')) {
     if (!identical(as.vector(hcst$lat), as.vector(obs$lat))) {
-      stop("hcst and obs don't share the same latitude.")
+      lat_error_msg <- paste("Latitude mismatch between hcst and obs.",
+                             "Please check the original grids and the",
+                             "regrid parameters in your recipe.")
+      error(recipe$Run$logger, lat_error_msg)
+      hcst_lat_msg <- paste0("First hcst lat: ", hcst$lat[1],
+                             "; Last hcst lat: ", hcst$lat[length(hcst$lat)])
+      info(recipe$Run$logger, hcst_lat_msg)
+      obs_lat_msg <- paste0("First obs lat: ", obs$lat[1],
+                            "; Last obs lat: ", obs$lat[length(obs$lat)])
+      info(recipe$Run$logger, obs_lat_msg)
+      stop("hcst and obs don't share the same latitudes.")
     }
     if (!identical(as.vector(hcst$lon), as.vector(obs$lon))) {
-      stop("hcst and obs don't share the same longitude.")
+      lon_error_msg <- paste("Longitude mismatch between hcst and obs.",
+                             "Please check the original grids and the",
+                             "regrid parameters in your recipe.")
+      error(recipe$Run$logger, lon_error_msg)
+      hcst_lon_msg <- paste0("First hcst lon: ", hcst$lon[1],
+                             "; Last hcst lon: ", hcst$lon[length(hcst$lon)])
+      info(recipe$Run$logger, hcst_lon_msg)
+      obs_lon_msg <- paste0("First obs lon: ", obs$lon[1],
+                            "; Last obs lon: ", obs$lon[length(obs$lon)])
+      info(recipe$Run$logger, obs_lon_msg)
+      stop("hcst and obs don't share the same longitudes.")
+    }
   }
 
   # Remove negative values in accumulative variables
   dictionary <- read_yaml("conf/variable-dictionary.yml")
   if (dictionary$vars[[variable]]$accum) {
-    info(logger, "Accumulated variable: setting negative values to zero.")
+    info(recipe$Run$logger,
+         "Accumulated variable: setting negative values to zero.")
     obs$data[obs$data < 0] <- 0
     hcst$data[hcst$data < 0] <- 0
     if (!is.null(fcst)) {
@@ -324,7 +331,7 @@ load_datasets <- function(recipe_file) {
           attr(hcst$Variable, "variable")$units) &&
           (attr(obs$Variable, "variable")$units == "m s-1")) {
 
-    info(logger, "Converting precipitation from m/s to mm/day.")
+    info(recipe$Run$logger, "Converting precipitation from m/s to mm/day.")
     obs$data <- obs$data*84000*1000
     attr(obs$Variable, "variable")$units <- "mm/day"
     hcst$data <- hcst$data*84000*1000
@@ -337,13 +344,14 @@ load_datasets <- function(recipe_file) {
   }
 
   # Print a summary of the loaded data for the user, for each object
-  data_summary(hcst, store.freq)
-  data_summary(obs, store.freq)
+  data_summary(hcst, recipe)
+  data_summary(obs, recipe)
 
   if (!is.null(fcst)) {
-    data_summary(fcst, store.freq)
+    data_summary(fcst, recipe)
   }
 
-  print("##### DATA LOADING COMPLETED SUCCESSFULLY #####")
+  info(recipe$Run$logger,
+       "##### DATA LOADING COMPLETED SUCCESSFULLY #####")
 
 ############################################################################
 #
@@ -360,7 +368,7 @@ load_datasets <- function(recipe_file) {
   #       freq.obs,"obs.grid","/",variable,"_",obs.NA_dates,".nc")
   #
   #if (any(is.na(hcst))){
-  #  fatal(logger,
+  #  fatal(recipe$Run$logger,
   #        paste(" ERROR: MISSING HCST VALUES FOUND DURING LOADING  # ",
   #              " ################################################# ",
   #              " ###### MISSING FILES #### ",
@@ -374,7 +382,7 @@ load_datasets <- function(recipe_file) {
   #}
   #
   #if (any(is.na(obs)) && !identical(obs.NA_dates,character(0))){
-  #  fatal(logger,
+  #  fatal(recipe$logger,
   #        paste(" ERROR: MISSING OBS VALUES FOUND DURING LOADING  # ",
   #              " ################################################# ",
   #              " ###### MISSING FILES #### ",
@@ -387,7 +395,7 @@ load_datasets <- function(recipe_file) {
   #  quit(status=1)
   #}
   #
-  #info(logger,
+  #info(recipe$logger,
   #     "######### DATA LOADING COMPLETED SUCCESFULLY ##############")
 
 ############################################################################
diff --git a/modules/Loading/Loading_decadal.R b/modules/Loading/Loading_decadal.R
index 9c4bb33dbf9805c3719c7003997fa6e9b1a70f7a..e9f8b2748ce5b439a9f1a5827dd17d85f3800087 100644
--- a/modules/Loading/Loading_decadal.R
+++ b/modules/Loading/Loading_decadal.R
@@ -14,17 +14,12 @@ source("tools/libs.R")
 ## TODO: Remove once the fun is included in CSTools
 source("tools/tmp/as.s2dv_cube.R")
 
-
 #====================================================================
 # recipe_file <- "modules/Loading/testing_recipes/recipe_decadal.yml"
 # recipe_file <- "modules/Loading/testing_recipes/recipe_decadal_daily.yml"
 
-load_datasets <- function(recipe_file) {
-
-  recipe <- read_yaml(recipe_file)
-  recipe$filepath <- recipe_file
-  recipe$name <- tools::file_path_sans_ext(basename(recipe_file))
+load_datasets <- function(recipe) {
 
   archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
 
@@ -33,11 +28,7 @@ load_datasets <- function(recipe_file) {
 
   ## TODO: this should come from the main script
   # Create output folder and log:
-  logger <- prepare_outputs(recipe = recipe)
-  folder <- logger$foldername
-  log_file <- logger$logname
-  logger <- logger$logger
-
+
   #-------------------------
   # Read from recipe:
   #-------------------------
@@ -173,7 +164,9 @@ load_datasets <- function(recipe_file) {
 
   # dim(hcst) should be [dat, var, sday, sweek, syear, time, latitude, longitude, ensemble]
   dim(hcst) <- c(dim(hcst)[1:2], sday = 1, sweek = 1, dim(hcst)[3:7])
   if (!identical(dim(tmp_time_attr), dim(hcst)[c('syear', 'time')])) {
-    stop("hcst has problem in matching data and time attr dimension.")
+    error(recipe$Run$logger,
+          "hcst has problem in matching data and time attr dimension.")
+    stop()
   }
   dim(attr(hcst, 'Variables')$common$time) <- c(sday = 1, sweek = 1, dim(tmp_time_attr))
@@ -245,7 +238,9 @@ load_datasets <- function(recipe_file) {
 
   # dim(fcst) should be [dat, var, sday, sweek, syear, time, latitude, longitude, ensemble]
   dim(fcst) <- c(dim(fcst)[1:2], sday = 1, sweek = 1, dim(fcst)[3:7])
   if (!identical(dim(tmp_time_attr), dim(fcst)[c('syear', 'time')])) {
-    stop("fcst has problem in matching data and time attr dimension.")
+    error(recipe$Run$logger,
+          "fcst has problem in matching data and time attr dimension.")
+    stop()
   }
   dim(attr(fcst, 'Variables')$common$time) <- c(sday = 1, sweek = 1, dim(tmp_time_attr))
@@ -261,7 +256,9 @@ load_datasets <- function(recipe_file) {
 
   # Only syear could be different
   if (!identical(dim(hcst$data)[-5], dim(fcst$data)[-5])) {
-    stop("hcst and fcst do not share the same dimension structure.")
+    error(recipe$Run$logger,
+          "hcst and fcst do not share the same dimension structure.")
+    stop()
   }
 
 } else {
@@ -349,7 +346,9 @@ load_datasets <- function(recipe_file) {
 
   # Only ensemble dim could be different
   if (!identical(dim(obs), dim(hcst$data)[-9])) {
-    stop("obs and hcst dimensions do not match.")
+    error(recipe$Run$logger,
+          "obs and hcst dimensions do not match.")
+    stop()
   }
   # Add ensemble dim to obs
   dim(obs) <- c(dim(obs), ensemble = 1)
@@ -365,45 +364,106 @@ load_datasets <- function(recipe_file) {
   #-------------------------------------------
 
   # dimension
   if (any(!names(dim(obs$data)) %in% names(dim(hcst$data)))) {
-    stop("hcst and obs don't share the same dimension names.")
+    error(recipe$Run$logger,
+          "hcst and obs don't share the same dimension names.")
+    stop()
   } else {
     ens_ind <- which(names(dim(obs$data)) == 'ensemble')
     match_ind <- match(names(dim(obs$data))[-ens_ind], names(dim(hcst$data)))
-    if (!all(dim(hcst$data)[match_ind] == dim(obs$data)[-ens_ind])) stop("hcst and obs don't share the same dimension length.")
+    if (!all(dim(hcst$data)[match_ind] == dim(obs$data)[-ens_ind])) {
+      error(recipe$Run$logger,
+            "hcst and obs don't share the same dimension length.")
+      stop()
+    }
   }
 
   # time attribute
   if (!identical(format(hcst$Dates$start, '%Y%m'),
-                 format(obs$Dates$start, '%Y%m')))
-    stop("hcst and obs don't share the same time.")
+                 format(obs$Dates$start, '%Y%m'))) {
+    error(recipe$Run$logger,
+          "hcst and obs don't share the same time.")
+    stop()
+  }
 
   # lat and lon attributes
-  if (!identical(as.vector(hcst$lat),
-                 as.vector(obs$lat)))
-    stop("hcst and obs don't share the same latitude.")
-  if (!identical(as.vector(hcst$lon),
-                 as.vector(obs$lon)))
-    stop("hcst and obs don't share the same longitude.")
+  if (!(recipe$Analysis$Regrid$type == 'none')) {
+    if (!identical(as.vector(hcst$lat), as.vector(obs$lat))) {
+      lat_error_msg <- paste("Latitude mismatch between hcst and obs.",
+                             "Please check the original grids and the",
+                             "regrid parameters in your recipe.")
+      error(recipe$Run$logger, lat_error_msg)
+      hcst_lat_msg <- paste0("First hcst lat: ", hcst$lat[1],
+                             "; Last hcst lat: ", hcst$lat[length(hcst$lat)])
+      info(recipe$Run$logger, hcst_lat_msg)
+      obs_lat_msg <- paste0("First obs lat: ", obs$lat[1],
+                            "; Last obs lat: ", obs$lat[length(obs$lat)])
+      info(recipe$Run$logger, obs_lat_msg)
+      stop("hcst and obs don't share the same latitudes.")
+    }
+
+    if (!identical(as.vector(hcst$lon), as.vector(obs$lon))) {
+      lon_error_msg <- paste("Longitude mismatch between hcst and obs.",
+                             "Please check the original grids and the",
+                             "regrid parameters in your recipe.")
+      error(recipe$Run$logger, lon_error_msg)
+      hcst_lon_msg <- paste0("First hcst lon: ", hcst$lon[1],
+                             "; Last hcst lon: ", hcst$lon[length(hcst$lon)])
+      info(recipe$Run$logger, hcst_lon_msg)
+      obs_lon_msg <- paste0("First obs lon: ", obs$lon[1],
+                            "; Last obs lon: ", obs$lon[length(obs$lon)])
+      info(recipe$Run$logger, obs_lon_msg)
+      stop("hcst and obs don't share the same longitudes.")
+    }
+  }
 
   # Check fcst
   if (!is.null(fcst)) {
     # dimension
     if (any(!names(dim(fcst$data)) %in% names(dim(hcst$data)))) {
-      stop("hcst and fcst don't share the same dimension names.")
+      error(recipe$Run$logger,
+            "hcst and fcst don't share the same dimension names.")
+      stop()
     } else {
       ens_ind <- which(names(dim(fcst$data)) %in% c('ensemble', 'syear'))
       match_ind <- match(names(dim(fcst$data))[-ens_ind], names(dim(hcst$data)))
-      if (!all(dim(hcst$data)[match_ind] == dim(fcst$data)[-ens_ind]))
-        stop("hcst and fcst don't share the same dimension length.")
+      if (!all(dim(hcst$data)[match_ind] == dim(fcst$data)[-ens_ind])) {
+        error(recipe$Run$logger,
+              "hcst and fcst don't share the same dimension length.")
+        stop()
+      }
     }
 
     # lat and lon attributes
-    if (!identical(as.vector(hcst$lat),
-                   as.vector(fcst$lat)))
-      stop("hcst and fcst don't share the same latitude.")
-    if (!identical(as.vector(hcst$lon),
-                   as.vector(fcst$lon)))
-      stop("hcst and fcst don't share the same longitude.")
+    if (!(recipe$Analysis$Regrid$type == 'none')) {
+      if (!identical(as.vector(hcst$lat), as.vector(fcst$lat))) {
+        lat_error_msg <- paste("Latitude mismatch between hcst and fcst.",
+                               "Please check the original grids and the",
+                               "regrid parameters in your recipe.")
+        error(recipe$Run$logger, lat_error_msg)
+        hcst_lat_msg <- paste0("First hcst lat: ", hcst$lat[1],
+                               "; Last hcst lat: ", hcst$lat[length(hcst$lat)])
+        info(recipe$Run$logger, hcst_lat_msg)
+        fcst_lat_msg <- paste0("First fcst lat: ", fcst$lat[1],
+                               "; Last fcst lat: ", fcst$lat[length(fcst$lat)])
+        info(recipe$Run$logger, fcst_lat_msg)
+        stop("hcst and fcst don't share the same latitudes.")
+      }
+
+      if (!identical(as.vector(hcst$lon), as.vector(fcst$lon))) {
+        lon_error_msg <- paste("Longitude mismatch between hcst and fcst.",
+                               "Please check the original grids and the",
+                               "regrid parameters in your recipe.")
+        error(recipe$Run$logger, lon_error_msg)
+        hcst_lon_msg <- paste0("First hcst lon: ", hcst$lon[1],
+                               "; Last hcst lon: ", hcst$lon[length(hcst$lon)])
+        info(recipe$Run$logger, hcst_lon_msg)
+        fcst_lon_msg <- paste0("First fcst lon: ", fcst$lon[1],
+                               "; Last fcst lon: ", fcst$lon[length(fcst$lon)])
+        info(recipe$Run$logger, fcst_lon_msg)
+        stop("hcst and fcst don't share the same longitudes.")
+      }
+    }
+  }
 
@@ -413,7 +473,8 @@ load_datasets <- function(recipe_file) {
   # Remove negative values in accumulative variables
   dictionary <- read_yaml("conf/variable-dictionary.yml")
   if (dictionary$vars[[variable]]$accum) {
-    info(logger, " Accumulated variable: setting negative values to zero.")
+    info(recipe$Run$logger,
+         " Accumulated variable: setting negative values to zero.")
     obs$data[obs$data < 0] <- 0
     hcst$data[hcst$data < 0] <- 0
     if (!is.null(fcst)) {
@@ -428,7 +489,8 @@ load_datasets <- function(recipe_file) {
           attr(hcst$Variable, "variable")$units) &&
           (attr(obs$Variable, "variable")$units == "m s-1")) {
 
-    info(logger, "Converting precipitation from m/s to mm/day.")
+    info(recipe$Run$logger,
+         "Converting precipitation from m/s to mm/day.")
     obs$data <- obs$data*84000*1000
     attr(obs$Variable, "variable")$units <- "mm/day"
     hcst$data <- hcst$data*84000*1000
@@ -445,13 +507,14 @@ load_datasets <- function(recipe_file) {
   #-------------------------------------------
 
   # Print a summary of the loaded data for the user, for each object
-  data_summary(hcst, store.freq)
-  data_summary(obs, store.freq)
+  data_summary(hcst, recipe)
+  data_summary(obs, recipe)
 
   if (!is.null(fcst)) {
-    data_summary(fcst, store.freq)
+    data_summary(fcst, recipe)
   }
 
-  print("##### DATA LOADING COMPLETED SUCCESSFULLY #####")
+  info(recipe$Run$logger,
+       "##### DATA LOADING COMPLETED SUCCESSFULLY #####")
 
   return(list(hcst = hcst, fcst = fcst, obs = obs))
diff --git a/modules/Loading/testing_recipes/recipe_test-logging.yml b/modules/Loading/testing_recipes/recipe_test-logging.yml
new file mode 100644
index 0000000000000000000000000000000000000000..372f6d83bdd31b8258bd28aa42237c95a8111ff3
--- /dev/null
+++ b/modules/Loading/testing_recipes/recipe_test-logging.yml
@@ -0,0 +1,47 @@
+Description:
+  Author: V. Agudetse
+  Info: Light recipe to raise some errors/warnings and test the logging system
+
+Analysis:
+  Horizon: Seasonal
+  Variables:
+    name: tas
+    freq: monthly_mean
+  Datasets:
+    System:
+      name: system7c3s
+    Multimodel: False
+    Reference:
+      name: era5
+  Time:
+    sdate: '1101'
+    fcst_year: '2020'
+    hcst_start: '1993'
+    hcst_end: '1996'
+    ftime_min: 1
+    ftime_max: 1
+  Region:
+    latmin: -10
+    latmax: 10
+    lonmin: 0
+    lonmax: 20
+  Regrid:
+    method: bilinear
+    type: to_system
+  Workflow:
+    Calibration:
+      method: qmap
+    Skill:
+      metric: mean_bias bias_SS
+    Probabilities:
+      percentiles:
+    Indicators:
+      index: no
+  ncores: 1
+  remove_NAs: yes
+  Output_format: S2S4E
+Run:
+  Loglevel: INFO
+  Terminal: yes
+  output_dir: /esarchive/scratch/vagudets/repos/auto-s2s/out-logs/
+  code_dir: /esarchive/scratch/vagudets/repos/auto-s2s/
diff --git a/modules/Saving/Saving.R b/modules/Saving/Saving.R
index 713741fb6435165f42d7d9a5b0dc3686c5edd857..ed0933f2a6c053b0e2a51880a072213789310292 100644
--- a/modules/Saving/Saving.R
+++ b/modules/Saving/Saving.R
@@ -2,10 +2,11 @@
 
 source("modules/Saving/paths2save.R")
 
-save_data <- function(recipe, archive, data,
+save_data <- function(recipe, data,
                       calibrated_data = NULL,
                       skill_metrics = NULL,
-                      probabilities = NULL) {
+                      probabilities = NULL,
+                      archive = NULL) {
   # Wrapper for the saving functions.
 
   # recipe: The auto-s2s recipe
@@ -19,14 +20,20 @@ save_data <- function(recipe, archive, data,
   if (is.null(recipe)) {
     stop("The 'recipe' parameter is mandatory.")
   }
-  if (is.null(archive)) {
-    stop("The 'archive' parameter is mandatory.")
-  }
+
   if (is.null(data)) {
     stop("The 'data' parameter is mandatory. It should be the output of",
          "load_datasets().")
   }
-
+  if (is.null(archive)) {
+    if (tolower(recipe$Analysis$Horizon) == "seasonal") {
+      archive <- read_yaml(paste0(recipe$Run$code_dir,
+                                  "conf/archive.yml"))$archive
+    } else if (tolower(recipe$Analysis$Horizon) == "decadal") {
+      archive <- read_yaml(paste0(recipe$Run$code_dir,
+                                  "conf/archive_decadal.yml"))$archive
+    }
+  }
   dict <- read_yaml("conf/variable-dictionary.yml")
 
   # Create output directory
@@ -208,7 +215,8 @@ save_forecast <- function(data_cube,
   leadtimes <- as.numeric(dates - init_date)/3600
 
   syears <- seq(1:dim(data_cube$data)['syear'][[1]])
-  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1]) # expect dim = [sday = 1, sweek = 1, syear, time]
+  # expect dim = [sday = 1, sweek = 1, syear, time]
+  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1])
   for (i in syears) {
     # Select year from array and rearrange dimensions
     fcst <- ClimProjDiags::Subset(data_cube$data, 'syear', i, drop = T)
@@ -247,8 +255,9 @@ save_forecast <- function(data_cube,
 
     # Select start date
     if (fcst.horizon == 'decadal') {
-      #NOTE: Not good to use data_cube$load_parameters$dat1 because decadal data has been reshaped
-#      fcst.sdate <- format(as.Date(data_cube$Dates$start[i]), '%Y%m%d')
+      ## NOTE: Not good to use data_cube$load_parameters$dat1 because decadal
+      ## data has been reshaped
+      # fcst.sdate <- format(as.Date(data_cube$Dates$start[i]), '%Y%m%d')
       # init_date is like "1990-11-01"
       init_date <- as.POSIXct(init_date)
@@ -282,7 +291,7 @@ save_forecast <- function(data_cube,
       ArrayToNc(vars, outfile)
     }
   }
-  print("##### FCST SAVED TO NETCDF FILE #####")
+  info(recipe$Run$logger, "##### FCST SAVED TO NETCDF FILE #####")
 }
@@ -330,7 +339,8 @@ save_observations <- function(data_cube,
   leadtimes <- as.numeric(dates - init_date)/3600
 
   syears <- seq(1:dim(data_cube$data)['syear'][[1]])
-  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1]) # expect dim = [sday = 1, sweek = 1, syear, time]
+  ## expect dim = [sday = 1, sweek = 1, syear, time]
+  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1])
   for (i in syears) {
     # Select year from array and rearrange dimensions
     fcst <- ClimProjDiags::Subset(data_cube$data, 'syear', i, drop = T)
@@ -417,11 +427,11 @@ save_observations <- function(data_cube,
       ArrayToNc(vars, outfile)
     }
   }
-  print("##### OBS SAVED TO NETCDF FILE #####")
+  info(recipe$Run$logger, "##### OBS SAVED TO NETCDF FILE #####")
 }
 
 ## TODO: Place inside a function somewhere
-# if (tolower(agg) == "country"){
+# if (tolower(agg) == "country") {
 #   load(mask.path)
 #   grid <- europe.countries.iso
 # } else {
@@ -534,7 +544,7 @@ save_metrics <- function(skill,
     vars <- c(vars, skill)
     ArrayToNc(vars, outfile)
   }
-  print("##### SKILL METRICS SAVED TO NETCDF FILE #####")
+  info(recipe$Run$logger, "##### SKILL METRICS SAVED TO NETCDF FILE #####")
 }
 
 save_corr <- function(skill,
@@ -641,7 +651,8 @@ save_corr <- function(skill,
     vars <- c(vars, skill)
     ArrayToNc(vars, outfile)
   }
-  print("##### ENSEMBLE CORRELATION SAVED TO NETCDF FILE #####")
+  info(recipe$Run$logger,
+       "##### ENSEMBLE CORRELATION SAVED TO NETCDF FILE #####")
 }
 
 save_percentiles <- function(percentiles,
@@ -740,7 +751,7 @@ save_percentiles <- function(percentiles,
     vars <- c(vars, percentiles)
     ArrayToNc(vars, outfile)
   }
-  print("##### PERCENTILES SAVED TO NETCDF FILE #####")
+  info(recipe$Run$logger, "##### PERCENTILES SAVED TO NETCDF FILE #####")
 }
 
 save_probabilities <- function(probs,
@@ -786,7 +797,8 @@ save_probabilities <- function(probs,
   leadtimes <- as.numeric(dates - init_date)/3600
 
   syears <- seq(1:dim(data_cube$data)['syear'][[1]])
-  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1]) # expect dim = [sday = 1, sweek = 1, syear, time]
+  ## expect dim = [sday = 1, sweek = 1, syear, time]
+  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1])
   for (i in syears) {
     # Select year from array and rearrange dimensions
     probs_syear <- lapply(probs, ClimProjDiags::Subset, 'syear', i, drop = 'selected')
@@ -848,5 +860,5 @@ save_probabilities <- function(probs,
       ArrayToNc(vars, outfile)
     }
   }
-  print("##### PROBABILITIES SAVED TO NETCDF FILE #####")
+  info(recipe$Run$logger, "##### PROBABILITIES SAVED TO NETCDF FILE #####")
 }
diff --git a/modules/Saving/paths2save.R b/modules/Saving/paths2save.R
index 70f6cc9243a6465d452d9d375e6f95bafccc8682..f48ebe7b058969568bee2c14d47394de18664c0e 100644
--- a/modules/Saving/paths2save.R
+++ b/modules/Saving/paths2save.R
@@ -39,7 +39,7 @@ get_dir <- function(recipe, agg = "global") {
 
   ## TODO: Get aggregation from recipe
   ## TODO: Add time frequency
-  outdir <- recipe$Run$output_dir
+  outdir <- paste0(recipe$Run$output_dir, "/outputs/")
   variable <- recipe$Analysis$Variables$name
   if (!is.null(recipe$Analysis$Time$fcst_year)) {
     if (tolower(recipe$Analysis$Horizon) == 'decadal') {
diff --git a/modules/Skill/Skill.R b/modules/Skill/Skill.R
index 253553ca38e23d5485dd947fae6e2474b2140719..99f12346c21e98e87c4e78efcaf688c0bb37ee41 100644
--- a/modules/Skill/Skill.R
+++ b/modules/Skill/Skill.R
@@ -51,7 +51,7 @@ source("modules/Skill/tmp/AbsBiasSS.R")
 #                      " running Skill module ", "\n",
 #                      " it can call ", metric_fun ))
 
-compute_skill_metrics <- function(exp, obs, recipe) {
+compute_skill_metrics <- function(recipe, exp, obs) {
   # exp: s2dv_cube containing the hindcast
   # obs: s2dv_cube containing the observations
   # recipe: auto-s2s recipe as provided by read_yaml
@@ -184,7 +184,9 @@ compute_skill_metrics <- function(exp, obs, recipe) {
       metric_name <- (strsplit(metric, "_"))[[1]][1]  # Get metric name
       if (!(metric_name %in% c('frpss', 'frps', 'bss10', 'bss90', 'enscorr',
                                'rpss'))) {
-        stop("Some of the requested metrics are not available.")
+        ## TODO: Test this scenario
+        warn(recipe$Run$logger,
+             "Some of the requested metrics are not available.")
       }
       capture.output(
         skill <- Compute_verif_metrics(exp$data, obs$data,
@@ -201,11 +203,11 @@ compute_skill_metrics <- function(exp, obs, recipe) {
       skill_metrics[[ metric ]] <- skill
     }
   }
-  print("##### SKILL METRIC COMPUTATION COMPLETE #####")
+  info(recipe$Run$logger, "##### SKILL METRIC COMPUTATION COMPLETE #####")
   return(skill_metrics)
 }
 
-compute_probabilities <- function(data, recipe) {
+compute_probabilities <- function(recipe, data) {
 
   if (is.null(recipe$Analysis$ncores)) {
     ncores <- 1
@@ -222,8 +224,9 @@ compute_probabilities <- function(data, recipe) {
   named_probs <- list()
   named_quantiles <- list()
   if (is.null(recipe$Analysis$Workflow$Probabilities$percentiles)) {
-    stop("Quantiles and probability bins have been requested, but no ",
-         "thresholds are provided in the recipe.")
+    error(recipe$Run$logger, "Quantiles and probability bins have been
+          requested, but no thresholds are provided in the recipe.")
+    stop()
   } else {
     for (element in recipe$Analysis$Workflow$Probabilities$percentiles) {
       # Parse thresholds in recipe
@@ -259,10 +262,12 @@ compute_probabilities <- function(data, recipe) {
     named_probs <- lapply(named_probs, function(x) {.drop_dims(x)})
     named_quantiles <- lapply(named_quantiles, function(x) {.drop_dims(x)})
   }
-  print("##### PERCENTILES AND PROBABILITY CATEGORIES COMPUTED #####")
+  info(recipe$Run$logger,
+       "##### PERCENTILES AND PROBABILITY CATEGORIES COMPUTED #####")
   return(list(probs=named_probs, percentiles=named_quantiles))
 }
 
+## TODO: Replace with ClimProjDiags::Subset
 .drop_dims <- function(metric_array) {
   # Drop all singleton dimensions
   metric_array <- drop(metric_array)
diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R
index a4450d14e45e241dd7e540b55aab242423693154..ff0e9fd44d8e71b831819271d51c7f1374e20068 100644
--- a/modules/Visualization/Visualization.R
+++ b/modules/Visualization/Visualization.R
@@ -7,11 +7,11 @@ source("modules/Visualization/tmp/PlotCombinedMap.R")
 ## TODO: Add param 'raw'?
 
 plot_data <- function(recipe,
-                      archive,
                       data,
                       calibrated_data = NULL,
                       skill_metrics = NULL,
                       probabilities = NULL,
+                      archive = NULL,
                       significance = F) {
 
   # Try to produce and save several basic plots.
@@ -32,6 +32,16 @@ plot_data <- function(recipe,
             "that can be plotted.")
   }
 
+  if (is.null(archive)) {
+    if (tolower(recipe$Analysis$Horizon) == "seasonal") {
+      archive <- read_yaml(paste0(recipe$Run$code_dir,
+                                  "conf/archive.yml"))$archive
+    } else if (tolower(recipe$Analysis$Horizon) == "decadal") {
+      archive <- read_yaml(paste0(recipe$Run$code_dir,
+                                  "conf/archive_decadal.yml"))$archive
+    }
+  }
+
   # Plot skill metrics
   if (!is.null(skill_metrics)) {
     plot_skill_metrics(recipe, archive, data$hcst, skill_metrics, outdir,
@@ -42,8 +52,8 @@ plot_data <- function(recipe,
     if (!is.null(calibrated_data$fcst)) {
       plot_ensemble_mean(recipe, archive, calibrated_data$fcst, outdir)
     } else if (!is.null(data$fcst)) {
-      warning("Only the uncalibrated forecast was provided. Using this data ",
-              "to plot the forecast ensemble mean.")
+      warn(recipe$Run$logger, "Only the uncalibrated forecast was provided.
+           Using this data to plot the forecast ensemble mean.")
       plot_ensemble_mean(recipe, archive, data$fcst, outdir)
     }
@@ -52,8 +62,8 @@ plot_data <- function(recipe,
       plot_most_likely_terciles(recipe, archive, calibrated_data$fcst,
                                 probabilities$percentiles, outdir)
     } else if ((!is.null(probabilities)) && (!is.null(data$fcst))) {
-      warning("Only the uncalibrated forecast was provided. Using this data ",
-              "to plot the most likely terciles.")
+      warn(recipe$Run$logger, "Only the uncalibrated forecast was provided.
+           Using this data to plot the most likely terciles.")
       plot_most_likely_terciles(recipe, archive, data$fcst,
                                 probabilities$percentiles, outdir)
     }
@@ -67,7 +77,7 @@ plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics,
     stop("Visualization functions not yet implemented for daily data.")
   }
 
   # Abort if skill_metrics is not list
-  if (!is.list(skill_metrics)) {
+  if (!is.list(skill_metrics) || is.null(names(skill_metrics))) {
     stop("The element 'skill_metrics' must be a list of named arrays.")
   }
@@ -179,7 +189,8 @@ plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics,
     }
   }
 
-  print("##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####")
+  info(recipe$Run$logger,
+       "##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####")
 }
 
 plot_ensemble_mean <- function(recipe, archive, fcst, outdir) {
@@ -252,7 +263,8 @@ plot_ensemble_mean <- function(recipe, archive, fcst, outdir) {
                 bar_label_digits = 4)
   }
 
-  print("##### FCST ENSEMBLE MEAN PLOT SAVED TO OUTPUT DIRECTORY #####")
+  info(recipe$Run$logger,
+       "##### FCST ENSEMBLE MEAN PLOT SAVED TO OUTPUT DIRECTORY #####")
 }
 
 plot_most_likely_terciles <- function(recipe, archive,
@@ -336,5 +348,6 @@ plot_most_likely_terciles <- function(recipe, archive,
     )
   }
 
-  print("##### MOST LIKELY TERCILE PLOT SAVED TO OUTPUT DIRECTORY #####")
+  info(recipe$Run$logger,
+       "##### MOST LIKELY TERCILE PLOT SAVED TO OUTPUT DIRECTORY #####")
 }
diff --git a/modules/test_decadal.R b/modules/test_decadal.R
index 01cf2d9201191eab287ab5629a520a01261f73a2..80304f978052889c19eccdf85ba9f1e99488dfe8 100644
--- a/modules/test_decadal.R
+++ b/modules/test_decadal.R
@@ -6,25 +6,25 @@ source("modules/Saving/Saving.R")
 source("modules/Visualization/Visualization.R")
 
 recipe_file <- "modules/Loading/testing_recipes/recipe_decadal.yml"
-recipe <- read_yaml(recipe_file)
-archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
+recipe <- prepare_outputs(recipe_file)
+# archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
 
 # Load datasets
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 
 # Calibrate datasets
-calibrated_data <- calibrate_datasets(data, recipe)
+calibrated_data <- calibrate_datasets(recipe, data)
 
 # Compute skill metrics
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 
 # Compute percentiles and probability bins
-probabilities <- compute_probabilities(calibrated_data$hcst, recipe)
+probabilities <- compute_probabilities(recipe, calibrated_data$hcst)
 
 # Export all data to netCDF
-save_data(recipe, archive, data, calibrated_data, skill_metrics, probabilities)
+save_data(recipe, data, calibrated_data, skill_metrics, probabilities)
 
 # Plot data
-plot_data(recipe, archive, data, calibrated_data, skill_metrics,
-          probabilities, significance = T)
+plot_data(recipe, data, calibrated_data, skill_metrics, probabilities,
+          significance = T)
diff --git a/modules/test_seasonal.R b/modules/test_seasonal.R
index 5f59794f4e6fc6678119acd2d1979f27ec869663..d8eb5c4eba48b064ad463dc52b6e863d02b0589e 100644
--- a/modules/test_seasonal.R
+++ b/modules/test_seasonal.R
@@ -1,4 +1,3 @@
-
 source("modules/Loading/Loading.R")
 source("modules/Calibration/Calibration.R")
 source("modules/Skill/Skill.R")
@@ -6,19 +5,19 @@ source("modules/Saving/Saving.R")
 source("modules/Visualization/Visualization.R")
 
 recipe_file <- "modules/Loading/testing_recipes/recipe_system7c3s-tas.yml"
-recipe <- read_yaml(recipe_file)
-archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive.yml"))$archive
+recipe <- prepare_outputs(recipe_file)
+# archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive.yml"))$archive
 
 # Load datasets
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 
 # Calibrate datasets
-calibrated_data <- calibrate_datasets(data, recipe)
+calibrated_data <- calibrate_datasets(recipe, data)
 
 # Compute skill metrics
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 
 # Compute percentiles and probability bins
-probabilities <- compute_probabilities(calibrated_data$hcst, recipe)
+probabilities <- compute_probabilities(recipe, calibrated_data$hcst)
 
 # Export all data to netCDF
-save_data(recipe, archive, data, calibrated_data, skill_metrics, probabilities)
+save_data(recipe, data, calibrated_data, skill_metrics, probabilities)
 
 # Plot data
-plot_data(recipe, archive, data, calibrated_data, skill_metrics,
-          probabilities, significance = T)
+plot_data(recipe, data, calibrated_data, skill_metrics, probabilities,
+          significance = T)
diff --git a/tests/testthat/test-decadal_daily_1.R b/tests/testthat/test-decadal_daily_1.R
index a78fd1350ae3f8629929c81d4c529b0daa4f511b..c9833d2b34413422f49dcb13d24991ed624c0d80 100644
--- a/tests/testthat/test-decadal_daily_1.R
+++ b/tests/testthat/test-decadal_daily_1.R
@@ -8,12 +8,12 @@ source("modules/Skill/Skill.R")
 source("modules/Saving/Saving.R")
 
 recipe_file <- "tests/recipes/recipe-decadal_daily_1.yml"
-recipe <- read_yaml(recipe_file)
+recipe <- prepare_outputs(recipe_file)
 archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
 
 # Load datasets
 suppressWarnings({invisible(capture.output(
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 ))})
 
 ## Calibrate datasets
diff --git a/tests/testthat/test-decadal_monthly_1.R b/tests/testthat/test-decadal_monthly_1.R
index 7bb5031e3470854b269dcde36c76135a396f59ad..5cf1922e73e1ee4bc1a8c46f9998cc64ecbc145b 100644
--- a/tests/testthat/test-decadal_monthly_1.R
+++ b/tests/testthat/test-decadal_monthly_1.R
@@ -9,25 +9,25 @@ source("modules/Saving/Saving.R")
 source("modules/Visualization/Visualization.R")
 
 recipe_file <- "tests/recipes/recipe-decadal_monthly_1.yml"
-recipe <- read_yaml(recipe_file)
+recipe <- prepare_outputs(recipe_file)
 archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
 
 # Load datasets
 suppressWarnings({invisible(capture.output(
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 ))})
 
 # Calibrate datasets
 suppressWarnings({invisible(capture.output(
-  calibrated_data <- calibrate_datasets(data, recipe)
+  calibrated_data <- calibrate_datasets(recipe, data)
 ))})
 
 # Compute skill metrics
 suppressWarnings({invisible(capture.output(
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 ))})
 
 suppressWarnings({invisible(capture.output(
-probs <- compute_probabilities(calibrated_data$hcst, recipe)
+probs <- compute_probabilities(recipe, calibrated_data$hcst)
 ))})
 
 # Saving
diff --git a/tests/testthat/test-decadal_monthly_2.R b/tests/testthat/test-decadal_monthly_2.R
index ac4f2fffd610799375a5787e4559f82c904b3feb..4dd72ebf13c632479bbd34772a54509805b7c057 100644
--- a/tests/testthat/test-decadal_monthly_2.R
+++ b/tests/testthat/test-decadal_monthly_2.R
@@ -8,25 +8,24 @@ source("modules/Skill/Skill.R")
 source("modules/Saving/Saving.R")
 
 recipe_file <- "tests/recipes/recipe-decadal_monthly_2.yml"
-recipe <- read_yaml(recipe_file)
-archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
+recipe <- prepare_outputs(recipe_file)
 
 # Load datasets
 suppressWarnings({invisible(capture.output(
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 ))})
 
 # Calibrate datasets
 suppressWarnings({invisible(capture.output(
-  calibrated_data <- calibrate_datasets(data, recipe)
+  calibrated_data <- calibrate_datasets(recipe, data)
 ))})
 
 # Compute skill metrics
 suppressMessages({invisible(capture.output(
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 ))})
 
 suppressWarnings({invisible(capture.output(
-probs <- compute_probabilities(calibrated_data$hcst, recipe)
+probs <- compute_probabilities(recipe, calibrated_data$hcst)
 ))})
 
 #======================================
diff --git a/tests/testthat/test-decadal_monthly_3.R b/tests/testthat/test-decadal_monthly_3.R
index 21665f6e8dba9b8fb2de86f49d359392e9351c2c..7535e8dc6fcdedeb2eb7a69fa1f13e917e6178bc 100644
--- a/tests/testthat/test-decadal_monthly_3.R
+++ b/tests/testthat/test-decadal_monthly_3.R
@@ -8,25 +8,25 @@ source("modules/Skill/Skill.R")
 source("modules/Saving/Saving.R")
 
 recipe_file <- "tests/recipes/recipe-decadal_monthly_3.yml"
-recipe <- read_yaml(recipe_file)
+recipe <- prepare_outputs(recipe_file)
 archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive
 
 # Load datasets
 suppressWarnings({invisible(capture.output(
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 ))})
 
 # Calibrate datasets
 suppressWarnings({invisible(capture.output(
-  calibrated_data <- calibrate_datasets(data, recipe)
+  calibrated_data <- calibrate_datasets(recipe, data)
 ))})
 
 # Compute skill metrics
 suppressWarnings({invisible(capture.output(
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 ))})
 
 suppressWarnings({invisible(capture.output(
-probs <- compute_probabilities(calibrated_data$hcst, recipe)
+probs <- compute_probabilities(recipe, calibrated_data$hcst)
 ))})
 
 #======================================
diff --git a/tests/testthat/test-seasonal_daily.R b/tests/testthat/test-seasonal_daily.R
index 237674e09715273e3890961c3dd942103f996d3b..5b771d77fd4bf22eb74626ec7b2170602234dd0b 100644
--- a/tests/testthat/test-seasonal_daily.R
+++ b/tests/testthat/test-seasonal_daily.R
@@ -6,21 +6,20 @@ source("modules/Skill/Skill.R")
 source("modules/Saving/Saving.R")
 
 recipe_file <- "tests/recipes/recipe-seasonal_daily_1.yml"
-
+recipe <- prepare_outputs(recipe_file)
 # Load datasets
 suppressWarnings({invisible(capture.output(
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 ))})
 
-recipe <- read_yaml(recipe_file)
-
+# Calibrate data
 suppressWarnings({invisible(capture.output(
-calibrated_data <- calibrate_datasets(data, recipe)
+calibrated_data <- calibrate_datasets(recipe, data)
 ))})
 
 # Compute skill metrics
 suppressWarnings({invisible(capture.output(
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 ))})
 
 test_that("1. Loading", {
diff --git a/tests/testthat/test-seasonal_monthly.R b/tests/testthat/test-seasonal_monthly.R
index 90938d6201c62a4b0db6efff287aed8719d8cf72..86feedfbb14eb550cdf8cb26ee00283964d3df4d 100644
--- a/tests/testthat/test-seasonal_monthly.R
+++ b/tests/testthat/test-seasonal_monthly.R
@@ -7,38 +7,39 @@ source("modules/Saving/Saving.R")
 source("modules/Visualization/Visualization.R")
 
 recipe_file <- "tests/recipes/recipe-seasonal_monthly_1.yml"
-recipe <- read_yaml(recipe_file)
+recipe <- prepare_outputs(recipe_file)
 archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive.yml"))$archive
 
 # Load datasets
 suppressWarnings({invisible(capture.output(
-data <- load_datasets(recipe_file)
+data <- load_datasets(recipe)
 ))})
 
+# Calibrate data
 suppressWarnings({invisible(capture.output(
-calibrated_data <- calibrate_datasets(data, recipe)
+calibrated_data <- calibrate_datasets(recipe, data)
 ))})
 
 # Compute skill metrics
 suppressWarnings({invisible(capture.output(
-skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe)
+skill_metrics <- compute_skill_metrics(recipe, calibrated_data$hcst, data$obs)
 ))})
 
 suppressWarnings({invisible(capture.output(
-probs <- compute_probabilities(calibrated_data$hcst, recipe)
+probs <- compute_probabilities(recipe, calibrated_data$hcst)
 ))})
 
 # Saving
 suppressWarnings({invisible(capture.output(
 save_data(recipe = recipe, data = data, calibrated_data = calibrated_data,
-          skill_metrics = skill_metrics, probabilities = probs, archive = archive)
+          skill_metrics = skill_metrics, probabilities = probs)
 ))})
 
 # Plotting
 suppressWarnings({invisible(capture.output(
-plot_data(recipe = recipe, archive = archive, data = data,
-          calibrated_data = calibrated_data, skill_metrics = skill_metrics,
-          probabilities = probs, significance = T)
+plot_data(recipe = recipe, data = data, calibrated_data = calibrated_data,
+          skill_metrics = skill_metrics, probabilities = probs,
+          significance = T)
 ))})
 
 outdir <- get_dir(recipe)
diff --git a/tools/data_summary.R b/tools/data_summary.R
index e211e202cf63cccc183cc31bd42c3466d0752f92..34b6bd6e47d54477e1afc8cd846c4f3f7b54b0bd 100644
--- a/tools/data_summary.R
+++ b/tools/data_summary.R
@@ -4,27 +4,29 @@
 ## TODO: Adapt to daily/subseasonal cases
 ## TODO: Add check for missing files/NAs by dimension
 
-data_summary <- function(object, frequency) {
+data_summary <- function(data_cube, recipe) {
   # Get name, leadtime months and date range
-  object_name <- deparse(substitute(object))
-  if (tolower(frequency) == "monthly_mean") {
+  object_name <- deparse(substitute(data_cube))
+  if (recipe$Analysis$Variables$freq == "monthly_mean") {
     date_format <- '%b %Y'
-  } else if (tolower(frequency) == "daily_mean") {
+  } else if (recipe$Analysis$Variables$freq == "daily_mean") {
     date_format <- '%b %d %Y'
   }
-  months <- unique(format(as.Date(object$Dates[[1]]), format = '%B'))
+  months <- unique(format(as.Date(data_cube$Dates[[1]]), format = '%B'))
   months <- paste(as.character(months), collapse=", ")
-  sdate_min <- format(min(as.Date(object$Dates[[1]])), format = date_format)
-  sdate_max <- format(max(as.Date(object$Dates[[1]])), format = date_format)
+  sdate_min <- format(min(as.Date(data_cube$Dates[[1]])), format = date_format)
+  sdate_max <- format(max(as.Date(data_cube$Dates[[1]])), format = date_format)
 
-  print("DATA SUMMARY:")
+  # Create log instance and sink output to logfile and terminal
+  info(recipe$Run$logger, "DATA SUMMARY:")
+  sink(recipe$Run$logfile, append = TRUE, split = TRUE)
   print(paste0(object_name, " months: ", months))
   print(paste0(object_name, " range: ", sdate_min, " to ", sdate_max))
   print(paste0(object_name, " dimensions: "))
-  print(dim(object$data))
+  print(dim(data_cube$data))
   print(paste0("Statistical summary of the data in ", object_name, ":"))
-  print(summary(object$data))
+  print(summary(data_cube$data))
   print("---------------------------------------------")
-
+  sink()
 }
diff --git a/tools/prepare_outputs.R b/tools/prepare_outputs.R
index 18cc2e5827508e9d53839ccc6c45103aa956bcaf..8a6831788769db69196c0c4be100c2d00e7f2f68 100644
--- a/tools/prepare_outputs.R
+++ b/tools/prepare_outputs.R
@@ -5,52 +5,57 @@
 #'the recipe. It returns an object of class logger that stores information on
 #'the recipe configuration and errors.
 #'
-#'@param recipe Auto-S2S configuration recipe as returned by read_yaml()
+#'@param recipe_file path to a YAML file with Auto-S2S configuration recipe
 #'
-#'@return list contaning logger object, log filename and log directory name
+#'@return list containing recipe with logger, log file name and log dir name
 #'
 #'@import log4r
+#'@import yaml
 #'
 #'@examples
 #'setwd("/esarchive/scratch/vagudets/repos/auto-s2s/")
 #'library(yaml)
-#'recipe <- read_yaml("modules/data_load/recipe_1.yml")
-#'logger <- prepare_outputs(recipe)
-#'folder <- logger$foldername
-#'log_file <- logger$logname
-#'logger <- logger$logger
+#'recipe <- prepare_outputs("modules/data_load/recipe_1.yml")
+#'info(recipe$Run$logger, "This is an info message")
 #'
 #'@export
-prepare_outputs <- function(recipe) {
+prepare_outputs <- function(recipe_file) {
 
 # recipe: the content of the readed recipe
 # file: the recipe file name
 
+  recipe <- read_yaml(recipe_file)
+  recipe$recipe_path <- recipe_file
+  recipe$name <- tools::file_path_sans_ext(basename(recipe_file))
+
   output_dir = recipe$Run$output_dir
   # Create output folders:
   folder_name <- paste0(gsub(".yml", "", gsub("/", "_", recipe$name)), "_",
                         gsub(" ", "", gsub(":", "", gsub("-", "", Sys.time()))))
+
   print("Saving all outputs to:")
   print(output_dir)
   print(folder_name)
-  dir.create(file.path(output_dir, folder_name, 'plots'), recursive = TRUE)
-  dir.create(file.path(output_dir, folder_name, 'outputs'))
+  dir.create(file.path(output_dir, folder_name, 'outputs'), recursive = TRUE)
   dir.create(file.path(output_dir, folder_name, 'logs'))
   dir.create(file.path(output_dir, folder_name, 'logs', 'recipes'))
-  file.copy(recipe$filepath, file.path(output_dir, folder_name, 'logs'))
+  file.copy(recipe$recipe_path, file.path(output_dir, folder_name, 'logs',
+                                          'recipes'))
 
   logfile <- file.path(output_dir, folder_name, 'logs', 'log.txt')
+  file.create(logfile)
 
   # Set default behaviour of log output file:
   if (is.null(recipe$Run)) {
     recipe$Run <- list(Loglevel = 'INFO', Terminal = TRUE)
   }
-  if (is.null(recipe$Run$Loglevel)) {
+  if (is.null(recipe$Run$Loglevel)) {
    recipe$Run$Loglevel <- 'INFO'
   }
+
   if (!is.logical(recipe$Run$Terminal)) {
    recipe$Run$Terminal <- TRUE
   }
@@ -61,9 +66,13 @@ prepare_outputs <- function(recipe) {
                                layout = default_log_layout())))
   } else {
     logger <- logger(threshold = recipe$Run$Loglevel,
-                     appenders = list(file_appender(logfile, append = TRUE,
+                     appenders = list(file_appender(logfile, append = TRUE,
                                                     layout = default_log_layout())))
   }
-  return(list(logger = logger, logname = logfile,
-              foldername = file.path(output_dir, folder_name)))
+
+  recipe$Run$output_dir <- file.path(output_dir, folder_name)
+  recipe$Run$logger <- logger
+  recipe$Run$logfile <- logfile
+
+  return(recipe)
 }
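
Note: the patch above routes all user-facing messages through a log4r logger that prepare_outputs() now builds once and stores in recipe$Run$logger, so every module logs through the recipe instead of calling print()/warning() with bare messages. The snippet below is a minimal, self-contained sketch of that pattern, not part of the patch itself; it uses only the documented log4r API, and the names my_logger, my_recipe, and the tempfile path are illustrative stand-ins.

# Sketch of the logging setup this patch adopts (assumes the log4r package).
library(log4r)

logfile <- tempfile("log", fileext = ".txt")  # stand-in for logs/log.txt
my_logger <- logger(threshold = "INFO",       # corresponds to recipe$Run$Loglevel
                    appenders = list(console_appender(layout = default_log_layout()),
                                     file_appender(logfile, append = TRUE,
                                                   layout = default_log_layout())))

# prepare_outputs() attaches the logger (and log file path) to the recipe;
# downstream modules then call info()/warn()/error() on recipe$Run$logger,
# and fatal conditions still call stop() after the error is logged.
my_recipe <- list(Run = list(logger = my_logger, logfile = logfile))
info(my_recipe$Run$logger, "This is an info message")
warn(my_recipe$Run$logger, "This is a warning")
error(my_recipe$Run$logger, "This is an error message")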