diff --git a/modules/Anomalies/Anomalies.R b/modules/Anomalies/Anomalies.R
index 859e97bbd042be63af43c08358700b663ebe9139..06f955a8e297071cd0990e7c68778a7140d8cc28 100644
--- a/modules/Anomalies/Anomalies.R
+++ b/modules/Anomalies/Anomalies.R
@@ -45,13 +45,14 @@ compute_anomalies <- function(recipe, data) {
     data$obs <- anom$obs
     remove(anom)
     # Change variable metadata
-    # data$hcst$Variable$varName <- paste0(data$hcst$Variable$varName, "anomaly")
-    attr(data$hcst$Variable, "variable")$long_name <-
-      paste(attr(data$hcst$Variable, "variable")$long_name, "anomaly")
-    # data$obs$Variable$varName <- paste0(data$obs$Variable$varName, "anomaly")
-    attr(data$obs$Variable, "variable")$long_name <-
-      paste(attr(data$obs$Variable, "variable")$long_name, "anomaly")
-
+    for (var in data$hcst$attrs$Variable$varName) {
+      # Change hcst longname
+      data$hcst$attrs$Variable$metadata[[var]]$long_name <-
+        paste(data$hcst$attrs$Variable$metadata[[var]]$long_name, "anomaly")
+      # Change obs longname
+      data$obs$attrs$Variable$metadata[[var]]$long_name <-
+        paste(data$obs$attrs$Variable$metadata[[var]]$long_name, "anomaly")
+    }
     # Compute forecast anomaly field
     if (!is.null(data$fcst)) {
       # Compute hindcast climatology ensemble mean
@@ -71,9 +72,10 @@ compute_anomalies <- function(recipe, data) {
       # Get fcst anomalies
       data$fcst$data <- data$fcst$data - clim_hcst
       # Change metadata
-      # data$fcst$Variable$varName <- paste0(data$fcst$Variable$varName, "anomaly")
-      attr(data$fcst$Variable, "variable")$long_name <-
-        paste(attr(data$fcst$Variable, "variable")$long_name, "anomaly")
+      for (var in data$fcst$attrs$Variable$varName) {
+        data$fcst$attrs$Variable$metadata[[var]]$long_name <-
+          paste(data$fcst$attrs$Variable$metadata[[var]]$long_name, "anomaly")
+      }
     }
 
     info(recipe$Run$logger,
diff --git a/modules/Loading/Loading.R b/modules/Loading/Loading.R
index 598ddd0e4050105138e6579c290d1ef416924ee2..107eaf71584830d63b1aeaa3c925d8d8be23fe1c 100644
--- a/modules/Loading/Loading.R
+++ b/modules/Loading/Loading.R
@@ -139,12 +139,12 @@ load_datasets <- function(recipe) {
   }
 
   # Convert hcst to s2dv_cube object
-  ## TODO: Give correct dimensions to $Dates$start
+  ## TODO: Give correct dimensions to $Dates
   ## (sday, sweek, syear instead of file_date)
   hcst <- as.s2dv_cube(hcst)
   # Adjust dates for models where the time stamp goes into the next month
   if (recipe$Analysis$Variables$freq == "monthly_mean") {
-    hcst$Dates$start[] <- hcst$Dates$start - seconds(exp_descrip$time_stamp_lag)
+    hcst$attrs$Dates[] <- hcst$attrs$Dates - seconds(exp_descrip$time_stamp_lag)
   }
 
   # Load forecast
@@ -198,8 +198,8 @@ load_datasets <- function(recipe) {
     fcst <- as.s2dv_cube(fcst)
     # Adjust dates for models where the time stamp goes into the next month
     if (recipe$Analysis$Variables$freq == "monthly_mean") {
-      fcst$Dates$start[] <-
-        fcst$Dates$start - seconds(exp_descrip$time_stamp_lag)
+      fcst$attrs$Dates[] <-
+        fcst$attrs$Dates - seconds(exp_descrip$time_stamp_lag)
     }
 
   } else {
@@ -211,7 +211,7 @@ load_datasets <- function(recipe) {
 
   # Obtain dates and date dimensions from the loaded hcst data to make sure
   # the corresponding observations are loaded correctly.
-  dates <- hcst$Dates$start
+  dates <- hcst$attrs$Dates
   dim(dates) <- dim(Subset(hcst$data,
                            along=c('dat', 'var',
                                    'latitude', 'longitude', 'ensemble'),
diff --git a/modules/Loading/Loading_decadal.R b/modules/Loading/Loading_decadal.R
index e3677e1db3bceae19bfa8e3c27493575a43a00f6..4258fd18d8e7c9fbe32d89a010eb013c381d6ab8 100644
--- a/modules/Loading/Loading_decadal.R
+++ b/modules/Loading/Loading_decadal.R
@@ -11,8 +11,6 @@ source("modules/Loading/helper_loading_decadal.R")
 source("modules/Loading/dates2load.R")
 source("modules/Loading/check_latlon.R")
 source("tools/libs.R")
-## TODO: Remove once the fun is included in CSTools
-source("tools/tmp/as.s2dv_cube.R")
 
 
 #====================================================================
@@ -275,7 +273,7 @@ load_datasets <- function(recipe) {
   # Get from startR_cube
   # dates <- attr(hcst, 'Variables')$common$time
   # Get from s2dv_cube
-  dates <- hcst$Dates$start
+  dates <- hcst$attrs$Dates
   dates_file <- sapply(dates, format, '%Y%m')
   dim(dates_file) <- dim(dates)
 
@@ -358,7 +356,6 @@ load_datasets <- function(recipe) {
     obs <- as.s2dv_cube(obs)
   )
 
-
   #-------------------------------------------
   # Step 4. Verify the consistance between data
   #-------------------------------------------
@@ -378,8 +375,8 @@ load_datasets <- function(recipe) {
   }
 
   # time attribute
-  if (!identical(format(hcst$Dates$start, '%Y%m'),
-                 format(obs$Dates$start, '%Y%m'))) {
+  if (!identical(format(hcst$attrs$Dates, '%Y%m'),
+                 format(obs$attrs$Dates, '%Y%m'))) {
     error(recipe$Run$logger,
           "hcst and obs don't share the same time.")
     stop()
diff --git a/modules/Saving/Saving.R b/modules/Saving/Saving.R
index 28b5e5529bad1ae5e4402b154b674500a175dd0d..7114b6415c99bed41f95e869164e0d4f143180a1 100644
--- a/modules/Saving/Saving.R
+++ b/modules/Saving/Saving.R
@@ -172,8 +172,8 @@ save_forecast <- function(data_cube,
 
   lalo <- c('longitude', 'latitude')
 
-  variable <- data_cube$Variable$varName
-  var.longname <- attr(data_cube$Variable, 'variable')$long_name
+  variable <- data_cube$attrs$Variable$varName
+  var.longname <- data_cube$attrs$Variable$metadata[[variable]]$long_name
   global_attributes <- get_global_attributes(recipe, archive)
   fcst.horizon <- tolower(recipe$Analysis$Horizon)
   store.freq <- recipe$Analysis$Variables$freq
@@ -186,7 +186,7 @@ save_forecast <- function(data_cube,
   # }
 
   # Generate vector containing leadtimes
-  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$Dates$start, 'syear', 1),
+  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                     cal = calendar)
   if (fcst.horizon == 'decadal') {
     ## Method 1: Use the first date as init_date. But it may be better to use
@@ -220,7 +220,7 @@ save_forecast <- function(data_cube,
 
   syears <- seq(1:dim(data_cube$data)['syear'][[1]])
   # expect dim = [sday = 1, sweek = 1, syear, time]
-  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1])
+  syears_val <- lubridate::year(data_cube$attrs$Dates[1, 1, , 1])
   for (i in syears) {
     # Select year from array and rearrange dimensions
     fcst <- ClimProjDiags::Subset(data_cube$data, 'syear', i, drop = T)
@@ -245,7 +245,7 @@ save_forecast <- function(data_cube,
       dims <- c(lalo, 'ensemble', 'time')
       var.expname <- variable
       var.sdname <- var.sdname
-      var.units <- attr(data_cube$Variable, 'variable')$units
+      var.units <- data_cube$attrs$Variable$metadata[[variable]]$units
     }
 
     metadata <- list(fcst = list(name = var.expname,
@@ -269,7 +269,7 @@ save_forecast <- function(data_cube,
       fcst.sdate <- format(fcst.sdate, '%Y%m%d')
 
     } else {
-      fcst.sdate <- data_cube$load_parameters$dat1$file_date[[1]][i]
+      fcst.sdate <- data_cube$attrs$load_parameters$dat1$file_date[[1]][i]
     }
 
     # Get time dimension values and metadata
@@ -277,7 +277,7 @@ save_forecast <- function(data_cube,
     time <- times$time
 
     # Generate name of output file
-    outfile <- get_filename(outdir, recipe, data_cube$Variable$varName,
+    outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                             fcst.sdate, agg, "exp")
 
     # Get grid data and metadata and export to netCDF
@@ -285,8 +285,8 @@ save_forecast <- function(data_cube,
       country <- get_countries(grid)
      ArrayToNc(append(country, time, fcst), outfile)
    } else {
-      latitude <- data_cube$lat[1:length(data_cube$lat)]
-      longitude <- data_cube$lon[1:length(data_cube$lon)]
+      latitude <- data_cube$coords$lat[1:length(data_cube$coords$lat)]
+      longitude <- data_cube$coords$lon[1:length(data_cube$coords$lon)]
      latlon <- get_latlon(latitude, longitude)
      # Compile variables into a list and export to netCDF
      vars <- list(latlon$lat, latlon$lon, time)
@@ -314,8 +314,8 @@ save_observations <- function(data_cube,
 
   lalo <- c('longitude', 'latitude')
 
-  variable <- data_cube$Variable$varName
-  var.longname <- attr(data_cube$Variable, 'variable')$long_name
+  variable <- data_cube$attrs$Variable$varName
+  var.longname <- data_cube$attrs$Variable$metadata[[variable]]$long_name
   global_attributes <- get_global_attributes(recipe, archive)
   fcst.horizon <- tolower(recipe$Analysis$Horizon)
   store.freq <- recipe$Analysis$Variables$freq
@@ -323,7 +323,7 @@ save_observations <- function(data_cube,
 
   # Generate vector containing leadtimes
   ## TODO: Move to a separate function?
-  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$Dates$start, 'syear', 1),
+  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                     cal = calendar)
   if (fcst.horizon == 'decadal') {
     ## Method 1: Use the first date as init_date. But it may be better to use
@@ -345,7 +345,7 @@ save_observations <- function(data_cube,
 
   syears <- seq(1:dim(data_cube$data)['syear'][[1]])
   ## expect dim = [sday = 1, sweek = 1, syear, time]
-  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1])
+  syears_val <- lubridate::year(data_cube$attrs$Dates[1, 1, , 1])
   for (i in syears) {
     # Select year from array and rearrange dimensions
     fcst <- ClimProjDiags::Subset(data_cube$data, 'syear', i, drop = T)
@@ -365,11 +365,11 @@ save_observations <- function(data_cube,
       dims <- c('Country', 'time')
       var.expname <- paste0(variable, '_country')
       var.longname <- paste0("Country-Aggregated ", var.longname)
-      var.units <- attr(data_cube$Variable, 'variable')$units
+      var.units <- data_cube$attrs$Variable$metadata[[variable]]$units
     } else {
       dims <- c(lalo, 'time')
       var.expname <- variable
-      var.units <- attr(data_cube$Variable, 'variable')$units
+      var.units <- data_cube$attrs$Variable$metadata[[variable]]$units
     }
 
     metadata <- list(fcst = list(name = var.expname,
@@ -393,10 +393,10 @@ save_observations <- function(data_cube,
 
     } else {
       if (store.freq == "monthly_mean") {
-        fcst.sdate <- data_cube$load_parameters$dat1$file_date[[1]][i]
+        fcst.sdate <- data_cube$attrs$load_parameters$dat1$file_date[[1]][i]
         fcst.sdate <- as.Date(paste0(fcst.sdate, "01"), '%Y%m%d')
       } else {
-        fcst.sdate <- as.Date(data_cube$Dates$start[i])
+        fcst.sdate <- as.Date(data_cube$attrs$Dates[i])
       }
     }
 
@@ -414,7 +414,7 @@ save_observations <- function(data_cube,
     time <- times$time
 
     # Generate name of output file
-    outfile <- get_filename(outdir, recipe, data_cube$Variable$varName,
+    outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                             fcst.sdate, agg, "obs")
 
     # Get grid data and metadata and export to netCDF
@@ -422,8 +422,8 @@ save_observations <- function(data_cube,
       country <- get_countries(grid)
      ArrayToNc(append(country, time, fcst), outfile)
    } else {
-      latitude <- data_cube$lat[1:length(data_cube$lat)]
-      longitude <- data_cube$lon[1:length(data_cube$lon)]
+      latitude <- data_cube$coords$lat[1:length(data_cube$coords$lat)]
+      longitude <- data_cube$coords$lon[1:length(data_cube$coords$lon)]
      latlon <- get_latlon(latitude, longitude)
      # Compile variables into a list and export to netCDF
      vars <- list(latlon$lat, latlon$lon, time)
@@ -500,7 +500,7 @@ save_metrics <- function(skill,
   calendar <- archive$System[[global_attributes$system]]$calendar
 
   # Generate vector containing leadtimes
-  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$Dates$start, 'syear', 1),
+  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                     cal = calendar)
 
   if (fcst.horizon == 'decadal') {
@@ -540,7 +540,7 @@ save_metrics <- function(skill,
     time <- times$time
 
     # Generate name of output file
-    outfile <- get_filename(outdir, recipe, data_cube$Variable$varName,
+    outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                             fcst.sdate, agg, "skill")
 
     # Get grid data and metadata and export to netCDF
@@ -548,8 +548,8 @@ save_metrics <- function(skill,
       country <- get_countries(grid)
      ArrayToNc(append(country, time, skill), outfile)
    } else {
-      latitude <- data_cube$lat[1:length(data_cube$lat)]
-      longitude <- data_cube$lon[1:length(data_cube$lon)]
+      latitude <- data_cube$coords$lat[1:length(data_cube$coords$lat)]
+      longitude <- data_cube$coords$lon[1:length(data_cube$coords$lon)]
      latlon <- get_latlon(latitude, longitude)
      # Compile variables into a list and export to netCDF
      vars <- list(latlon$lat, latlon$lon, time)
@@ -616,7 +616,7 @@ save_corr <- function(skill,
   calendar <- archive$System[[global_attributes$system]]$calendar
 
   # Generate vector containing leadtimes
-  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$Dates$start, 'syear', 1),
+  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                     cal = calendar)
   if (fcst.horizon == 'decadal') {
     init_month <- archive$System[[recipe$Analysis$Datasets$System$name]]$initial_month
@@ -655,7 +655,7 @@ save_corr <- function(skill,
     time <- times$time
 
     # Generate name of output file
-    outfile <- get_filename(outdir, recipe, data_cube$Variable$varName,
+    outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                             fcst.sdate, agg, "corr")
 
     # Get grid data and metadata and export to netCDF
@@ -663,8 +663,8 @@ save_corr <- function(skill,
       country <- get_countries(grid)
      ArrayToNc(append(country, time, skill), outfile)
    } else {
-      latitude <- data_cube$lat[1:length(data_cube$lat)]
-      longitude <- data_cube$lon[1:length(data_cube$lon)]
+      latitude <- data_cube$coords$lat[1:length(data_cube$coords$lat)]
+      longitude <- data_cube$coords$lon[1:length(data_cube$coords$lon)]
      latlon <- get_latlon(latitude, longitude)
      # Compile variables into a list and export to netCDF
      vars <- list(latlon$lat, latlon$lon, time)
@@ -724,7 +724,7 @@ save_percentiles <- function(percentiles,
   store.freq <- recipe$Analysis$Variables$freq
   calendar <- archive$System[[global_attributes$system]]$calendar
   # Generate vector containing leadtimes
-  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$Dates$start, 'syear', 1),
+  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                     cal = calendar)
   if (fcst.horizon == 'decadal') {
     init_month <- archive$System[[recipe$Analysis$Datasets$System$name]]$initial_month
@@ -763,7 +763,7 @@ save_percentiles <- function(percentiles,
     time <- times$time
 
     # Generate name of output file
-    outfile <- get_filename(outdir, recipe, data_cube$Variable$varName,
+    outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                             fcst.sdate, agg, "percentiles")
 
     # Get grid data and metadata and export to netCDF
@@ -771,8 +771,8 @@ save_percentiles <- function(percentiles,
       country <- get_countries(grid)
      ArrayToNc(append(country, time, percentiles), outfile)
    } else {
-      latitude <- data_cube$lat[1:length(data_cube$lat)]
-      longitude <- data_cube$lon[1:length(data_cube$lon)]
+      latitude <- data_cube$coords$lat[1:length(data_cube$coords$lat)]
+      longitude <- data_cube$coords$lon[1:length(data_cube$coords$lon)]
      latlon <- get_latlon(latitude, longitude)
      # Compile variables into a list and export to netCDF
      vars <- list(latlon$lat, latlon$lon, time)
@@ -801,8 +801,8 @@ save_probabilities <- function(probs,
 
   lalo <- c('longitude', 'latitude')
 
-  variable <- data_cube$Variable$varName
-  var.longname <- attr(data_cube$Variable, 'variable')$long_name
+  variable <- data_cube$attrs$Variable$varName
+  var.longname <- data_cube$attrs$Variable$metadata[[variable]]$long_name
   global_attributes <- get_global_attributes(recipe, archive)
   # Add anomaly computation to global attributes
   ## TODO: Sort out the logic once default behavior is decided
@@ -820,7 +820,7 @@ save_probabilities <- function(probs,
 
   # Generate vector containing leadtimes
   ## TODO: Move to a separate function?
-  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$Dates$start, 'syear', 1),
+  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                     cal = calendar)
   if (fcst.horizon == 'decadal') {
     init_month <- archive$System[[recipe$Analysis$Datasets$System$name]]$initial_month
@@ -838,7 +838,7 @@ save_probabilities <- function(probs,
 
   syears <- seq(1:dim(data_cube$data)['syear'][[1]])
   ## expect dim = [sday = 1, sweek = 1, syear, time]
-  syears_val <- lubridate::year(data_cube$Dates$start[1, 1, , 1])
+  syears_val <- lubridate::year(data_cube$attrs$Dates[1, 1, , 1])
   for (i in syears) {
     # Select year from array and rearrange dimensions
     probs_syear <- lapply(probs, ClimProjDiags::Subset, 'syear', i, drop = 'selected')
@@ -874,7 +874,7 @@ save_probabilities <- function(probs,
       fcst.sdate <- init_date + lubridate::years(syears_val[i] - lubridate::year(init_date))
       fcst.sdate <- format(fcst.sdate, '%Y%m%d')
     } else {
-      fcst.sdate <- data_cube$load_parameters$dat1$file_date[[1]][i]
+      fcst.sdate <- data_cube$attrs$load_parameters$dat1$file_date[[1]][i]
     }
 
     # Get time dimension values and metadata
@@ -882,7 +882,7 @@ save_probabilities <- function(probs,
     time <- times$time
 
     # Generate name of output file
-    outfile <- get_filename(outdir, recipe, data_cube$Variable$varName,
+    outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                             fcst.sdate, agg, "probs")
 
     # Get grid data and metadata and export to netCDF
@@ -890,8 +890,8 @@ save_probabilities <- function(probs,
       country <- get_countries(grid)
      ArrayToNc(append(country, time, probs_syear), outfile)
    } else {
-      latitude <- data_cube$lat[1:length(data_cube$lat)]
-      longitude <- data_cube$lon[1:length(data_cube$lon)]
+      latitude <- data_cube$coords$lat[1:length(data_cube$coords$lat)]
+      longitude <- data_cube$coords$lon[1:length(data_cube$coords$lon)]
      latlon <- get_latlon(latitude, longitude)
      # Compile variables into a list and export to netCDF
      vars <- list(latlon$lat, latlon$lon, time)
diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R
index a8664569047bb60185733e27a1250ec985cd481d..6aa6b3134d8ece2e176d3f303253c90ae63d7d3b 100644
--- a/modules/Visualization/Visualization.R
+++ b/modules/Visualization/Visualization.R
@@ -81,8 +81,8 @@ plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics,
     stop("The element 'skill_metrics' must be a list of named arrays.")
   }
 
-  latitude <- data_cube$lat
-  longitude <- data_cube$lon
+  latitude <- data_cube$coords$lat
+  longitude <- data_cube$coords$lon
   system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name
   hcst_period <- paste0(recipe$Analysis$Time$hcst_start, "-",
                         recipe$Analysis$Time$hcst_end)
@@ -165,28 +165,27 @@ plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics,
       # Reorder dimensions
       skill <- Reorder(skill, c("time", "longitude", "latitude"))
       # If the significance has been requested and the variable has it,
-      # retrieve it and reorder its dimensions. 
+      # retrieve it and reorder its dimensions.
       significance_name <- paste0(name, "_significance")
       if ((significance) && (significance_name %in% names(skill_metrics))) {
-        significance_name <- paste0(name, "_significance")
         skill_significance <- skill_metrics[[significance_name]]
-        skill_significance <- Reorder(skill_significance, c("time", 
-                                                            "longitude", 
-                                                            "latitude"))
+        skill_significance <- Reorder(skill_significance, c("time",
+                                                            "longitude",
+                                                            "latitude"))
         # Split skill significance into list of lists, along the time dimension
-        # to avoid overlapping of significance dots.
+        # This allows for plotting the significance dots correctly.
         skill_significance <- ClimProjDiags::ArrayToList(skill_significance,
-                                                         dim = 'time', 
-                                                         level = "sublist",
-                                                         names = "dots")
+                                                         dim = 'time',
+                                                         level = "sublist",
+                                                         names = "dots")
       } else {
         skill_significance <- NULL
       }
       # Define output file name and titles
       outfile <- paste0(outdir, name, ".png")
-      toptitle <- paste(display_name, "-", data_cube$Variable$varName,
+      toptitle <- paste(display_name, "-", data_cube$attrs$Variable$varName,
                         "-", system_name, "-", init_month, hcst_period)
-      months <- unique(lubridate::month(data_cube$Dates$start, 
+      months <- unique(lubridate::month(data_cube$attrs$Dates,
                                         label = T, abb = F))
       titles <- as.vector(months)
       # Plot
@@ -195,11 +194,11 @@ plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics,
                     asplit(skill, MARGIN=1), # Splitting array into a list
                     longitude, latitude,
                     special_args = skill_significance,
-                    dot_symbol = 20, 
-                    toptitle = toptitle, 
-                    title_scale = 0.6, 
-                    titles = titles, 
-                    filled.continents=F, 
+                    dot_symbol = 20,
+                    toptitle = toptitle,
+                    title_scale = 0.6,
+                    titles = titles,
+                    filled.continents=F,
                     brks = brks,
                     cols = cols,
                     col_inf = col_inf,
@@ -210,7 +209,7 @@ plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics,
                     bar_label_scale = 1.5,
                     axes_label_scale = 1.3)
       )
-    } 
+    }
   }
   info(recipe$Run$logger,
        "##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####")
@@ -224,8 +223,8 @@ plot_ensemble_mean <- function(recipe, archive, fcst, outdir) {
     stop("Visualization functions not yet implemented for daily data.")
   }
 
-  latitude <- fcst$lat
-  longitude <- fcst$lon
+  latitude <- fcst$coords$lat
+  longitude <- fcst$coords$lon
   system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name
   variable <- recipe$Analysis$Variables$name
-  units <- attr(fcst$Variable, "variable")$units
+  units <- fcst$attrs$Variable$metadata[[variable]]$units
@@ -260,7 +259,7 @@ plot_ensemble_mean <- function(recipe, archive, fcst, outdir) {
     # Define brks, centered on in the case of anomalies
     ## 
     if (grepl("anomaly",
-              attr(fcst$Variable, "variable")$long_name)) {
+              fcst$attrs$Variable$metadata[[variable]]$long_name)) {
       variable <- paste(variable, "anomaly")
       max_value <- max(abs(ensemble_mean))
       ugly_intervals <- seq(-max_value, max_value, max_value/20)
@@ -282,7 +281,7 @@ plot_ensemble_mean <- function(recipe, archive, fcst, outdir) {
     }
     toptitle <- paste("Forecast Ensemble Mean -", variable, "-", system_name,
                       "- Initialization:", i_syear)
-    months <- lubridate::month(fcst$Dates$start[1, 1, which(start_date == i_syear), ],
+    months <- lubridate::month(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ],
                                label = T, abb = F)
     titles <- as.vector(months)
     # Plots
@@ -316,8 +315,8 @@ plot_most_likely_terciles <- function(recipe, archive,
     stop("Visualization functions not yet implemented for daily data.")
   }
 
-  latitude <- fcst$lat
-  longitude <- fcst$lon
+  latitude <- fcst$coords$lat
+  longitude <- fcst$coords$lon
   system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name
   variable <- recipe$Analysis$Variables$name
   start_date <- paste0(recipe$Analysis$Time$fcst_year,
@@ -362,7 +361,7 @@ plot_most_likely_terciles <- function(recipe, archive,
     }
     toptitle <- paste("Most Likely Tercile -", variable, "-",
                       system_name, "-", "Initialization:", i_syear)
-    months <- lubridate::month(fcst$Dates$start[1, 1, which(start_date == i_syear), ],
+    months <- lubridate::month(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ],
                                label = T, abb = F)
     ## TODO: Ensure this works for daily and sub-daily cases
     titles <- as.vector(months)
diff --git a/tests/testthat/test-decadal_daily_1.R b/tests/testthat/test-decadal_daily_1.R
index c9833d2b34413422f49dcb13d24991ed624c0d80..400b864d1576aa1b51d6b169cbd8a1a8d7f46f8d 100644
--- a/tests/testthat/test-decadal_daily_1.R
+++ b/tests/testthat/test-decadal_daily_1.R
@@ -53,7 +53,7 @@ class(data$obs),
 )
 expect_equal(
 names(data$hcst),
-c("data", "lon", "lat", "Variable", "Datasets", "Dates", "when", "source_files", "load_parameters")
+c("data", "dims", "coords", "attrs")
 )
 expect_equal(
 names(data$hcst),
@@ -72,7 +72,7 @@ dim(data$fcst$data),
 c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 2, time = 90, latitude = 7, longitude = 11, ensemble = 3)
 )
 expect_equal(
-dim(data$hcst$Dates$start),
+dim(data$hcst$attrs$Dates),
 c(sday = 1, sweek = 1, syear = 3, time = 90)
 )
 # hcst data
@@ -111,23 +111,23 @@ tolerance = 0.0001
 )
 
 # time value
 expect_equal(
-(data$hcst$Dates$start)[1],
+(data$hcst$attrs$Dates)[1],
 as.POSIXct("1991-01-01 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[2],
+(data$hcst$attrs$Dates)[2],
 as.POSIXct("1992-01-01 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[5],
+(data$hcst$attrs$Dates)[5],
 as.POSIXct("1992-01-02 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[1, 1, 3, 90],
+(data$hcst$attrs$Dates)[1, 1, 3, 90],
 as.POSIXct("1993-03-31 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[1, 1, 2, 90],
+(data$hcst$attrs$Dates)[1, 1, 2, 90],
 as.POSIXct("1992-03-30 12:00:00", tz = 'UTC')
 )
diff --git a/tests/testthat/test-decadal_monthly_1.R b/tests/testthat/test-decadal_monthly_1.R
index b76a216c18d605436a730c92504be69379e991d8..e32f71779114b54629f13f46da8a7ff942b808fa 100644
--- a/tests/testthat/test-decadal_monthly_1.R
+++ b/tests/testthat/test-decadal_monthly_1.R
@@ -71,7 +71,7 @@ class(data$obs),
 )
 expect_equal(
 names(data$hcst),
-c("data", "lon", "lat", "Variable", "Datasets", "Dates", "when", "source_files", "load_parameters")
+c("data", "dims", "coords", "attrs")
 )
 expect_equal(
 names(data$hcst),
@@ -90,7 +90,7 @@ dim(data$fcst$data),
 c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 1, time = 3, latitude = 5, longitude = 4, ensemble = 2)
 )
 expect_equal(
-dim(data$hcst$Dates$start),
+dim(data$hcst$attrs$Dates),
 c(sday = 1, sweek = 1, syear = 4, time = 3)
 )
 expect_equal(
@@ -109,19 +109,19 @@ c(281.7395, 294.2467),
 tolerance = 0.0001
 )
 expect_equal(
-(data$hcst$Dates$start)[1],
+(data$hcst$attrs$Dates)[1],
 as.POSIXct("1991-11-16", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[2],
+(data$hcst$attrs$Dates)[2],
 as.POSIXct("1992-11-16", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[5],
+(data$hcst$attrs$Dates)[5],
 as.POSIXct("1991-12-16 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[10],
+(data$hcst$attrs$Dates)[10],
 as.POSIXct("1993-01-16 12:00:00", tz = 'UTC')
 )
 
diff --git a/tests/testthat/test-decadal_monthly_2.R b/tests/testthat/test-decadal_monthly_2.R
index cdab57f3174d710e31ad68eb17eab14304b81d84..da67c48bab3cbf7156067594802ea801a1b9ecf6 100644
--- a/tests/testthat/test-decadal_monthly_2.R
+++ b/tests/testthat/test-decadal_monthly_2.R
@@ -71,7 +71,7 @@ class(data$obs),
 )
 expect_equal(
 names(data$hcst),
-c("data", "lon", "lat", "Variable", "Datasets", "Dates", "when", "source_files", "load_parameters")
+c("data", "dims", "coords", "attrs")
 )
 expect_equal(
 names(data$hcst),
@@ -90,7 +90,7 @@ dim(data$fcst$data),
 c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 2, time = 14, latitude = 8, longitude = 5, ensemble = 3)
 )
 expect_equal(
-dim(data$hcst$Dates$start),
+dim(data$hcst$attrs$Dates),
 c(sday = 1, sweek = 1, syear = 3, time = 14)
 )
 #expect_equal(
@@ -126,19 +126,19 @@ tolerance = 0.0001
 )
 
 expect_equal(
-(data$hcst$Dates$start)[1],
+(data$hcst$attrs$Dates)[1],
 as.POSIXct("1990-11-16", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[2],
+(data$hcst$attrs$Dates)[2],
 as.POSIXct("1991-11-16", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[5],
+(data$hcst$attrs$Dates)[5],
 as.POSIXct("1991-12-16 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[10],
+(data$hcst$attrs$Dates)[10],
 as.POSIXct("1991-02-15", tz = 'UTC')
 )
diff --git a/tests/testthat/test-decadal_monthly_3.R b/tests/testthat/test-decadal_monthly_3.R
index 22fd435391ab4ae1d5cc0f9e4a9ed0898d961e20..85c15c88925d6db7b1d204f25cd007062337a7ea 100644
--- a/tests/testthat/test-decadal_monthly_3.R
+++ b/tests/testthat/test-decadal_monthly_3.R
@@ -55,7 +55,7 @@ class(data$obs),
 )
 expect_equal(
 names(data$hcst),
-c("data", "lon", "lat", "Variable", "Datasets", "Dates", "when", "source_files", "load_parameters")
+c("data", "dims", "coords", "attrs")
 )
 expect_equal(
 names(data$hcst),
@@ -66,7 +66,7 @@ dim(data$hcst$data),
 c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 4, time = 3, latitude = 25, longitude = 16, ensemble = 3)
 )
 expect_equal(
-dim(data$hcst$Dates$start),
+dim(data$hcst$attrs$Dates),
 c(sday = 1, sweek = 1, syear = 4, time = 3)
 )
 # hcst data
@@ -87,19 +87,19 @@ tolerance = 0.0001
 )
 
 expect_equal(
-(data$hcst$Dates$start)[1],
+(data$hcst$attrs$Dates)[1],
 as.POSIXct("2016-04-16", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[2],
+(data$hcst$attrs$Dates)[2],
 as.POSIXct("2017-04-16", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[5],
+(data$hcst$attrs$Dates)[5],
 as.POSIXct("2016-05-16 12:00:00", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[12],
+(data$hcst$attrs$Dates)[12],
 as.POSIXct("2019-06-16", tz = 'UTC')
 )
diff --git a/tests/testthat/test-seasonal_daily.R b/tests/testthat/test-seasonal_daily.R
index ddcca22fd93750647b02ecfc5290591edfc5167d..ae80d522c758f5711f0268b6359665ac276637ce 100644
--- a/tests/testthat/test-seasonal_daily.R
+++ b/tests/testthat/test-seasonal_daily.R
@@ -46,7 +46,7 @@ class(data$obs),
 )
 expect_equal(
 names(data$hcst),
-c("data", "lon", "lat", "Variable", "Datasets", "Dates", "when", "source_files", "load_parameters")
+c("data", "dims", "coords", "attrs")
 )
 expect_equal(
 names(data$hcst),
@@ -61,7 +61,7 @@ dim(data$obs$data),
 c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 4, time = 31, latitude = 4, longitude = 4, ensemble = 1)
 )
 expect_equal(
-dim(data$obs$Dates$start),
+dim(data$obs$attrs$Dates),
 c(sday = 1, sweek = 1, syear = 4, time = 31)
 )
 expect_equal(
@@ -80,19 +80,19 @@ c(280.1490, 298.2324),
 tolerance = 0.0001
 )
 expect_equal(
-(data$hcst$Dates$start)[1],
+(data$hcst$attrs$Dates)[1],
 as.POSIXct("1993-12-01 18:00:00 UTC", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[2],
+(data$hcst$attrs$Dates)[2],
 as.POSIXct("1994-12-01 18:00:00 UTC", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[5],
+(data$hcst$attrs$Dates)[5],
 as.POSIXct("1993-12-02 18:00:00 UTC", tz = 'UTC')
 )
 expect_equal(
-(data$obs$Dates$start)[10],
+(data$obs$attrs$Dates)[10],
 as.POSIXct("1994-12-03 11:30:00 UTC", tz = 'UTC')
 )
diff --git a/tests/testthat/test-seasonal_monthly.R b/tests/testthat/test-seasonal_monthly.R
index de03bf7340906650004fbdf5bcdf445992144c09..476ce06faeee12b8dcc92ab3ad21aedd1dd98c42 100644
--- a/tests/testthat/test-seasonal_monthly.R
+++ b/tests/testthat/test-seasonal_monthly.R
@@ -69,7 +69,7 @@ class(data$obs),
 )
 expect_equal(
 names(data$hcst),
-c("data", "lon", "lat", "Variable", "Datasets", "Dates", "when", "source_files", "load_parameters")
+c("data", "dims", "coords", "attrs")
 )
 expect_equal(
 names(data$hcst),
@@ -88,7 +88,7 @@ dim(data$fcst$data),
 c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 1, time = 3, latitude = 3, longitude = 3, ensemble = 51)
 )
 expect_equal(
-dim(data$hcst$Dates$start),
+dim(data$hcst$attrs$Dates),
 c(sday = 1, sweek = 1, syear = 4, time = 3)
 )
 expect_equal(
@@ -107,19 +107,19 @@ c(284.7413, 299.6219),
 tolerance = 0.0001
 )
 expect_equal(
-(data$hcst$Dates$start)[1],
+(data$hcst$attrs$Dates)[1],
 as.POSIXct("1993-11-30 23:59:59", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[2],
+(data$hcst$attrs$Dates)[2],
 as.POSIXct("1994-11-30 23:59:59", tz = 'UTC')
 )
 expect_equal(
-(data$hcst$Dates$start)[5],
+(data$hcst$attrs$Dates)[5],
 as.POSIXct("1993-12-31 23:59:59", tz = 'UTC')
 )
 expect_equal(
-(data$obs$Dates$start)[10],
+(data$obs$attrs$Dates)[10],
 as.POSIXct("1995-01-15 12:00:00", tz = 'UTC')
 )
 
diff --git a/tools/data_summary.R b/tools/data_summary.R
index d8f2b1b63f1a732ef08e72f712a1d24bbd49fa73..5f532dcfc641eb9798de8e44cf5d850511c31c1a 100644
--- a/tools/data_summary.R
+++ b/tools/data_summary.R
@@ -11,11 +11,10 @@ data_summary <- function(data_cube, recipe) {
   } else if (recipe$Analysis$Variables$freq == "daily_mean") {
     date_format <- "%b %d %Y"
   }
-  months <- unique(format(as.Date(data_cube$Dates[[1]]), format = "%B"))
-  months <- paste(as.character(months), collapse = ", ")
-  sdate_min <- format(min(as.Date(data_cube$Dates[[1]])), format = date_format)
-  sdate_max <- format(max(as.Date(data_cube$Dates[[1]])), format = date_format)
-
+  months <- unique(format(as.Date(data_cube$attrs$Dates), format = '%B'))
+  months <- paste(as.character(months), collapse=", ")
+  sdate_min <- format(min(as.Date(data_cube$attrs$Dates)), format = date_format)
+  sdate_max <- format(max(as.Date(data_cube$attrs$Dates)), format = date_format)
   # Create log instance and sink output to logfile and terminal
   info(recipe$Run$logger, "DATA SUMMARY:")
   info(recipe$Run$logger, paste(object_name, "months:", months))
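
Note on the migration pattern: every change above moves s2dv_cube accessors from the old flat layout (cube$Dates$start, cube$lat, cube$lon, attr(cube$Variable, "variable")) to the new layout, in which a cube holds only data, dims, coords and attrs. The mapping below is read directly off the hunks above; the compatibility helper is a minimal sketch, assuming cubes built by as.s2dv_cube(), and the name get_cube_dates is hypothetical, not part of this patch.

# Hypothetical helper (illustration only, not in this patch): fetch the date
# array from either s2dv_cube layout, so callers can migrate one module at a time.
get_cube_dates <- function(cube) {
  if (!is.null(cube$attrs$Dates)) {
    cube$attrs$Dates      # new layout: metadata grouped under $attrs
  } else {
    cube$Dates$start      # old layout: dates stored at the top level
  }
}
# The other renames in this patch follow the same pattern:
#   cube$lat / cube$lon                       -> cube$coords$lat / cube$coords$lon
#   cube$Variable$varName                     -> cube$attrs$Variable$varName
#   attr(cube$Variable, "variable")$long_name -> cube$attrs$Variable$metadata[[var]]$long_name
#   cube$load_parameters                      -> cube$attrs$load_parameters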