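# Required packages: yaml (read_yaml), s2dv (Reorder), PCICt (as.PCICt),
# ClimProjDiags (Subset) and easyNCDF (ArrayToNc). Helper functions such as
# get_archive(), get_dir(), get_filename(), get_countries(),
# .get_global_attributes(), .get_times(), .get_latlon() and info() are
# assumed to be sourced elsewhere in the tool.
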
save_metrics <- function(recipe,
                         skill,
                         data_cube,
                         agg = "global",
                         outdir = NULL) {
  # This function adds metadata to the skill metrics in 'skill'
  # and exports them to a netCDF file inside 'outdir'.
  
  # Define grid dimensions and names
  lalo <- c('longitude', 'latitude')
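  # Load the archive configuration for this recipe and the metric dictionary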
  archive <- get_archive(recipe)
  dictionary <- read_yaml("conf/variable-dictionary.yml")

  # Rearrange lon, lat and time dimensions
  if (tolower(agg) == "global") {
    skill <- lapply(skill, function(x) {
                    Reorder(x, c(lalo, 'time'))})
  }
  # Add global and variable attributes
  global_attributes <- .get_global_attributes(recipe, archive)
  ## TODO: Sort out the logic once default behavior is decided
  if ((!is.null(recipe$Analysis$Workflow$Anomalies$compute)) && 
      (recipe$Analysis$Workflow$Anomalies$compute)) {
    global_attributes <- c(list(from_anomalies = "Yes"),
                           global_attributes)
  } else {
    global_attributes <- c(list(from_anomalies = "No"),
                           global_attributes)
  }
  attr(skill[[1]], 'global_attrs') <- global_attributes

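  # Loop over the metrics: fill NAs, set variable metadata and name dimensions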
  for (i in seq_along(skill)) {
    metric <- names(skill)[i]
    long_name <- dictionary$metrics[[metric]]$long_name
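    # Replace NAs with a fill value so they are stored as missing data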
    missing_val <- -9.e+33
    skill[[i]][is.na(skill[[i]])] <- missing_val
    if (tolower(agg) == "country") {
      sdname <- paste0(metric, " region-aggregated metric")
      dims <- c('Country', 'time')
    } else {
      sdname <- metric
      dims <- c(lalo, 'time')
    }
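    # Attach per-metric variable metadata and label the array dimensions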
    metadata <- list(metric = list(name = metric,
                                   standard_name = sdname,
                                   long_name = long_name,
                                   missing_value = missing_val))
    attr(skill[[i]], 'variables') <- metadata
    names(dim(skill[[i]])) <- dims
  }

  # Time indices and metadata
  fcst.horizon <- tolower(recipe$Analysis$Horizon)
  store.freq <- recipe$Analysis$Variables$freq
  calendar <- archive$System[[global_attributes$system]]$calendar

  # Generate vector containing leadtimes
  dates <- as.PCICt(ClimProjDiags::Subset(data_cube$attrs$Dates, 'syear', 1),
                    cal = calendar)

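  # Compute the initialization date: decadal runs take the initial month from
  # the archive; otherwise it is built from hcst_start and sdate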
  if (fcst.horizon == 'decadal') {
    init_month <- archive$System[[recipe$Analysis$Datasets$System$name]]$initial_month
    init_date <- as.PCICt(paste0(recipe$Analysis$Time$hcst_start, '-',
                                 sprintf('%02d', init_month), '-01'),
                          cal = calendar)
  } else {
    init_date <- as.PCICt(paste0(recipe$Analysis$Time$hcst_start,
                                 recipe$Analysis$Time$sdate),
                          format = '%Y%m%d', cal = calendar)
  }

  # Get time difference in hours
  leadtimes <- as.numeric(dates - init_date)/3600

  # Select start date
  # If a fcst is provided, use that as the ref. year. Otherwise use 1970.
  if (fcst.horizon == 'decadal') {
    if (!is.null(recipe$Analysis$Time$fcst_year)) {
      #PROBLEM: May be more than one fcst_year
      fcst.sdate <- paste0(recipe$Analysis$Time$fcst_year[1],
                           sprintf('%02d', init_month), '01')
    } else {
      fcst.sdate <- paste0("1970", sprintf('%02d', init_month), '01')
    }
  } else {
    if (!is.null(recipe$Analysis$Time$fcst_year)) {
      fcst.sdate <- paste0(recipe$Analysis$Time$fcst_year,
                           recipe$Analysis$Time$sdate)
    } else {
      fcst.sdate <- paste0("1970", recipe$Analysis$Time$sdate)
    }
  }

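  # Build the time coordinate and its metadata from the lead times and the
  # reference start date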
  times <- .get_times(store.freq, fcst.horizon, leadtimes, fcst.sdate, calendar)
  time <- times$time

  # Generate name of output file
  if (is.null(outdir)) {
    outdir <- get_dir(recipe)
  }
  outfile <- get_filename(outdir, recipe, data_cube$attrs$Variable$varName,
                          fcst.sdate, agg, "skill")

  # Get grid data and metadata and export to netCDF
  if (tolower(agg) == "country") {
    country <- get_countries(grid)
    # Combine country, time and skill into one list for export (assumes
    # get_countries() returns a list of arrays, as in the global branch below)
    ArrayToNc(c(country, list(time), skill), outfile)
  } else {
    latitude <- as.vector(data_cube$coords$lat)
    longitude <- as.vector(data_cube$coords$lon)
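    # Build coordinate variables with metadata for latitude and longitude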
    latlon <- .get_latlon(latitude, longitude)
    # Compile variables into a list and export to netCDF
    vars <- list(latlon$lat, latlon$lon, time)
    vars <- c(vars, skill)
    ArrayToNc(vars, outfile)
  }
  info(recipe$Run$logger, "##### SKILL METRICS SAVED TO NETCDF FILE #####")
}