Insurance Risk Indices are an ensemble of indices relevant to the insurance industry. They are based on the indices defined by the Expert Team on Climate Change Detection and Indices (ETCCDI). Five indices are currently available, one for each type of extreme: heat (tx90p), cold (tn10p), wind (wx), drought (cdd) and flooding (rx5day). The individual indices can be combined into a single index, with or without weighting each component. This combined index is roughly analogous to the Actuaries Climate Risk Index.
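As an illustration only, the following minimal sketch (not part of the ESMValTool workflow) shows one way the five standardized component indices could be combined into a single index with optional weights; the combine_indices function and the example weights are hypothetical placeholders.
# Hypothetical sketch: combine standardized indices with optional weights.
# 'indices' is a named list of arrays with identical dimensions; equal
# weights are used when 'weights' is NULL.
combine_indices <- function(indices, weights = NULL) {
  if (is.null(weights)) {
    weights <- rep(1 / length(indices), length(indices))
  }
  weights <- weights / sum(weights)  # normalise so the weights sum to 1
  Reduce(`+`, Map(`*`, indices, weights))
}
For example, combine_indices(list(tx90p = i1, tn10p = i2, wx = i3, cdd = i4, rx5day = i5), weights = c(0.3, 0.2, 0.2, 0.15, 0.15)) would return the weighted sum of the five (equally dimensioned) index arrays.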
Note: This vignette has been written to process and plot the output of an ESMValTool namelist configured with a specific set of parameters. If you wish to run and plot the outputs for another set of parameters, you need to re-run the corresponding namelist with the desired parameters, interpolate the output NetCDF files to a common grid, and modify the vignette code accordingly.
The following example computes the five indices in the North Atlantic - European sector [60ºW-40ºE; 30ºN-70ºN] using the CMIP5 projection for the RCP 8.5 scenario.
This example requires the following system libraries:
You will need to install specific versions of certain R packages as follows:
library(devtools)
install_git('https://earth.bsc.es/gitlab/es/startR', branch = 'develop-hotfixes-0.0.2')
Four functions should be loaded by running the following lines in R until they are integrated into the current version of s2dverification:
source('https://earth.bsc.es/gitlab/es/s2dverification/raw/develop-Climdex/R/Climdex.R')
source('https://earth.bsc.es/gitlab/es/s2dverification/raw/develop-Climdex/R/Threshold.R')
source('https://earth.bsc.es/gitlab/es/s2dverification/raw/develop-Magic/vignettes/risk_index/DailyAnomaly.R')
source('https://earth.bsc.es/gitlab/es/s2dverification/raw/develop-Magic_WP6/R/WeightedMean.R')
All the other R packages involved can be installed directly from CRAN and loaded as follows:
library(s2dverification)
library(startR)
library(multiApply)
library(ggplot2)
library(climdex.pcic)
library(parallel)
The Extreme Heat Index (tx90p) is defined as the percentage of days when the maximum temperature exceeds the 90th percentile.
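As a toy illustration of this definition (not part of the workflow below), the sketch computes tx90p for a single grid point from synthetic data. Note that the actual ETCCDI calculation uses calendar-day percentiles estimated over a running window, which the Threshold and Climdex functions used later take care of.
# Toy example: percentage of days in a target year whose maximum
# temperature exceeds the 90th percentile of a synthetic reference sample.
set.seed(1)
tasmax_reference <- rnorm(365 * 30, mean = 290, sd = 8)  # 30 reference years
tasmax_target <- rnorm(365, mean = 292, sd = 8)          # one target year
threshold_90 <- quantile(tasmax_reference, probs = 0.9)
tx90p_toy <- 100 * mean(tasmax_target > threshold_90)    # percentage of days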
In order to evaluate the future projections, it is necessary to compute the index climatology during a reference historical period. In this example, the period 1971-2000 has been selected. The following steps should be taken:
The reference data are loaded with the Start() function. The full description and examples of its use can be found at https://earth.bsc.es/gitlab/es/startR. The object returned by Start() will have at least one time and two spatial dimensions.
var0 <- 'tasmax'
start_climatology <- '1971-01-01'
end_climatology <- '2000-12-31'
lat.min <- 30; lat.max <- 70
lon.min <- -60; lon.max <- 40
climatology_filenames <- paste0("/esarchive/scratch/pbretonn/cmip5-cp4cds/historical/day/tasmax/",
"tasmax_day_IPSL-CM5A-LR_historical_r1i1p1_19500101-20051231.nc")
reference_data <- Start(model = climatology_filenames,
var = var0, var_var = 'var_names',
time = values(list(as.POSIXct(start_climatology),
as.POSIXct(end_climatology))),
lat = values(list(lat.min, lat.max)),
lon = values(list(lon.min, lon.max)),
lon_var = 'lon',
lon_reorder = CircularSort(0, 360),
return_vars = list(time = 'model', lon = 'model', lat = 'model'),
retrieve = TRUE)
metadata <- attributes(reference_data)
> dim(reference_data)
model var time lat lon
1 1 10957 21 28
Daily anomalies are computed with the dailyAno function. Finally, the names of the dimensions are re-established.
dates <- attributes(reference_data)$Variables$dat1$time
dates <- as.PCICt(dates, cal = attributes(dates)$variables$time$calendar, format = "%Y%m%d")
jdays <- as.numeric(strftime(dates, format = "%j"))
anomaly_data <- Apply(list(reference_data), target_dims = which(names(dim(reference_data)) == "time"),
AtomicFun = "dailyAno", jdays = jdays, ncores = 4)$output1
names(dim(anomaly_data))[1] <- "time"
The trend is computed with the Trend function. In order to remove the trend from the reference_data, the correction is calculated by subtracting the detrended_data from the anomaly_data. Again, the original dimensional information is restored.
detrended_data <- Trend(anomaly_data, posTR = which(names(dim(anomaly_data)) == "time"))
diff <- anomaly_data - detrended_data$detrended
diff <- aperm(diff, c(2,3,1,4,5))
detrended_data <- reference_data - diff
attributes(detrended_data) <- metadata
quantile <- 0.9
base_range <- as.numeric(c(substr(start_climatology, 1, 4), substr(end_climatology, 1, 4)))
thresholds <- Threshold(detrended_data, base_range = as.numeric(base_range),
qtiles = quantile, ncores = detectCores() -1)
> dim(thresholds)
jdays model var lat lon
365 1 1 21 28
The Climdex() function will return the Extreme Heat Index during the reference period.
base_index <- Climdex(data = detrended_data, metric = 'tx90p', quantiles = thresholds, ncores = detectCores() - 1)
The output of the Climdex function is a list object; the index values are stored in base_index$result.
> str(base_index)
List of 2
$ result: num [1:30, 1, 1, 1:21, 1:28] 8.22 12.33 4.38 8.49 9.59 ...
$ years : chr [1:30] "1971" "1972" "1973" "1974" ...
> dim(base_index$result)
year model var lat lon
30 1 1 21 28
base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), AtomicFun = "sd")$output1
The Extreme Heat Index is computed for the period 2020-2040 by following these steps:
The projection data are loaded with the Start() function.
start_model <- '2020-01-01'
end_model <- '2040-12-31'
model_filenames <- paste0("/esarchive/scratch/pbretonn/cmip5-cp4cds/rcp85/day/tasmax/",
"tasmax_day_IPSL-CM5A-LR_rcp85_r1i1p1_20060101-22051231.nc")
rcp_data <- Start(model = model_filenames,
var = var0, var_var = 'var_names',
time = values(list(as.POSIXct(start_model), as.POSIXct(end_model))),
lat = values(list(lat.min, lat.max)),
lon = values(list(lon.min, lon.max)),
lon_var = 'lon',
lon_reorder = CircularSort(0, 360),
return_vars = list(time = 'model', lon = 'model', lat = 'model'),
retrieve = TRUE)
The object returned by Start() contains, among others, four arrays with the requested daily maximum temperature data for each grid point. The summary() function allows checking their content.
> dim(rcp_data)
model var time lat lon
1 1 7671 21 28
> summary(rcp_data)
Min. 1st Qu. Median Mean 3rd Qu. Max.
221.6 276.7 283.6 283.4 291.2 321.1
projection_index <- Climdex(data = rcp_data, metric = 'tx90p', quantiles = thresholds, ncores = detectCores() - 1)
projection_mean <- 10  # by construction, ~10% of days exceed the 90th percentile during the reference period
base_sd_proj <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
projection_index_standardized <- (projection_index$result - projection_mean) / base_sd_proj
A map of the mean index values during the examined period 2020-2040 is obtained and saved in PNG format in the working directory with the name “tx90p_IPSL-CMIP5_85rpc_2020-01-01_2040-12-31.png”.
lat <- attr(rcp_data, "Variables")$dat1$lat
lon <- attr(rcp_data, "Variables")$dat1$lon
title <- paste0("Index for tx90p ", substr(start_model, 1, 4), "⨪",
substr(end_model, 1, 4), " 85rcp CMIP5-IPSL")
breaks <- -8 : 8
PlotEquiMap(Mean1Dim(projection_index_standardized, which(names(dim(projection_index_standardized)) ==
"year")), lon = lon, lat = lat, filled.continents = FALSE, toptitle = title, brks=breaks,
fileout = paste0('tx90p_IPSL-CMIP5_85rpc_', start_model, "_", end_model, ".png"))
The inland average of the Extreme Heat Index can be computed, in order to plot its time evolution, using the WeightedMean function. Smoothing() returns the smoothed time series for a 3-year moving window, which can be modified using the runmeanlen parameter.
masc <- Start(model = '/esarchive/exp/ipsl-cm5a-lr/cmip5-rcp85_i1p1/constant/r0i0p0/fx/sftlf/sftlf.nc',
var = 'sftlf', var_var = 'var_names', lat = values(list(lat.min, lat.max)),
lon = values(list(lon.min, lon.max)), lon_reorder = CircularSort(0,360),
return_vars = list(lon = 'model', lat = 'model'), retrieve = TRUE)
temporal <- WeightedMean(projection_index_standardized, lon = lon, lat = lat, mask = drop(masc))
temporal_3ysmooth <- Smoothing(temporal, runmeanlen = 3, numdimt = 1)
The following code should be run to plot and save the original average and the 3-year smoothed data.
png("Temporal_Inland_ExtremeHeatIndex_IPSL-CMIP5_85rcp_2020-2040.png", width = 8, height = 5, units = 'in', res = 100,
type = "cairo")
plot(substr(start_model, 1, 4):substr(end_model, 1, 4), temporal, type = "l", lty = 5, lwd = 2, bty = 'n',
xlab = "Time (years)", ylab = "Extreme Heat Index", main = "Inland average Extreme Heat Index")
lines(substr(start_model, 1, 4):substr(end_model, 1, 4), temporal_3ysmooth, col = "darkgreen", lwd = 2)
legend('bottomright', c('Annual', '3-year smooth'), col = c(1, 'darkgreen'), lty = c(5, 1), lwd = 2, bty = 'n')
dev.off()
The Extreme Cold Index (tn10p) is defined as the percentage of days when the minimum temperature falls below the 10th percentile. The procedure is equivalent to that of the previous Extreme Heat Index example.
var0 <- 'tasmin'
start_climatology <- '1971-01-01'
end_climatology <- '2000-12-31'
lat.min <- 30; lat.max <- 70
lon.min <- -60; lon.max <- 40
climatology_filenames <- paste0("/esarchive/scratch/pbretonn/cmip5-cp4cds/historical/day/tasmin/",
"tasmin_day_IPSL-CM5A-LR_historical_r1i1p1_19500101-20051231.nc")
reference_data <- Start(model = climatology_filenames,
var = var0, var_var = 'var_names',
time = values(list(as.POSIXct(start_climatology),
as.POSIXct(end_climatology))),
lat = values(list(lat.min, lat.max)),
lon = values(list(lon.min, lon.max)),
lon_var = 'lon',
lon_reorder = CircularSort(0, 360),
return_vars = list(time = 'model', lon = 'model', lat = 'model'),
retrieve = TRUE)
metadata <- attributes(reference_data)
dates <- attributes(reference_data)$Variables$dat1$time
dates <- as.PCICt(dates, cal = attributes(dates)$variables$time$calendar, format = "%Y%m%d")
jdays <- as.numeric(strftime(dates, format = "%j"))
anomaly_data <- Apply(list(reference_data), target_dims = which(names(dim(reference_data)) == "time"),
AtomicFun = "dailyAno", jdays = jdays, ncores = 4)$output1
names(dim(anomaly_data))[1] <- "time"
detrended_data <- Trend(anomaly_data, posTR = which(names(dim(anomaly_data)) == "time"))
diff <- anomaly_data - detrended_data$detrended
diff <- aperm(diff, c(2,3,1,4,5))
detrended_data <- reference_data - diff
attributes(detrended_data) <- metadata
quantile <- 0.1
base_range <- as.numeric(c(substr(start_climatology, 1, 4), substr(end_climatology, 1, 4)))
thresholds <- Threshold(detrended_data, base_range = as.numeric(base_range),
qtiles = quantile, ncores = detectCores() -1)
base_index <- Climdex(data = detrended_data, metric = 'tn10p', quantiles = thresholds, ncores = detectCores() - 1)
base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), AtomicFun = "sd")$output1
The Extreme Cold Index is computed for the period 2020-2040 by following these steps:
The projection data are loaded with the Start() function.
start_model <- '2020-01-01'
end_model <- '2040-12-31'
model_filenames <- paste0("/esarchive/scratch/pbretonn/cmip5-cp4cds/rcp85/day/tasmin/",
"tasmin_day_IPSL-CM5A-LR_rcp85_r1i1p1_20060101-22051231.nc")
rcp_data <- Start(model = model_filenames,
var = var0, var_var = 'var_names',
time = values(list(as.POSIXct(start_model), as.POSIXct(end_model))),
lat = values(list(lat.min, lat.max)),
lon = values(list(lon.min, lon.max)),
lon_var = 'lon',
lon_reorder = CircularSort(0, 360),
return_vars = list(time = 'model', lon = 'model', lat = 'model'),
retrieve = TRUE)
projection_index <- Climdex(data = rcp_data, metric = 'tn10p',
                            quantiles = thresholds, ncores = detectCores() - 1)
projection_mean <- 10  # by construction, ~10% of days fall below the 10th percentile during the reference period
base_sd_proj <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
projection_index_standardized <- (projection_index$result - projection_mean) / base_sd_proj
lat <- attr(rcp_data, "Variables")$dat1$lat
lon <- attr(rcp_data, "Variables")$dat1$lon
title <- paste0("Index for tn10p ", substr(start_model, 1, 4), "⨪",
substr(end_model, 1, 4), " 85rcp CMIP5-IPSL")
breaks <- -8 : 8
PlotEquiMap(Mean1Dim(projection_index_standardized, which(names(dim(projection_index_standardized)) ==
"year")), lon = lon, lat = lat, filled.continents = FALSE, toptitle = title, brks=breaks,
fileout = paste0('tn10p_IPSL-CMIP5_85rpc_', start_model, "_", end_model, ".png"))
masc <- Start(model = '/esarchive/exp/ipsl-cm5a-lr/cmip5-rcp85_i1p1/constant/r0i0p0/fx/sftlf/sftlf.nc',
var = 'sftlf', var_var = 'var_names', lat = values(list(lat.min, lat.max)),
lon = values(list(lon.min, lon.max)), lon_reorder = CircularSort(0,360),
return_vars = list(lon = 'model', lat = 'model'),retrieve = TRUE)
temporal <- WeightedMean(projection_index_standardized, lon = lon, lat = lat, mask = drop(masc))
temporal_3ysmooth <- Smoothing(temporal, runmeanlen = 3, numdimt = 1)
png("Temporal_Inland_ExtremeColdIndex_IPSL-CMIP5_85rcp_2020-2040.png", width = 8, height = 5, units= 'in', res = 100,
type = "cairo")
plot(substr(start_model, 1, 4):substr(end_model, 1, 4), temporal, type = "l", lty = 5, lwd = 2, bty = 'n',
xlab = "Time (years)", ylab = "Extreme Cold Index", main = "Inland average Extreme Cold Index")
lines(substr(start_model, 1, 4):substr(end_model, 1, 4), temporal_3ysmooth, col = "darkgreen",lwd = 2)
legend('bottomleft', c('Annual', '3-year smooth'), col = c(1, 'darkgreen'), lty = c(5, 1), lwd = 2, bty = 'n')
dev.off()