diff --git a/.Rbuildignore b/.Rbuildignore index 0954c2a0092c62f36e7508d2d58568d39b20cdf1..0c51051b11ebbdf7186d1dd310c9236ea5062d82 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -1,6 +1,9 @@ -.git -.gitignore -.tar.gz -.pdf -./.nc +.*\.git$ +.*\.gitignore$ +.*\.tar.gz$ +.*\.pdf$ +./.nc$ +.*\.gitlab-ci.yml$ +# Ignore tests when submitting to CRAN +#^tests$ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..049daf74c688c9cf187f54b92f053aaa4b415589 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,10 @@ +stages: + - build +build: + stage: build + script: + - module load R/4.1.2-foss-2015a-bare + - module load CDO/1.9.8-foss-2015a + - R CMD build --resave-data . + - R CMD check --as-cran --no-manual --run-donttest ClimProjDiags_*.tar.gz + - R -e 'covr::package_coverage()' diff --git a/R/Extremes.R b/R/Extremes.R index f3e7b16b7e187e6cb20c705bd37b1901bf25c57e..facb6cf4a568356588948eacf246a706ec4dc446 100644 --- a/R/Extremes.R +++ b/R/Extremes.R @@ -25,19 +25,19 @@ #'@import climdex.pcic #'@examples #'##Example synthetic data: -#'data <- 1:(2 * 3 * 372 * 1) -#'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -#'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +#'data <- 1:(2 * 3 * 310 * 1) +#'dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +#'time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', #' units = 'days since 1970-01-01 00:00:00', prec = 'double', #' dim = list(list(name = 'time', unlim = FALSE)))) -#'threshold = rep(40, 31) #'attr(time, "variables") <- metadata #'attr(data, 'Variables')$dat1$time <- time +#'threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) +#'res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, +#' max.missing.days = 5, 
ncores = NULL) +#'str(res) #' -#'a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, -#' max.missing.days = 5, ncores = NULL) -#'str(a) #'@export Extremes <- function(data, threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, timedim = NULL, calendar = NULL, ncores = NULL) { diff --git a/R/ShiftLon.R b/R/ShiftLon.R index aad4deb0c78b8f50e38dc6f19f3904c5cfa21b14..88b034d929f246dc677ab80af4fac430d80e527b 100644 --- a/R/ShiftLon.R +++ b/R/ShiftLon.R @@ -28,7 +28,7 @@ #'lat <- -90:90 ## lat does not change #'shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) #' -#' \donttest{ +#' \dontrun{ #'s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) #'s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) #' } diff --git a/R/Threshold.R b/R/Threshold.R index 181a5f04fd0a9c2385f8d3069de0e0a16babf6d8..f6df348fbfd481f6476ddc13997c38318c9eadfa 100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -17,9 +17,9 @@ #'@importFrom stats quantile #'@examples #'##Example synthetic data: -#'data <- 1:(2 * 3 * 372 * 1) -#'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -#'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +#'data <- 1:(2 * 3 * 310 * 1) +#'dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +#'time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', #' units = 'days since 1970-01-01 00:00:00', prec = 'double', #' dim = list(list(name = 'time', unlim = FALSE)))) @@ -28,6 +28,7 @@ #' #'a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) #'str(a) +#' #'@export Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL, na.rm = FALSE) { if (is.null(data)) { @@ 
-102,7 +103,7 @@ Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qt "element will be used.") } dates <- as.PCICt(dates, cal = calendar) - dates = as.character(dates) + dates <- as.character(dates) jdays <- as.numeric(strftime(dates, format = "%j")) if (calendar == "gregorian" | calendar == "standard" | calendar == "proleptic_gregorian") { year <- as.numeric(strftime(dates, format = "%Y")) diff --git a/inst/doc/anomaly_agreement.html b/inst/doc/anomaly_agreement.html deleted file mode 100644 index a23f4d8ba46b0eb2c2d4bb04304c939ace0fb4ce..0000000000000000000000000000000000000000 --- a/inst/doc/anomaly_agreement.html +++ /dev/null @@ -1,474 +0,0 @@ - - - - - -Multi-model agreement - - - - - - - - - - - - - - - - - - -

Multi-model agreement

- -

Multi-model agreement performs a comparison of climate model projections anomalies. This vignette illustrates step-by-step how to perform a multi-model agreement assessment using ClimProjDiags package functionalities. The following example simulates the case of summer projections temperature anomalies for different models.

- -

1- Load dependencies

- -

This example requires the following system libraries:

- - - -

The ClimProjDiags R package should be loaded by running the following lines in R, onces it is integrated into CRAN mirror.

- -
library(ClimProjDiags)
-
- -

All the other R packages involved can be installed directly from CRAN and loaded as follows:

- -
library(abind)
-library(s2dverification)
-library(ggplot2)
-
- -

2- Define the problem and the correspondent data and parameters

- -

The aim is to know, compared with a reference period:

- - - -

The ilustrative problem is to compare the monthly mean air temperature at 2 m in summer between four different models. The reference period used from the historical simulations to perform the anomalies is 1961 - 1990. While, the future scenario chosen is the rcp2.6 during the period 2006 - 2100. Finally, the region selected in the northern hemisphere is between -40 - 20 ºE and 25 - 60 ºN.

- -

The parameters are defined by running the next lines in R:

- -
var <- 'tas'
-
-start_climatology <- '1961'
-end_climatology <- '1990'
-
-start_projection <- '2006'
-end_projection <- '2100'
-
-lat <- seq(25, 60, 5)
-lon <- seq(-35, 20 ,5)
-
- -

A synthetic sample of data for the reference period is built by adding random perturbation to a sinusoidal function. The latitudinal behavior of the temperature is considered by subtracting randomly a value proportional to the latitude. Furthermore, attributes of time and dimensions are added.

- -
multimodel_historical <- NULL
-for (k in 1 : 4) {
-  grid1 <- 293 - 10 * cos(2 * pi / 12 * (1 : 360)) + rnorm(360)
-  gridlon <- NULL
-  for (i in 1 : 12) {
-    gridlon <- cbind(gridlon, 
-                     grid1 + rnorm(360, sd = 5) * cos(2 * pi / 12 * (1 : 360)))
-  }
-  gridpoint <- NULL
-  for (j in 1 : 8) {
-    gridnew <- apply(gridlon, 2, function(x) {x - rnorm(360, mean = j * 0.5, 
-                                                        sd = 3)})
-    gridpoint <- abind(gridpoint, gridnew, along = 3)
-  }
-  multimodel_historical <- abind(multimodel_historical, gridpoint, along = 4)
-}
-multimodel_historical <- InsertDim(multimodel_historical, posdim = 5, lendim = 1)
-multimodel_historical <- aperm(multimodel_historical, c(4, 5, 1, 2, 3))
-names(dim(multimodel_historical)) <- c("model", "var", "time", "lon", "lat")
-
-time <- seq(ISOdate(1961, 1, 15), ISOdate(1990, 12, 15), "month")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(multimodel_historical, 'Variables')$dat1$time <- time
-
- -

A similar procedure is considered to build the synthetic data for the future projections. However, a small trend is added in order to make the data partially more realistic.

- -
multimodel_projection <- NULL
-for (k in 1 : 4) {
-  grid1 <- 293 - 10 * cos(2 * pi / 12 * (1 : 1140)) + rnorm(1140) + 
-                 (1 : 1140) * rnorm(1, mean = 1.5) / 1140
-  gridlon <- NULL
-  for (i in 1 : 12) {
-    gridlon <- cbind(gridlon, 
-                     grid1 + rnorm(1140, sd = 5) * cos(2 * pi / 12 * (1 : 1140)))
-  }
-  gridpoint <- NULL
-  for (j in 1 : 8) {
-    gridnew <- apply(gridlon, 2, function(x) {x - rnorm(1140, mean = j * 0.5, 
-                                                        sd = 3)})
-    gridpoint <- abind(gridpoint, gridnew, along = 3)
-  }
-  multimodel_projection <- abind(multimodel_projection, gridpoint, along = 4)
-}
-multimodel_projection <- InsertDim(multimodel_projection, posdim = 5, lendim = 1)
-multimodel_projection <- aperm(multimodel_projection, c(4, 5, 1, 2, 3))
-names(dim(multimodel_projection)) <- c("model", "var", "time", "lon", "lat")
-
-time <- seq(ISOdate(2006, 1, 15), ISOdate(2100, 12, 15), "month")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(multimodel_projection, 'Variables')$dat1$time <- time
-
- -

Now, two objects called multimodel_historical and multimodel_projection are available in the R environment. A check can be done to the loaded data by comparing with the next lines (due to the random functions the results may differ between each execution):

- -
> dim(multimodel_historical)
-model   var  time   lon   lat 
-    4     1   360    12    8 
-
-> summary(multimodel_historical)
-   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
-   251.2   281.7   287.9   287.8   294.0   321.6 
-
-> dim(multimodel_projection)
-model   var  time   lon   lat 
-    4     1  1140    12     8 
-
-> summary(multimodel_projection)
-   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
-  254.8   282.8   288.8   288.8   294.9   322.6 
-
- -

3- Multi-model agreement based on seasonal analysis

- -

The multi-model agreement is a comparison based on seasonal anomalies, which are computed by following the next steps:

- - - -
summer_historical <- SeasonSelect(multimodel_historical, season = 'JJA')
-
- -

The new subsets are lists of two elements. The first element is the selected data and the second element contains the corresponding dates.

- -
> str(summer_historical)
-List of 2
- $ data : num [1:4, 1:90, 1:12, 1:8] 314 293 305 300 300 ...
- $ dates: chr [1:90] "1961-06-15 12:00:00" "1961-07-15 12:00:00" ...
-
-> dim(summer_historical$data)
-model  time   lon   lat 
-    4    90    12     8 
-
- - - -
climatology <- Mean1Dim(summer_historical$data, posdim = 2) 
-
- - - -
summer_projection <- Season(multimodel_projection, posdim = 3, 
-                            monini = 1, moninf = 6, monsup = 8)
-
- -

By running the next lines, it is possible to check the dimensions of the data:

- -
> dim(climatology)
-model   lon   lat 
-    4    12     8 
-> dim(summer_projection)
-model   var  time   lon   lat 
-    4     1   95    12     8 
-
- - - -
climatology <- InsertDim(InsertDim(climatology, posdim = 2, lendim = 1), 
-                         posdim = 3, lendim = 95)
-
- - - -
anomaly <- summer_projection - climatology
-
- -

4- Multi-model agreement spatial visualitzation

- -

In order to obtain a spatial visualitzation, the temporal mean is computed. So, the time average anomalies for all models is saved in the average object. AnoAgree() function from ClimProjDiags package calculates the percentages of models which agrees with a positive or negative mean in each grid point.

- -
average <- Mean1Dim(anomaly,  which(names(dim(anomaly)) == "time"))
-agreement <- AnoAgree(average, membersdim = which(names(dim(average)) == "model"))
-
- -

So, in a case when four models are being compared, the agreement object can take the following values: 100 (all models agree), 75 (only one model has opposite sign), 50 (only two models agree with the mean signal) and 25 (one model agrees with the sign of the mean signal because its magnitude is higher that the other three models). These values will change with the number of compared models.

- -

The next question will be answered by the example plot: Where do 80 % or more models agree in the signal? To obtain this plot, the next lines should be run in R. Notice you can modify the threshold by modifying the parameter agreement_threshold. The colour map shows the mean temperature anomaly and the dots the model agreement. The plot will be saved with the name “SpatialSummerAgreement.png”.

- -
agreement_threshold <- 80
-colorbar_lim <- ceiling(max(abs(max(average)), abs(min(average))))
-brks <- seq(-colorbar_lim, colorbar_lim, length.out = 11)
-
-PlotEquiMap(drop(Mean1Dim(average, which(names(dim(average)) == "model"))), 
-            lat = lat, lon = lon, units = "K", brks = brks, 
-            toptitle = paste(var, "- climatology:", start_climatology, "to", 
-                             end_climatology, "and future simulation:", 
-                             start_projection, "to", end_projection), 
-            filled.continents = FALSE, title_scale = 0.6,
-            dots = drop(agreement) >= agreement_threshold, 
-            fileout = "SpatialSummerAgreement.png")
-
- -

Multimodel Agreement for temperature in summer

- -

5- Multi-model agreement temporal visualization

- -

To visualize the time evolution of multi-model agreement, the spatial average is performed by a grid pixel size using the WeightedMean function from the ClimProjDiags package. Also, a smooth filter is applied with the Smoothing() function from the s2dverifiction package. In this example, a 5-year moving window filter is applied by defining the parameter runmeanlen = 5.

- -
temporal <- drop(WeightedMean(anomaly, lon = lon, lat = lat, mask = NULL))
-temporal <- Smoothing(temporal, runmeanlen = 5, numdimt = 2)
-
- -

Before visualizing, a data frame with the proper format is created.

- -
data_frame <- as.data.frame.table(t(temporal))
-years <- rep(start_projection : end_projection, 4)
-data_frame$Year <- c(years)
-names(data_frame)[2] <- "Model"
-for (i in 1 : length(levels(data_frame$Model))) {
-  levels(data_frame$Model)[i] <- paste0("model", i)
-}
-
- -

A new png file will be saved in the working directory with the name “TemporalSummerAgreement.png”.

- -
g <- ggplot(data_frame, aes(x = Year, y = Freq)) + theme_bw() + 
-            ylab("tas") +  xlab("Year") + theme(text=element_text(size = 12),
-            legend.text=element_text(size = 12), 
-            axis.title=element_text(size = 12)) +
-            stat_summary(data =  data_frame, fun.y= "mean", 
-                         mapping = aes(x = data_frame$Year, y = data_frame$Freq, 
-                         group = interaction(data_frame[2,3]), 
-                                      color = data_frame$Model),
-                         geom = "line", size = 0.8) + 
-            stat_summary(data =  data_frame, geom = "ribbon", 
-                         fun.ymin = "min", fun.ymax = "max", 
-                         mapping = aes(x = data_frame$Year, y = data_frame$Freq, 
-                         group = interaction(data_frame[2,3])), 
-                         alpha = 0.3, color = "red", fill = "red") +
-           ggtitle("Temporal Summer Agreement")
-ggsave(filename = "TemporalSummerAgreement.png", g, device = NULL, width = 8, 
-       height = 5, units = 'in', dpi = 100) 
-
- -

Note: if a warning appears when plotting the temporal time series, it might be due to the NA's values introduced when smoothing the time series.

- -

Temporal evolution of the mean summer agreement of air temperature

- - - - diff --git a/inst/doc/diurnaltemp.html b/inst/doc/diurnaltemp.html deleted file mode 100644 index 8ae3e7932cedf70a9b2197420f2bf8bf74e591f6..0000000000000000000000000000000000000000 --- a/inst/doc/diurnaltemp.html +++ /dev/null @@ -1,417 +0,0 @@ - - - - - -Diurnal Temperature Variation (DTR) Indicator - - - - - - - - - - - - - - - - - - -

Diurnal Temperature Variation (DTR) Indicator

- -

The diurnal temperature variation indicator is a proxy for energy demand. The diurnal temperature indicator is the number of days in a season when the daily temperature variation (tasmax - tasmin) exceeds the vulnerability threshold. Here, the vulnerability threshold is based on the mean daily temperature variation for a reference period plus 5 degrees.

- -

1- Load dependencies

- -

This example requires the following system libraries:

- - - -

The ClimProjDiags R package should be loaded by running the following lines in R, once it is integrated into CRAN mirror.

- -
library(ClimProjDiags)
-
- -

All the other R packages involved can be installed directly from CRAN and loaded as follows:

- -
library(s2dverification)
-library(abind)
-library(parallel)
-
- -

2- Problem, parameters and data definition

- -

To ilustrate the problem, daily maximum or minimum air temperature at 2 m for the reference period (1971 - 2000) and the future scenario rcp2.6 (2006 - 2100) in the northern hemisphere (between -40 - 20 ºE and 25 - 60 ºN) will be created artificially.

- -

A grid of 5 degrees is defined by running the next lines in R:

- -
lat <- seq(25, 60, 5)
-lon <- seq(-35, 20 ,5)
-
- -

The synthetic sample of maximum and minimum temperature for the historical period can be obtained by running the following lines. The maximum temperature data is built by adding random perturbation to a sinusoidal function. The latitudinal behavior of the temperature is computed by randomly subtracting a value proportional to the latitude. The minimum temperature is built by subtracting a random value (with anual cycle included) to the synthetic maximum temperature. Furthermore, attributes of time and dimensions are added to both samples.

- -
tmax_historical <- NULL
-tmin_historical <- NULL
-grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 10958)) + rnorm(10958)
-grid2 <- 289 - 8 * cos(2 * pi / 365 * (1 : 10958)) - abs(rnorm(10958))
-
-
-gridlon1 <- NULL
-gridlon2 <- NULL
-for (i in 1 : 12) {
-  gridlon1 <- cbind(gridlon1, 
-                    grid1 + abs(rnorm(10958, mean = 2, sd = 1) * 
-                                cos(2 * pi / 365 * (1 : 10958))))
-  gridlon2 <- cbind(gridlon2, 
-                    grid2 - abs(rnorm(10958, mean = 2, sd = 1) * 
-                                cos(2 * pi / 365 * (1 : 10958))))
-}
-
-for (j in 1 : 8) {
-  gridnew1 <- apply(gridlon1, 2, function(x) {x - rnorm(10958, mean = j * 0.001,
-                                                        sd = 0.1)})
-  gridnew2 <- apply(gridlon2, 2, function(x) {x - 
-                                 abs(rnorm(10958, mean = j * 0.75, sd = 0.5))})
-  tmax_historical <- abind(tmax_historical, gridnew1, along = 3)
-  tmin_historical <- abind(tmin_historical, gridnew2, along = 3)
-}
-
-tmax_historical <- InsertDim(InsertDim(tmax_historical, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-tmin_historical <- InsertDim(InsertDim(tmin_historical, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-names(dim(tmax_historical)) <- c("model", "var", "time", "lon", "lat")
-names(dim(tmin_historical)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_historical, 'Variables')$dat1$time <- time
-attr(tmin_historical, 'Variables')$dat1$time <- time
-
- -

A similar procedure is done to build the synthetic data for the future projections. However, a trend is added.

- -
tmax_projection <- NULL
-tmin_projection <- NULL
-grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 34698)) + rnorm(34698) + 
-         (1 : 34698) * rnorm(1, mean = 4) / 34698 
-
-grid2 <- 289 - 8 * cos(2 * pi / 365 * (1 : 34698)) - abs(rnorm(34698)) + 
-         (1 : 34698) * rnorm(1, mean = 4) / 40000
-gridlon1 <- NULL
-gridlon2 <- NULL
-for (i in 1 : 12) {
-  gridlon1 <- cbind(gridlon1, 
-                    grid1 + abs(rnorm(34698, mean = 2, sd = 1) * 
-                            cos(2 * pi / 365 * (1 : 34698))))
-  gridlon2 <- cbind(gridlon2, 
-                    grid2 - abs(rnorm(34698, mean = 2, sd = 1) * 
-                            cos(2 * pi / 365 * (1 : 34698))))
-}
-
-for (j in 1 : 8) {
-  gridnew1 <- apply(gridlon1, 2, function(x) {x - rnorm(34698, mean = j * 0.01, 
-                                                        sd = 0.1)})
-  gridnew2 <- apply(gridlon2, 2, function(x) {x - 
-                                 abs(rnorm(34698, mean = j * 0.75, sd = 0.5))})
-  tmax_projection <- abind(tmax_projection, gridnew1, along = 3)
-  tmin_projection <- abind(tmin_projection, gridnew2, along = 3)
-}
-
-tmax_projection <- InsertDim(InsertDim(tmax_projection, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-tmin_projection <- InsertDim(InsertDim(tmin_projection, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-names(dim(tmax_projection)) <- c("model", "var", "time", "lon", "lat")
-names(dim(tmin_projection)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_projection, 'Variables')$dat1$time <- time
-attr(tmin_projection, 'Variables')$dat1$time <- time
-
- -

3- Reference diurnal temperature variation

- -

The function DTRRef, from the ClimProjDiags package, computes the mean between maximum and minimum temperature for each season during the selected reference period:

- -
dtr_reference <- DTRRef(tmax = tmax_historical, tmin = tmin_historical, by.seasons = TRUE, ncores = NULL)
-
- -

The label dtr_reference contains the output for each season and gridpoint:

- -
> str(dtr_reference)
-List of 2
- $ dtr.ref: num [1:4, 1, 1, 1:12, 1:8] 5.09 2.9 5.04 2.91 5.09 ...
- $ season : chr [1:4] "DJF" "MAM" "JJA" "SON"
-
- -

4- Computing the diurnal temperature variation indicator

- -

The diurnal temperature variation indicator is computed with the function DTRIndicator indicating the maximum and minimum temperature for the future projection and the reference temperature variation:

- -
dtr_indicator <- DTRIndicator(tmax = tmax_projection, tmin = tmin_projection, 
-                              ref = dtr_reference, 
-                              by.seasons = TRUE, ncores = NULL)
-
- -

The function returns a list of three elements, the label indicator being the desired output:

- -
> str(dtr_indicator)
-List of 3
- $ indicator: int [1:96, 1:4, 1, 1, 1:12, 1:8] 0 1 0 0 0 2 0 1 1 0 ...
- $ year     : chr [1:96] "2006" "2007" "2008" "2009" ...
- $ season   : chr [1:4] "DJF" "JJA" "MAM" "SON"
-
- -

Note: the total number of years in dtr_indicator$year (96) is greater than the period examined (in this case 95 years of future projection). This is because the last december of the time series belongs to the subsequent winter (in this example 2101 winter).

- -

5- Visualizing the diurnal temperature variation indicator

- -

A four panel plot can be generated to visualize the seasonal indicator by running the following lines:

- -
dtr_rcp <- array(dim = c(length(lon), length(lat), 4))             
-for (j in 1:4){
-              dtr_rcp[,,j] <- Mean1Dim(dtr_indicator$indicator[,j,,,,], 1)
-}            
-breaks <- 0 : (max(dtr_rcp) + 1)         
-
-PlotLayout(PlotEquiMap, c(1, 2), lon = lon, lat = lat, var = dtr_rcp, 
-           titles = c('DJF', 'MAM', 'JJA', 'SON'), 
-           toptitle = "DTR", filled.continents = FALSE, 
-           units = "Days", title_scale = 0.5, axelab = FALSE, 
-           draw_separators = TRUE, subsampleg = 1, 
-           brks = breaks, color_fun = clim.palette("yellowred"),
-           bar_extra_labels = c(2, 0, 0, 0), 
-           fileout = "SpatialDTR.png")
-
- -

Diuranl Temperature Range Indicator

- -

Furthermore, the future diurnal temperature variation can be compared with the one observed during the reference period. So, the diurnal temperature variation indicator is computed for the reference period by running:

- -
dtr_indicator_reference <- DTRIndicator(tmax = tmax_historical, 
-                                        tmin = tmin_historical, 
-                                        ref = dtr_reference, by.seasons = TRUE, 
-                                        ncores = NULL)
-
- -

The comparison between the reference and the future projection diurnal temperature variation indicator can be computed With a simple subtraction. To visualize the result, PlotLayout function will be applied again. The resulting plot will be saved in the working directory.

- -
dtr_diff <- array(dim=c(length(lat), length(lon), 4))
-for (i in 1:4){
-  dtr_diff[,,i] <- Mean1Dim(dtr_indicator$indicator[,i,1,1,,], 1) - 
-                   Mean1Dim(dtr_indicator_reference$indicator[,i,1,1,,], 1)
-}
-
-PlotLayout(PlotEquiMap, c(1, 2), lon = lon, lat = lat, var = dtr_diff,
-           titles = c('DJF', 'MAM', 'JJA', 'SON'), 
-           toptitle = "Change in DTR indicator",
-           filled.continents = FALSE, units = "Days",
-           axelab = FALSE, draw_separators = TRUE, subsampleg = 1, 
-           brks = -2 : 2, bar_extra_labels = c(2, 0, 0, 0), 
-           fileout = "SpatialComparisonDTR.png")
-
- -

Spatial Comparison of DTR

- -

Note: the outputs of this example, including the figures, are artificial.

- - - - diff --git a/inst/doc/extreme_indices.html b/inst/doc/extreme_indices.html deleted file mode 100644 index 2d064b97d9bcd51b49abb619d177ef48c096bb8f..0000000000000000000000000000000000000000 --- a/inst/doc/extreme_indices.html +++ /dev/null @@ -1,348 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-

Extreme Indices

-

The extreme indices are an ensemble of indices based on the Expert Team on Climate Change Detection Indices (ETCCDI). There are currently 5 available indices to be computed: extreme heat (tx90p), extreme cold (tn10p), extreme wind (wx), drought (ccd) and flooding (rx5day). The individual indices can be combined into a single index with or without weighting for each component.

-
-

1- Load dependencies

-

This example requires the following system libraries:

- -

The ClimProjDiags R package should be loaded by running the following lines in R once it’s integrated into CRAN mirror.

-
library(ClimProjDiags)
-

All the other R packages involved can be installed directly from CRAN and loaded as follows:

-
library(s2dverification)
-library(abind)
-library(multiApply)
-library(ggplot2)
-library(parallel)
-
-
-

2- Synthetic data

-

Daily maximum and minimum temperature, wind speed and precipitation are necessary to compute the different indices in both, the reference period (1971 - 2000) and the future projection (2006 - 2100). The defined region will be in the northern hemisphere between -40 - 20 ºE and 25 - 60 ºN.

-

Maximum temperature is generated considering the annual cycle:

-
lat <- seq(25, 60, 5)
-lon <- seq(-35, 20 ,5)
-tmax_historical <- NULL
-grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 10958)) + rnorm(10958)
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, 
-                   grid1 + rnorm(10958, sd = 5) * cos(2 * pi / 365 * (1 : 10958)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - rnorm(10958, mean = j * 0.5, sd = 3)})
-  tmax_historical <- abind(tmax_historical, gridnew, along = 3)
-}
-tmax_historical <- InsertDim(InsertDim(tmax_historical, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-names(dim(tmax_historical)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_historical, 'Variables')$dat1$time <- time
-

A similar procedure is considered to build the synthetic data for the future projections. However, a little trend is added.

-
tmax_projection <- NULL
-grid1 <- 298 - 10 * cos(2 * pi / 365 * (1 : 34698)) + rnorm(34698) + 
-         (1 : 34698) * rnorm(1, mean = 4) / 34698
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, grid1 + rnorm(34698, sd = 5) * 
-                                    cos(2 * pi / 365 * (1 : 34698)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - 
-                               rnorm(34698, mean = j * 0.5, sd = 3)})
-  tmax_projection <- abind(tmax_projection, gridnew, along = 3)
-}
-tmax_projection <- InsertDim(InsertDim(tmax_projection, posdim = 1, lendim = 1), posdim = 1, lendim = 1)
-names(dim(tmax_projection)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_projection, 'Variables')$dat1$time <- time
-

To build synthetic precipitation data, a lognormal distribution is considered:

-
ppt_historical <- rlnorm(10958 * 12 * 8)
-dim(ppt_historical) <- c(model = 1, var = 1, time = 10958, lon = 12, lat = 8)
-time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(ppt_historical, 'Variables')$dat1$time <- time
-ppt_projection <- rlnorm(34698 * 12 * 8)
-dim(ppt_projection) <- c(model = 1, var = 1, time = 34698, lon = 12, lat = 8)
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(ppt_projection, 'Variables')$dat1$time <- time
-
-
-

3- Computing the Extreme Heat Index

-

The Extreme Heat Index (t90p) is defined as the percentage of days when the maximum temperature exceeds the 90th percentile.

-

In order to evaluate the future projections, it is necessary to compute the index during a reference historical period. The next steps should be followed:

-

To remove seasonality effects, the anomaly is computed for each day and gridpoint by applying the DailyAno function. The name of the first dimensions is defined as ‘time’ dimension.

-
anomaly_data <- apply(tmax_historical, c(1,2,4,5), DailyAno, dates = attributes(tmax_historical)$Variables$dat1$time)
-
-names(dim(anomaly_data))[1] <- "time"
-

This data can be detrended by applying the Trend function from s2dverification package. In order to remove the trend from the tmax_historical, the correction is calculated by subtracting the detrended_data to the anomaly_data.

-
detrended_data <- Trend(anomaly_data, 
-                        posTR = which(names(dim(anomaly_data)) == "time"))
-diff <- anomaly_data - detrended_data$detrended
-diff <- aperm(diff, c(2,3,1,4,5))
-detrended_data <- tmax_historical - diff
-

For each gridpoint and day of the year (from the 1st of January to the 31st of December), the maximum temperature on the position at the 90 percent of the series will be calculated as the threshold.

-
quantile <- 0.9
-thresholds <- Threshold(detrended_data, qtiles = quantile, 
-                        ncores = detectCores() -1)
-
> dim(thresholds)
-jdays model   var   lon   lat 
-  366     1     1    12     8 
-

By indicating the metric and introducing the threshold, Climdex() function will return the extreme heat index during the reference period.

-
base_index <- Climdex(data = detrended_data, metric = 't90p', 
-                      threshold = thresholds, ncores = detectCores() - 1)
-

The output of the `Climdex` function will be a `list()` object. Index values are saved in the `base_index$result` element.

-
> str(base_index)
-List of 2
- $ result: num [1:30, 1, 1, 1:12, 1:8] 11.23 8.74 10.41 11.78 10.14 ...
- $ years : num [1:30] 1971 1972 1973 1974 1975 ...
-> dim(base_index$result)
- year model   var   lon   lat 
-   30     1     1    12     8 
-

Now, the standard deviation is computed in order to standardize the index. Notice that, by definition, the mean of the percentage of the number of days exceeding the 90th percentile is 10. Only standard deviation is computed.

-
base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), 
-                 fun = "sd")$output1
-

The index can be computed by considering the threshold obtained for the reference period.

-
projection_index <- Climdex(data = tmax_projection, metric = 't90p', 
-                            threshold = thresholds, ncores = detectCores() - 1)
-

It is normalized with mean 10 and the standard deviation of the reference period.

-
base_mean <- 10
-base_sd <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
-HeatExtremeIndex <- (projection_index$result - base_mean) / base_sd
-

A spatial representation of the mean index values is obtained and saved in PNG format in the working directory with the name: “SpatialExtremeHeatIndex.png”. The matrix masc is built and shown as dots in the plot indicating which pixels are considered land.

-
masc <- rep(0, 8 * 12)
-masc[c(5 : 12, 18 : 24, 31 : 34, 43, 44, 47, 56 : 60, 67 : 72, 79, 
-       82 : 84, 93 : 96)] <- 1
-dim(masc) <- c(12, 8)
-PlotEquiMap(Mean1Dim(HeatExtremeIndex, 
-                     which(names(dim(HeatExtremeIndex)) == "year")), 
-                     lon = lon, lat = lat, filled.continents = FALSE, 
-                     toptitle = "Extreme Heat Index", dots = masc,
-                     fileout = "SpatialExtremeHeatIndex.png")
-

Spatial distribution of Extreme Heat Index

-

The inland average of the Extreme Heat Index can be computed to plot its time evolution using the WeightedMean function. Smoothing() returns the smoothed time series for a 3 year moving window which can be modified using the runmeanlen parameter.

-
temporal <- WeightedMean(HeatExtremeIndex, lon = lon, lat = lat, mask = drop(masc))
-temporal_3ysmooth <- Smoothing(temporal, runmeanlen = 3, numdimt = 1)
-

The next code should be run to plot and save the original average and the 3 year smoothed data.

-
png("Temporal_Inland_ExtremeHeatIndex.png", width = 8, height = 5, units = 'in', 
-    res = 100, type = "cairo")
-  plot(2006 : 2100, temporal, type = "l", lty = 5, lwd = 2, bty = 'n', 
-       xlab = "Time (years)", ylab = "Extreme Heat Index", 
-       main = "Inland average Extreme Heat Index")
-  lines(2006 : 2100, temporal_3ysmooth, col = "darkgreen", lwd = 2)
-  legend('bottomright', c('Annual', '3 years smooth'), col = c(1, 'darkgreen'), 
-         lty = c(5, 1), lwd = 2, bty = 'n')
-dev.off()
-

Temporal Inland Extreme Heat Index

-
-
-

4- Extreme Drought Index

-

The Extreme Drought Index (cdd), which measures the maximum length of a dry spell, is defined as the maximum number of consecutive days with the daily precipitation amount lower than 1 mm.

-

To compute the Extreme Drought Index during the reference period and its standard deviation and mean:

-

Note: Precipitation data is not detrended. Furthermore, this index does not require computing a threshold, as the Climdex function integrates the threshold of a precipitation amount lower than 1 mm internally. However, this case requires the calculation of the mean.

-
base_index <- Climdex(data = ppt_historical, metric = 'cdd',  
-                      ncores = detectCores() - 1)
-base_mean <-  Apply(list(base_index$result), target_dims = list(c(1)), 
-                    fun = "mean")$output1
-base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), 
-                 fun = "sd")$output1
-

The object base_index contains the output of the Climdex function as a list of two elements with the following dimensions:

-
> str(base_index)
-List of 2
- $ result: num [1:30, 1, 1, 1:12, 1:8] 6 11 8 8 8 12 9 10 6 8 ...
- $ years : num [1:30] 1971 1972 1973 1974 1975 ...
-

The Extreme Drought Index is computed and standardized:

-
projection_index <- Climdex(data = ppt_projection, metric = 'cdd', 
-                            ncores = detectCores() - 1)
-base_mean <- InsertDim(base_mean, 1, dim(projection_index$result)[1])
-base_sd <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
-DroughtExtremeIndex <- (projection_index$result - base_mean) / base_sd
-

Spatial representation of the Extreme Drought Index:

-
PlotEquiMap(Mean1Dim(DroughtExtremeIndex, 
-                     which(names(dim(DroughtExtremeIndex)) == "year")), 
-            lon = lon, lat = lat, filled.continents = FALSE, 
-            toptitle = "Drought Index", brks = seq(-1, 1, 0.01), 
-            fileout = "SpatialDroughtIndex.png")
-

Spatial distribution of Extreme Drought Index

-

Evolution of inland average of the Extreme Drought Index:

-
temporal <- WeightedMean(DroughtExtremeIndex, lon = lon, lat = lat, 
-                         mask = drop(masc))
-temporal_5ysmooth <- Smoothing(temporal, runmeanlen = 5, numdimt = 1)
-png("Temporal_Inland_ExtremeDroughtIndex.png", width = 8, height = 5, units= 'in', 
-    res = 100,  type = "cairo")
-plot(2006: 2100, temporal, type = "l", lty = 5, lwd = 2, bty = 'n', 
-     xlab = "Time (years)", ylab = "Extreme Drought Index", 
-     main = "Inland average Extreme Drought Index")
-lines(2006 : 2100, temporal_5ysmooth, col = "darkgreen",lwd = 2)
-legend('bottomleft', c('Annual', '5 years smooth'), col= c(1, 'darkgreen'),
-        lty = c(5, 1), lwd = 2, bty = 'n')
-dev.off()
-

Temporal evolution of Inland Extreme Drought Index

-
-
-

5- Extreme Flooding Index

-

The Extreme Flooding Index (rx5day) is defined as the maximum precipitation amount in 5 consecutive days.

-

The Extreme Flooding Index during the reference period and its standard deviation and mean can be calculated by executing:

-
base_index <- Climdex(data = ppt_historical, metric = 'rx5day',  
-                      ncores = detectCores() - 1)
-base_mean <-  Apply(list(base_index$result), target_dims = list(c(1)), 
-                    fun = "mean")$output1
-base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), 
-                 fun = "sd")$output1
-

The Extreme Flooding Index is computed and standardized:

-
projection_index <- Climdex(data = ppt_projection, metric = 'rx5day', 
-                            ncores = detectCores() - 1)
-base_mean <- InsertDim(base_mean, 1, dim(projection_index$result)[1])
-base_sd <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
-FloodingExtremeIndex <- (projection_index$result - base_mean) / base_sd
-

Spatial representation of the Extreme Flooding Index:

-
PlotEquiMap(Mean1Dim(FloodingExtremeIndex, 
-            which(names(dim(FloodingExtremeIndex)) == "year")), lon = lon, 
-            lat = lat, filled.continents = FALSE, 
-            toptitle = "Extreme Flooding Index", 
-            brks = seq(-1, 1, 0.1), fileout = "SpatialFloodingIndex.png")
-

Spatial distribution of Extreme Flooding Index

- -
temporal <- WeightedMean(FloodingExtremeIndex, lon = lon, lat = lat, 
-                         mask = drop(masc))
-temporal_3ysmooth <- Smoothing(temporal, runmeanlen = 3, numdimt = 1)
-png("Temporal_Inland_ExtremeFloodingIndex.png", width = 8, height = 5, 
-    units= 'in', res = 100, type = "cairo")
-plot(2006 :  2100, temporal, type = "l", lty = 5, lwd = 2, bty = 'n', 
-     xlab = "Time (years)", ylab = "Extreme Flooding Index", 
-     main = "Inland average Extreme Flooding Index")
-lines(2006 :  2100, temporal_3ysmooth, col = "darkgreen",lwd = 2)
-legend('bottomleft', c('Annual', '3 years smooth'), col= c(1, 'darkgreen'), 
-       lty = c(5, 1), lwd = 2, bty = 'n')
-dev.off()
-

Temporal evolution of Inland Extreme Flooding Index

-
-
-

6- Combining Indices

-

The individual indices can be combined into a single index with or without weighting for each component. This combined index is roughly analogous to the Actuaries Climate Risk Index. Extreme Indices should be saved in the same list object.

-
indices <- list()
-indices[[1]] <- HeatExtremeIndex
-indices[[2]] <- DroughtExtremeIndex
-indices[[3]] <- FloodingExtremeIndex
-

If the weights parameter is defined as NULL, all indices will be equally weighted if the operation parameter is set as mean (by default). To define other weights, provide a vector of length equal to the number of considered indices (3 in this example) and with total sum equal to 1.

-
aci <- CombineIndices(indices = indices, weights = NULL)
-

A spatial visualization can be performed by executing:

-
PlotEquiMap(Mean1Dim(aci, which(names(dim(aci)) == "year")), lon = lon, 
-            lat = lat,  filled.continents = FALSE, toptitle = "Indices Combination",
-            fileout = "CombinedIndices.png")
-

Spatial distribution of Combined Indices

-

Note: This vignette shows the computation of three indices, however, five different indices can be computed with the Climdex function. To consider other combination settings run ?CombineIndices.

-
-
- - - - - - - - diff --git a/inst/doc/heatcoldwaves.html b/inst/doc/heatcoldwaves.html deleted file mode 100644 index c272ce8a24f36682e9f60b1be7b550b9e6e23a9a..0000000000000000000000000000000000000000 --- a/inst/doc/heatcoldwaves.html +++ /dev/null @@ -1,381 +0,0 @@ - - - - - -Heatwave and coldwave duration - - - - - - - - - - - - - - - - - - -

Heatwave and coldwave duration

- -

Heatwave and coldwave duration is considered to be the total duration of “extreme spells”, i.e. the total number of days in a season when the temperature exceeds a threshold for a given number of consecutive days. Other tools for computing such events are also available, but the novelty of this tool is that the users can select their own thresholds (based on quantiles) and the minimum duration of an event. The duration of heatwaves and coldwaves helps to understand potential changes in energy demand.

- -

1- Load dependencies

- -

This example requires the following system libraries:

- - - -

The ClimProjDiags R package should be loaded by running the following lines in R once it is integrated into a CRAN mirror.

- -
library(ClimProjDiags)
-
- -

All the other R packages involved can be installed directly from CRAN and loaded as follows:

- -
library(abind)
-library(s2dverification)
-library(parallel)
-
- -

The illustrative problem is to analyze the daily maximum or minimum air temperature at 2m. The reference period used to compute a threshold is 1971 - 2000. The future scenario chosen is the rcp2.6 during the period 2006 - 2100 for which the heat/cold wave duration will be determined. Finally, the region selected in the northern hemisphere is between -40 - 20 ºE and 25 - 60 ºN.

- -

These parameters are defined by running:

- -
var <- 'tasmax'
-
-start_climatology <- '1971'
-end_climatology <- '2000'
-
-start_projection <- '2006'
-end_projection <- '2100'
-
-lat <- seq(25, 60, 5)
-lon <- seq(-35, 20 ,5)
-
- -

A synthetic sample of data for the reference period is built by adding random perturbations to a sinusoidal function. The latitudinal behavior of the temperature is considered by randomly subtracting a value proportional to the latitude. Furthermore, attributes of time and dimensions are added.

- -
tmax_historical <- NULL
-grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 10958)) + rnorm(10958)
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, grid1 + rnorm(10958, sd = 5) * 
-                                    cos(2 * pi / 365 * (1 : 10958)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - rnorm(10958, mean = j * 0.5, sd = 3)})
-  tmax_historical <- abind(tmax_historical, gridnew, along = 3)
-}
-tmax_historical <- InsertDim(InsertDim(tmax_historical, posdim = 1, lendim = 1),
-                             posdim = 1, lendim = 1)
-names(dim(tmax_historical)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_historical, 'Variables')$dat1$time <- time
-
- -

A similar procedure is considered to build the synthetic data for the future projections. However, a small trend is added.

- -
tmax_projection <- NULL
-grid1 <- 298 - 10 * cos(2 * pi / 365 * (1 : 34698)) + 
-         rnorm(34698) + (1 : 34698) * rnorm(1, mean = 4) / 34698
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, grid1 + rnorm(34698, sd = 5) * 
-                                    cos(2 * pi / 365 * (1 : 34698)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - rnorm(34698, mean = j * 0.5, 
-                                                      sd = 3)})
-  tmax_projection <- abind(tmax_projection, gridnew, along = 3)
-}
-tmax_projection <- InsertDim(InsertDim(tmax_projection, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-names(dim(tmax_projection)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_projection, 'Variables')$dat1$time <- time
-
- -

2- Heatwaves

- -

The heatwaves duration is the number of consecutive days for which the maximum temperature is exceeding a threshold for a minimum number of days during summer.

- -

2.1- Defining heatwaves threshold

- -

In this example, the threshold is defined as the 90th percentile of maximum temperature during the period 1971-2000.

- -

The summer data can be picked by the SeasonSelect function from the ClimProjDiags package.

- -
summer_tmax_historical <- SeasonSelect(tmax_historical, season = 'JJA')
-
- -

The output of SeasonSelect is a list of two elements: data and dates. The selected dates should be consistent: 92 summer days * 30 years = 2760 dates, while the data element also keeps the longitude and latitude dimensions.

- -
> str(summer_tmax_historical)
-List of 2
- $ data : num [1:2760, 1:12, 1:8] 302 285 301 302 312 ...
- $ dates: POSIXct[1:2760], format: "1971-06-01 12:00:00" "1971-06-04 12:00:00" ...
-
- -

To define the heatwave it is necessary to select the maximum temperature threshold that must be exceeded. In this example, the 90th percentile of the maximum temperature during the reference period is selected. This calculation is performed using Threshold function from ClimProjDiags package.

- -
quantile <- 0.9
-thresholds <- Threshold(data = summer_tmax_historical$data, 
-                        dates = summer_tmax_historical$dates, 
-                        calendar ="proleptic_gregorian", 
-                        qtiles = quantile, ncores = detectCores() -1)
-
- -

The output of Threshold is an array of the 92 days of summer in the first dimension and the spatial dimensions in the second and third position in this case:

- -
> str(thresholds)
- num [1:92, 1:12, 1:8] 310 308 312 309 310 ...
-
- -

2.2- Heatwaves projection

- -

The same selection should be done for the future projection data by applying SeasonSelect.

- -
summer_tmax_projection <- SeasonSelect(tmax_projection, season = 'JJA')
-
- -

The corresponding output is again a list of two elements: data and dates. The selected dates should be consistent: 92 summer days * (2100 - 2006 + 1) years = 8740 dates.

- -
> str(summer_tmax_projection)
-List of 2
- $ data : num [1:8740, 1:12, 1:8] 303 300 315 297 298 ...
- $ dates: POSIXct[1:8740], format: "2006-06-01 12:00:00" "2006-06-02 12:00:00"...
-
- -

2.3- Heatwaves duration

- -

The duration of the heatwaves is obtained by using the WaveDuration function from ClimProjDiags package. By setting spell.length = 5, the minimum length of a heatwave is defined as 5 days in this example. So, this function returns the number of days for each summer in which the maximum temperature is exceeding the 90th percentile of the reference period when they occur in a cluster of a minimum length of 5 consecutive days. This means that isolated events are not included.

- -
duration <- WaveDuration(data = summer_tmax_projection$data, 
-                         threshold = thresholds, op = ">", spell.length = 5, 
-                         dates = summer_tmax_projection$dates, 
-                         calendar = "proleptic_gregorian")
-
- -
> str(duration)
-List of 2
- $ result: num [1:95, 1:12, 1:8] 5 5 0 5 9 11 5 11 0 5 ...
- $ years : chr [1:95] "2006-JJA" "2007-JJA" "2008-JJA" "2009-JJA" ...
-
- -

The spatial representation of the maximum, mean and minimum duration of heatwaves can be plotted and saved in the working directory by running:

- -
breaks <- seq(0,92,4)
-
-PlotEquiMap(apply(duration$result, c(2, 3), max), lon = lon, lat = lat, 
-            brks = breaks, filled.continents = FALSE, title_scale = 0.8,
-            toptitle = "Heat wave duration", 
-            cols = heat.colors(length(breaks)-1)[(length(breaks)-1):1],
-            fileout = "SpatialHeatwave.png")
-
- -

Spatial distribution of Heatwave duration

- - - - diff --git a/man/Extremes.Rd b/man/Extremes.Rd index ddc57e8cab54aafb7acd93620d5bc51e7b512699..8042619d0ee0b4aaf0368b227cb3b80d3f73c552 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ -53,17 +53,17 @@ This routine compares data to the thresholds using the given operator, generatin } \examples{ ##Example synthetic data: -data <- 1:(2 * 3 * 372 * 1) -dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +data <- 1:(2 * 3 * 310 * 1) +dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', units = 'days since 1970-01-01 00:00:00', prec = 'double', dim = list(list(name = 'time', unlim = FALSE)))) -threshold = rep(40, 31) attr(time, "variables") <- metadata attr(data, 'Variables')$dat1$time <- time +threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) +res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, + max.missing.days = 5, ncores = NULL) +str(res) -a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, - max.missing.days = 5, ncores = NULL) -str(a) } diff --git a/man/ShiftLon.Rd b/man/ShiftLon.Rd index bf8e0de789399e25c652b65736c712ee29c67c9b..7d7a9d493b03d4aec16b54e6d19e69f2813a967e 100644 --- a/man/ShiftLon.Rd +++ b/man/ShiftLon.Rd @@ -40,7 +40,7 @@ lon <- array(data = 0:359, dim = c(lon = 360)) lat <- -90:90 ## lat does not change shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) - \donttest{ + \dontrun{ s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) } diff --git a/man/Threshold.Rd b/man/Threshold.Rd index 
a0fa10aec9ff6aab2e2993a9cbfb2be4338f309b..184cb912e15617daa4e01f9a188f5581343f1004 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -37,9 +37,9 @@ This function computes the threshold based on a quantile value for each day of t } \examples{ ##Example synthetic data: -data <- 1:(2 * 3 * 372 * 1) -dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +data <- 1:(2 * 3 * 310 * 1) +dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', units = 'days since 1970-01-01 00:00:00', prec = 'double', dim = list(list(name = 'time', unlim = FALSE)))) @@ -48,4 +48,5 @@ attr(data, 'Variables')$dat1$time <- time a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) str(a) + }