diff --git a/conf/archive.yml b/conf/archive.yml index 63abfb0ad9a4d203f76f5ff992d4f953baaee5b4..9d994ee511af7fe1646f355806a8bade6026d107 100644 --- a/conf/archive.yml +++ b/conf/archive.yml @@ -3,7 +3,9 @@ archive: src: "/esarchive/" System: - system5c3s: + system5c3s: + name: "ECMWF SEAS5" + institution: "European Centre for Medium-Range Weather Forecasts" src: "exp/ecmwf/system5c3s/" daily_mean: {"tas":"_f6h/", "rsds":"_s0-24h/", "prlr":"_s0-24h/", "sfcWind":"_f6h/", @@ -17,6 +19,8 @@ archive: calendar: "proleptic_gregorian" reference_grid: "/esarchive/exp/ecmwf/system5c3s/monthly_mean/tas_f6h/tas_20180501.nc" system7c3s: + name: "Météo-France System 7" + institution: "Météo-France" src: "exp/meteofrance/system7c3s/" monthly_mean: {"tas":"_f6h/", "g500":"_f12h/", "prlr":"_f24h/", "sfcWind": "_f6h/", @@ -27,6 +31,8 @@ archive: calendar: "proleptic_gregorian" reference_grid: "conf/grid_description/griddes_system7c3s.txt" system21_m1: + name: "DWD GCFS 2.1" + institution: "Deutscher Wetterdienst (DWD)" src: "exp/dwd/system21_m1/" monthly_mean: {"tas":"_f6h/", "prlr":"_f24h", "g500":"_f12h/", "sfcWind":"_f6h/", @@ -37,6 +43,8 @@ archive: calendar: "proleptic_gregorian" reference_grid: "conf/grid_description/griddes_system21_m1.txt" system35c3s: + name: "CMCC-SPS3.5" + institution: "Euro-Mediterranean Center on Climate Change (CMCC)" src: "exp/cmcc/system35c3s/" monthly_mean: {"tas":"_f6h/", "g500":"_f12h/", "prlr":"_f24h/", "sfcWind": "_f6h/", @@ -47,6 +55,8 @@ archive: calendar: "proleptic_gregorian" reference_grid: "conf/grid_description/griddes_system35c3s.txt" system2c3s: + name: "JMA System 2" + institution: "Japan Meteorological Agency (JMA)" src: "exp/jma/system2c3s/" monthly_mean: {"tas":"_f6h/", "prlr":"_f6h/", "tasmax":"_f6h/", "tasmin":"_f6h/"} @@ -56,6 +66,8 @@ archive: calendar: "proleptic_gregorian" reference_grid: "conf/grid_description/griddes_system2c3s.txt" eccc1: + name:
"ECCC CanCM4i" + institution: "Environment and Climate Change Canada (ECCC)" src: "exp/eccc/eccc1/" monthly_mean: {"tas":"_f6h/", "prlr":"_f6h/", "tasmax":"_f6h/", "tasmin":"_f6h/"} @@ -65,6 +77,8 @@ archive: calendar: "proleptic_gregorian" reference_grid: "conf/grid_description/griddes_eccc1.txt" glosea6_system600-c3s: + name: "UKMO GloSea 6 6.0" + institution: "Met Office" src: "exp/ukmo/glosea6_system600-c3s/" monthly_mean: {"tas":"_f6h/", "tasmin":"_f24h/", "tasmax":"_f24h/", "prlr":"_f24h/"} @@ -73,8 +87,10 @@ archive: hcst: 28 calendar: "proleptic_gregorian" reference_grid: "conf/grid_description/griddes_ukmo600.txt" - ncep-cfsv2: + ncep-cfsv2: + name: "NCEP CFSv2" + institution: "NOAA NCEP" #? src: "exp/ncep/cfs-v2/" monthly_mean: {"tas":"_f6h/", "prlr":"_f6h/", "tasmax":"_f6h/", "tasmin":"_f6h/"} @@ -84,7 +100,9 @@ archive: calendar: "gregorian" reference_grid: "conf/grid_description/griddes_ncep-cfsv2.txt" Reference: - era5: + era5: + name: "ERA5" + institution: "European Centre for Medium-Range Weather Forecasts" src: "recon/ecmwf/era5/" daily_mean: {"tas":"_f1h-r1440x721cds/", "rsds":"_f1h-r1440x721cds/", @@ -103,6 +121,8 @@ archive: calendar: "standard" reference_grid: "/esarchive/recon/ecmwf/era5/monthly_mean/tas_f1h-r1440x721cds/tas_201805.nc" era5land: + name: "ERA5-Land" + institution: "European Centre for Medium-Range Weather Forecasts" src: "recon/ecmwf/era5land/" daily_mean: {"tas":"_f1h/", "rsds":"_f1h/", "prlr":"_f1h/", "sfcWind":"_f1h/"} @@ -111,7 +131,9 @@ archive: "sfcWind":"_f1h/", "rsds":"_f1h/"} calendar: "proleptic_gregorian" reference_grid: "/esarchive/recon/ecmwf/era5land/daily_mean/tas_f1h/tas_201805.nc" - uerra: + uerra: + name: "ECMWF UERRA" + institution: "European Centre for Medium-Range Weather Forecasts" src: "recon/ecmwf/uerra_mescan/" daily_mean: {"tas":"_f6h/"} monthly_mean: {"tas":"_f6h/"} diff --git a/conf/archive_decadal.yml b/conf/archive_decadal.yml index
61e3f2b8eb5d781be670d619e2113c9e5619d229..2b74bff89ab2c27db06ba8e46a55b125fee21151 100644 --- a/conf/archive_decadal.yml +++ b/conf/archive_decadal.yml @@ -3,6 +3,8 @@ archive: System: # ---- EC-Earth3-i1: + name: "EC-Earth3-i1" + institution: "EC-Earth-Consortium" src: hcst: "exp/ecearth/a1ua/cmorfiles/DCPP/EC-Earth-Consortium/EC-Earth3/dcppA-hindcast/" fcst: @@ -23,6 +25,8 @@ archive: #NOTE: EC-Earth3-i2 the first file of each sdate has 2 time step only (Nov-Dec). # The rest files are Jan to Dec. EC-Earth3-i2: + name: "EC-Earth3-i2" + institution: "EC-Earth-Consortium" src: hcst: "exp/CMIP6/dcppA-hindcast/ec-earth3/DCPP/EC-Earth-Consortium/EC-Earth3/dcppA-hindcast/" fcst: @@ -41,6 +45,8 @@ archive: # ---- EC-Earth3-i4: + name: "EC-Earth3-i4" + institution: "EC-Earth-Consortium" src: hcst: "exp/ecearth/a3w5/original_files/cmorfiles/DCPP/EC-Earth-Consortium/EC-Earth3/dcppA-hindcast/" fcst: "exp/ecearth/a3w5/original_files/cmorfiles/DCPP/EC-Earth-Consortium/EC-Earth3/dcppB-forecast/" @@ -77,6 +83,8 @@ archive: # ---- HadGEM3-GC31-MM: + name: "HadGEM3-GC31-MM" + institution: src: hcst: "exp/CMIP6/dcppA-hindcast/HadGEM3-GC31-MM/DCPP/MOHC/HadGEM3-GC31-MM/dcppA-hindcast/" fcst: "exp/CMIP6/dcppB-forecast/HadGEM3-GC31-MM/DCPP/MOHC/HadGEM3-GC31-MM/dcppB-forecast/" @@ -92,10 +100,12 @@ archive: calendar: "360-day" member: r1i1p1f2,r2i1p1f2,r3i1p1f2,r4i1p1f2,r5i1p1f2,r6i1p1f2,r7i1p1f2,r8i1p1f2,r9i1p1f2,r10i1p1f2 initial_month: 11 - reference_grid: "/esarchive/exp/CMIP6/dcppA-hindcast/dcppA-hindcast/HadGEM3-GC31-MM/DCPP/MOHC/HadGEM3-GC31-MM/dcppA-hindcast/r1i1p1f2/Amon/tas/gr/v20200316/tas_Amon_HadGEM3_dcppA-hindcast_s2018-r1i1p1f2_gr_201811-202903.nc" #'r432x324' + reference_grid: "/esarchive/exp/CMIP6/dcppA-hindcast/HadGEM3-GC31-MM/DCPP/MOHC/HadGEM3-GC31-MM/dcppA-hindcast/r1i1p1f2/Amon/tas/gn/v20200417/tas_Amon_HadGEM3-GC31-MM_dcppA-hindcast_s1960-r1i1p1f2_gn_196011-196012.nc" #'r432x324' # ---- BCC-CSM2-MR: + name: "BCC-CSM2-MR" + institution: "Beijing Climate Center, 
Beijing 100081, China" src: hcst: "exp/CMIP6/dcppA-hindcast/BCC-CSM2-MR/DCPP/BCC/BCC-CSM2-MR/dcppA-hindcast/" fcst: @@ -115,8 +125,10 @@ archive: # ---- CanESM5: + name: "CanESM5" + institution: src: - hcst: "exp/canesm5/cmip6-dcppA-hindcast_i1p2/original_files/cmorfiles/DCPP/CCCma/CanESM5/dcppA-hindcast/" + hcst: "exp/canesm5/cmip6-dcppA-hindcast/original_files/cmorfiles/DCPP/CCCma/CanESM5/dcppA-hindcast/" fcst: "exp/canesm5/cmip6-dcppB-forecast_i1p2/original_files/cmorfiles/DCPP/CCCma/CanESM5/dcppB-forecast/" first_dcppB_syear: 2020 monthly_mean: @@ -130,10 +142,13 @@ archive: calendar: "365_day" member: r1i1p2f1,r2i1p2f1,r3i1p2f1,r4i1p2f1,r5i1p2f1,r6i1p2f1,r7i1p2f1,r8i1p2f1, r9i1p2f1, r10i1p2f1, r11i1p2f1,r12i1p2f1,r13i1p2f1,r14i1p2f1,r15i1p2f1,r16i1p2f1,r17i1p2f1,r18i1p2f1, r19i1p2f1, r20i1p2f1,r21i1p2f1,r22i1p2f1,r23i1p2f1,r24i1p2f1,r25i1p2f1,r26i1p2f1,r27i1p2f1,r28i1p2f1, r29i1p2f1, r30i1p2f1, r31i1p2f1,r32i1p2f1,r33i1p2f1,r34i1p2f1,r35i1p2f1,r36i1p2f1,r37i1p2f1,r38i1p2f1, r39i1p2f1, r40i1p2f1 initial_month: 1 #next year Jan - reference_grid: "/esarchive/exp/canesm5/cmip6-dcppA-hindcast_i1p2/original_files/cmorfiles/DCPP/CCCma/CanESM5/dcppA-hindcast/r1i1p2f1/Amon/tas/gn/v20190429/tas_Amon_CanESM5_dcppA-hindcast_s2008-r1i1p2f1_gn_200901-201812.nc" + reference_grid: "/esarchive/exp/canesm5/cmip6-dcppA-hindcast/original_files/cmorfiles/DCPP/CCCma/CanESM5/dcppA-hindcast/r1i1p2f1/Amon/tas/gn/v20190429/tas_Amon_CanESM5_dcppA-hindcast_s2008-r1i1p2f1_gn_200901-201812.nc" # ---- +#NOTE: no data there CESM1-1-CAM5-CMIP5: + name: "CESM1-1-CAM5-CMIP5" + institution: src: hcst: "exp/ncar/cesm-dple-dcppA-hindcast/cmorfiles/DCPP/NCAR/CESM1-1-CAM5-CMIP5/dcppA-hindcast" fcst: @@ -151,7 +166,10 @@ archive: reference_grid: "/esarchive/exp/ncar/cesm-dple-dcppA-hindcast/cmorfiles/DCPP/NCAR/CESM1-1-CAM5-CMIP5/dcppA-hindcast/r1i1p1f1/Amon/tas/gn/v20200101/tas_Amon_CESM1-1-CAM5-CMIP5_dcppA-hindcast_s2008-r1i1p1f1_gn_200811-201812.nc" # ---- +#NOTE: in tapes CMCC-CM2-SR5: + name: 
"CMCC-CM2-SR5" + institution: "Euro-Mediterranean Center on Climate Change (CMCC)" src: hcst: "exp/CMIP6/dcppA-hindcast/CMCC-CM2-SR5/DCPP/CMCC/CMCC-CM2-SR5/dcppA-hindcast/" fcst: "exp/CMIP6/dcppB-forecast/CMCC-CM2-SR5/DCPP/CMCC/CMCC-CM2-SR5/dcppB-forecast/" @@ -170,6 +188,8 @@ archive: # ---- FGOALS-f3-L: + name: "FGOALS-f3-L" + institution: "Chinese Academy of Sciences, Beijing 100029, China" src: hcst: "exp/CMIP6/dcppA-hindcast/FGOALS-f3-L/DCPP/CAS/FGOALS-f3-L/dcppA-hindcast/" fcst: "exp/CMIP6/dcppB-forecast/FGOALS-f3-L/DCPP/CAS/FGOALS-f3-L/dcppB-forecast/" @@ -189,6 +209,8 @@ archive: # ---- IPSL-CM6A-LR: + name: "IPSL-CM6A-LR" + institution: "IPSL" src: hcst: "exp/CMIP6/dcppA-hindcast/IPSL-CM6A-LR/DCPP/IPSL/IPSL-CM6A-LR/dcppA-hindcast/" fcst: @@ -206,6 +228,8 @@ archive: # ---- MIROC6: + name: "MIROC6" + institution: "MIROC" src: hcst: "exp/CMIP6/dcppA-hindcast/MIROC6/DCPP/MIROC/MIROC6/dcppA-hindcast/" fcst: @@ -223,6 +247,8 @@ archive: # ---- MPI-ESM1.2-HR: + name: "MPI-ESM1.2-HR" + institution: "Max-Planck-Institute for Meteorology" src: hcst: "exp/CMIP6/dcppA-hindcast/MPI-ESM1-2-HR/DCPP/MPI-M/MPI-ESM1-2-HR/dcppA-hindcast/" fcst: @@ -240,6 +266,8 @@ archive: # ---- MPI-ESM1.2-LR: + name: "MPI-ESM1.2-LR" + institution: "Max-Planck-Institute for Meteorology" src: hcst: "exp/CMIP6/dcppA-hindcast/MPI-ESM1-2-LR/DCPP/MPI-M/MPI-ESM1-2-LR/dcppA-hindcast/" fcst: @@ -257,6 +285,8 @@ archive: # ---- MRI-ESM2-0: + name: "MRI-ESM2-0" + institution: "Meteorological Research Institute, Japan" src: hcst: "exp/CMIP6/dcppA-hindcast/MRI-ESM2-0/DCPP/MRI/MRI-ESM2-0/dcppA-hindcast/" fcst: @@ -275,6 +305,8 @@ archive: # ---- #NOTE: NorCPM1-i1 and i2 are under the same directory NorCPM1-i1: + name: "NorCPM1-i1" + institution: "NCC" src: hcst: "exp/CMIP6/dcppA-hindcast/NorCPM1/DCPP/NCC/NorCPM1/dcppA-hindcast/" fcst: @@ -284,14 +316,16 @@ archive: version: {"tas":"v20200320", "pr":"v20200320", "psl":"v20200320"} daily_mean: grid: {"pr":"gn", "tas":"gn", "tasmax":"gn", "tasmin":"gn"} - version: {"pr":"v20191005", "tas":"v20200320", "tasmax":"v20191005", "tasmin":"v20191005"} +
version: {"pr":"v20191005", "tas":"v20191029", "tasmax":"v20191005", "tasmin":"v20191005"} calendar: "noleap" member: r1i1p1f1,r2i1p1f1,r3i1p1f1,r4i1p1f1,r5i1p1f1,r6i1p1f1,r7i1p1f1,r8i1p1f1,r9i1p1f1,r10i1p1f1 initial_month: 10 - reference_grid: "/esarchive/exp/CMIP6/dcppA-hindcast/NorCPM1/DCPP/NCC/NorCPM1/dcppA-hindcast/r1i1p1f1/Amon/tas/gn/v20200320/tas_Amon_NorCPM1_dcppA-hindcast_s2008-r1i1p1f1_gn_200810-201812.nc" + reference_grid: "/esarchive/exp/CMIP6/dcppA-hindcast/NorCPM1/DCPP/NCC/NorCPM1/dcppA-hindcast/r1i1p1f1/Amon/tas/gn/v20191029/tas_Amon_NorCPM1_dcppA-hindcast_s2008-r1i1p1f1_gn_200810-201812.nc" # ---- NorCPM1-i2: + name: "NorCPM1-i2" + institution: "NCC" src: hcst: "exp/CMIP6/dcppA-hindcast/NorCPM1/DCPP/NCC/NorCPM1/dcppA-hindcast/" fcst: @@ -312,6 +346,8 @@ archive: Reference: GHCNv4: + name: "GHCNv4" + institution: "NOAA" src: "obs/noaa/ghcn_v4/" monthly_mean: {"tas":"", "tasanomaly":""} daily_mean: @@ -319,6 +355,8 @@ archive: reference_grid: "/esarchive/obs/noaa/ghcn_v4/monthly_mean/tasanomaly/tasanomaly_201811.nc" # ---- ERA5: + name: "ERA5" + institution: "European Centre for Medium-Range Weather Forecasts" src: "recon/ecmwf/era5/" monthly_mean: {"tas":"_f1h-r1440x721cds", "prlr":"_f1h-r1440x721cds", "psl":"_f1h-r1440x721cds", "tos":"_f1h-r1440x721cds"} daily_mean: {"tas":"_f1h-r1440x721cds/", "rsds":"_f1h-r1440x721cds/", @@ -333,6 +371,8 @@ archive: # ---- JRA-55: + name: "JRA-55" + institution: "Japan Meteorological Agency" src: "recon/jma/jra55/" monthly_mean: {"tas":"_f6h", "psl":"_f6h", "tos":"", "pr":"_s0-3h", "prlr":"_s0-3h"} daily_mean: {"tas":"_f6h", "psl":"_f6h", "prlr":"_s0-3h", "sfcWind":"_f6h"} @@ -341,14 +381,18 @@ archive: # ---- GISTEMPv4: + name: "GISTEMPv4" + institution: "NASA Goddard Institute for Space Studies" src: "obs/noaa-nasa/ghcnersstgiss/" monthly_mean: {"tasanomaly":""} daily_mean: calendar: "standard" - reference_grid: "/esarchive/obs/noaa-nasa/ghcnersstgiss/monthly_mean/tasanomaly_200811.nc" + reference_grid:
"/esarchive/obs/noaa-nasa/ghcnersstgiss/monthly_mean/tasanomaly/tasanomaly_200811.nc" # ---- HadCRUT4: + name: "HadCRUT4" + institution: "Met Office Hadley Centre / Climatic Research Unit, University of East Anglia" src: "obs/ukmo/hadcrut_v4.6/" monthly_mean: {"tasanomaly":""} daily_mean: @@ -357,9 +401,11 @@ archive: # ---- HadSLP2: + name: "HadSLP2" + institution: src: "obs/ukmo/hadslp_v2/" monthly_mean: {"psl":""} daily_mean: calendar: "proleptic_gregorian" - reference_grid: "/esarchive/obs/ukmo/hadslp_v2/monthly_mean/psl_200811.nc" + reference_grid: "/esarchive/obs/ukmo/hadslp_v2/monthly_mean/psl/psl_200811.nc" diff --git a/conf/variable-dictionary.yml b/conf/variable-dictionary.yml index f535c4e1295b4527c7634ada2118138f1bacedb1..5125215418cc5294fed85349b09b2392ee16ffae 100644 --- a/conf/variable-dictionary.yml +++ b/conf/variable-dictionary.yml @@ -1,41 +1,166 @@ vars: +## NOTE: The units field in this file corresponds to CMOR standards. +## Some variables in esarchive may have different units than stated here. +## Use with caution. 
# ECVs tas: units: "K" long_name: "Near-Surface Air Temperature" standard_name: "air_temperature" + accum: no + tos: + units: "degC" + long_name: "Sea Surface Temperature" + standard_name: "sea_surface_temperature" + accum: no # outname: "t2" tasmax: units: "K" long_name: "Maximum Near-Surface Air Temperature" standard_name: "air_temperature" + accum: no tasmin: units: "K" long_name: "Minimum Near-Surface Air Temperature" standard_name: "air_temperature" + accum: no + ts: + units: "K" + long_name: "Surface Temperature" + standard_name: "surface_temperature" + accum: no sfcWind: units: "m s-1" long_name: "Near-Surface Wind Speed" standard_name: "wind_speed" + accum: no + sfcWindmax: + units: "m s-1" + long_name: "Daily Maximum Near-Surface Wind Speed" + standard_name: "wind_speed" + accum: no # outname: "wind" rsds: units: "W m-2" long_name: "Surface Downwelling Shortwave Radiation" standard_name: "surface_downwelling_shortwave_flux_in_air" positive: "down" + accum: yes # outname: "rswin" prlr: - units: "mm" + units: "mm/day" long_name: "Total precipitation" standard_name: "total_precipitation_flux" #? 
Not in CF + accum: yes # outname: "acprec" g500: units: "m2 s-2" long_name: "Geopotential" standard_name: "geopotential" - + accum: no + pr: + units: "kg m-2 s-1" + long_name: "Precipitation" + standard_name: "precipitation_flux" + accum: yes + prc: + units: "kg m-2 s-1" + long_name: "Convective Precipitation" + standard_name: "convective_precipitation_flux" + accum: yes + psl: + units: "Pa" + long_name: "Sea Level Pressure" + standard_name: "air_pressure_at_mean_sea_level" + accum: no + clt: + units: "%" + long_name: "Total Cloud Cover Percentage" + standard_name: "cloud_area_fraction" + accum: no + hurs: + units: "%" + long_name: "Near-Surface Relative Humidity" + standard_name: "relative_humidity" + accum: no + hursmin: + units: "%" + long_name: "Daily Minimum Near-Surface Relative Humidity" + standard_name: "relative_humidity" + accum: no + hursmax: + units: "%" + long_name: "Daily Maximum Near-Surface Relative Humidity" + standard_name: "relative_humidity" + accum: no + hfls: + units: "W m-2" + long_name: "Surface Upward Latent Heat Flux" + standard_name: "surface_upward_latent_heat_flux" + accum: no + huss: + units: "1" + long_name: "Near-Surface Specific Humidity" + standard_name: "specific_humidity" + accum: no + rsut: + units: "W m-2" + long_name: "TOA Outgoing Shortwave Radiation" + standard_name: "toa_outgoing_shortwave_flux" + accum: no + rlut: + units: "W m-2" + long_name: "TOA Outgoing Longwave Radiation" + standard_name: "toa_outgoing_longwave_flux" + accum: no + rsdt: + units: "W m-2" + long_name: "TOA Incident Shortwave Radiation" + standard_name: "toa_incoming_shortwave_flux" + accum: no + ta: + units: "K" + long_name: "Air Temperature" + standard_name: "air_temperature" + accum: no + ua: + units: "m s-1" + long_name: "Eastward Wind" + standard_name: "eastward_wind" + accum: no + uas: + units: "m s-1" + long_name: "Eastward Near-Surface Wind" + standard_name: "eastward_wind" + accum: no + va: + units: "m s-1" + long_name: "Northward Wind" + 
standard_name: "northward_wind" + accum: no + vas: + units: "m s-1" + long_name: "Northward Near-Surface Wind" + standard_name: "northward_wind" + accum: no + zg: + units: "m" + long_name: "Geopotential Height" + standard_name: "geopotential_height" + accum: no + evspsbl: + units: "kg m-2 s-1" + long_name: "Evaporation Including Sublimation and Transpiration" + standard_name: "water_evapotranspiration_flux" + accum: no + hfss: + units: "W m-2" + long_name: "Surface Upward Sensible Heat Flux" + standard_name: "surface_upward_sensible_heat_flux" + accum: no + # Coordinates coords: longitude: @@ -48,6 +173,7 @@ coords: standard_name: "latitude" long_name: "Latitude" axis: "Y" +## TODO: Add plevels # Skill metrics metrics: diff --git a/modules/Loading/Loading.R b/modules/Loading/Loading.R index ee4381a03e05dc0f92d6e8cc31b3024cc541b7b9..a93be8cad6c4406664e94ce8c13246b96e14eeef 100644 --- a/modules/Loading/Loading.R +++ b/modules/Loading/Loading.R @@ -84,16 +84,19 @@ load_datasets <- function(recipe_file) { # ----------- obs.path <- paste0(archive$src, - obs.dir, store.freq, "/$var$", - reference_descrip[[store.freq]][[variable]],"$var$_$file_date$.nc") + obs.dir, store.freq, "/$var$", + reference_descrip[[store.freq]][[variable]], + "$var$_$file_date$.nc") hcst.path <- paste0(archive$src, - hcst.dir, store.freq, "/$var$", - exp_descrip[[store.freq]][[variable]], "$var$_$file_date$.nc") + hcst.dir, store.freq, "/$var$", + exp_descrip[[store.freq]][[variable]], + "$var$_$file_date$.nc") fcst.path <- paste0(archive$src, - hcst.dir, store.freq, "/$var$", - exp_descrip[[store.freq]][[variable]], "$var$_$file_date$.nc") + hcst.dir, store.freq, "/$var$", + exp_descrip[[store.freq]][[variable]], + "$var$_$file_date$.nc") # Define regrid parameters: #------------------------------------------------------------------- @@ -258,28 +261,28 @@ load_datasets <- function(recipe_file) { dim(dates) <- dim(dates_file) obs <- Start(dat = obs.path, - var = variable, - file_date =
sort(unique(dates_file)), - time = dates, - time_var = 'time', - time_across = 'file_date', - merge_across_dims = TRUE, - merge_across_dims_narm = TRUE, - latitude = values(list(lats.min, lats.max)), - latitude_reorder = Sort(), - longitude = values(list(lons.min, lons.max)), - longitude_reorder = circularsort, - transform = regrid_params$obs.transform, - transform_params = list(grid = regrid_params$obs.gridtype, - method = regrid_params$obs.gridmethod), - transform_vars = c('latitude', 'longitude'), - synonims = list(latitude = c('lat','latitude'), - longitude = c('lon','longitude')), - return_vars = list(latitude = 'dat', - longitude = 'dat', - time = 'file_date'), - split_multiselected_dims = TRUE, - retrieve = TRUE) + var = variable, + file_date = sort(unique(dates_file)), + time = dates, + time_var = 'time', + time_across = 'file_date', + merge_across_dims = TRUE, + merge_across_dims_narm = TRUE, + latitude = values(list(lats.min, lats.max)), + latitude_reorder = Sort(), + longitude = values(list(lons.min, lons.max)), + longitude_reorder = circularsort, + transform = regrid_params$obs.transform, + transform_params = list(grid = regrid_params$obs.gridtype, + method = regrid_params$obs.gridmethod), + transform_vars = c('latitude', 'longitude'), + synonims = list(latitude = c('lat','latitude'), + longitude = c('lon','longitude')), + return_vars = list(latitude = 'dat', + longitude = 'dat', + time = 'file_date'), + split_multiselected_dims = TRUE, + retrieve = TRUE) } # Adds ensemble dim to obs (for consistency with hcst/fcst) @@ -302,7 +305,36 @@ load_datasets <- function(recipe_file) { } } - + # Remove negative values in accumulative variables + dictionary <- read_yaml("conf/variable-dictionary.yml") + if (dictionary$vars[[variable]]$accum) { + info(logger, "Accumulated variable: setting negative values to zero.") + obs$data[obs$data < 0] <- 0 + hcst$data[hcst$data < 0] <- 0 + if (!is.null(fcst)) { + fcst$data[fcst$data < 0] <- 0 + } + } + + # Convert prlr from 
m/s to mm/day + ## TODO: Make a unit conversion function? + if (variable == "prlr") { + # Verify that the units are m/s and the same in obs and hcst + if ((attr(obs$Variable, "variable")$units == + attr(hcst$Variable, "variable")$units) && + (attr(obs$Variable, "variable")$units == "m s-1")) { + + info(logger, "Converting precipitation from m/s to mm/day.") + obs$data <- obs$data*86400*1000 + attr(obs$Variable, "variable")$units <- "mm/day" + hcst$data <- hcst$data*86400*1000 + attr(hcst$Variable, "variable")$units <- "mm/day" + if (!is.null(fcst)) { + fcst$data <- fcst$data*86400*1000 + attr(fcst$Variable, "variable")$units <- "mm/day" + } + } + } # Print a summary of the loaded data for the user, for each object data_summary(hcst, store.freq) @@ -361,16 +393,6 @@ load_datasets <- function(recipe_file) { ############################################################################ ############################################################################ - ## TODO: we need to define accumulated vars - #filters negative values in accum vars - #if (accum){ - # obs$data[obs$data < 0 ] <- 0 - # hcst$data[hcst$data < 0 ] <- 0 - # if (!is.null(fcst)){ - # fcst$data[fcst$data < 0 ] <- 0 - # } - #} - return(list(hcst = hcst, fcst = fcst, obs = obs)) } diff --git a/modules/Loading/Loading_decadal.R b/modules/Loading/Loading_decadal.R index 7f99ac68b130a9fa2cd1073a645feac4872531ab..9c4bb33dbf9805c3719c7003997fa6e9b1a70f7a 100644 --- a/modules/Loading/Loading_decadal.R +++ b/modules/Loading/Loading_decadal.R @@ -23,12 +23,21 @@ source("tools/tmp/as.s2dv_cube.R") load_datasets <- function(recipe_file) { recipe <- read_yaml(recipe_file) - recipe$filename <- recipe_file + recipe$filepath <- recipe_file + recipe$name <- tools::file_path_sans_ext(basename(recipe_file)) + archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$archive # Print Start() info or not DEBUG <- FALSE + ## TODO: this should come from the main script + # Create output folder and log:
+ logger <- prepare_outputs(recipe = recipe) + folder <- logger$foldername + log_file <- logger$logname + logger <- logger$logger + #------------------------- # Read from recipe: #------------------------- @@ -103,7 +112,6 @@ load_datasets <- function(recipe_file) { Start_default_arg_list <- list( dat = path_list, #file.path(hcst.path, hcst.files), var = variable, - ensemble = member, syear = paste0(sdates_hcst), chunk = 'all', chunk_depends = 'syear', @@ -115,6 +123,7 @@ load_datasets <- function(recipe_file) { latitude_reorder = Sort(decreasing = TRUE), longitude = values(list(lons.min, lons.max)), longitude_reorder = circularsort, + ensemble = member, transform = regrid_params$fcst.transform, transform_extra_cells = 2, transform_params = list(grid = regrid_params$fcst.gridtype, @@ -144,7 +153,7 @@ load_datasets <- function(recipe_file) { # Reshape and reorder dimensions ## dat should be 1, syear should be length of dat; reorder dimensions dim(hcst) <- c(dat = 1, syear = as.numeric(dim(hcst))[1], dim(hcst)[2:6]) - hcst <- s2dv::Reorder(hcst, c('dat', 'var', 'ensemble', 'syear', 'time', 'latitude', 'longitude')) + hcst <- s2dv::Reorder(hcst, c('dat', 'var', 'syear', 'time', 'latitude', 'longitude', 'ensemble')) # Manipulate time attr because Start() cannot read it correctly wrong_time_attr <- attr(hcst, 'Variables')$common$time # dim: [time], the first syear only @@ -161,11 +170,12 @@ load_datasets <- function(recipe_file) { tmp_time_attr <- attr(hcst, 'Variables')$common$time # change syear to c(sday, sweek, syear) - dim(hcst) <- c(dim(hcst)[1:3], sday = 1, sweek = 1, dim(hcst)[4:7]) - if (!identical(dim(tmp_time_attr), dim(hcst)[6:7])) { + # dim(hcst) should be [dat, var, sday, sweek, syear, time, latitude, longitude, ensemble] + dim(hcst) <- c(dim(hcst)[1:2], sday = 1, sweek = 1, dim(hcst)[3:7]) + if (!identical(dim(tmp_time_attr), dim(hcst)[c('syear', 'time')])) { stop("hcst has problem in matching data and time attr dimension.") } - dim(attr(hcst, 
'Variables')$common$time) <- c(sday = 1, sweek = 1, dim(hcst)[6:7]) + dim(attr(hcst, 'Variables')$common$time) <- c(sday = 1, sweek = 1, dim(tmp_time_attr)) #TODO: as.s2dv_cube() needs to be improved to recognize "variable" is under $dat1 if (multi_path) { @@ -213,8 +223,9 @@ load_datasets <- function(recipe_file) { # Reshape and reorder dimensions ## dat should be 1, syear should be length of dat; reorder dimensions + ## dim(fcst) should be [dat, var, syear, time, latitude, longitude, ensemble] dim(fcst) <- c(dat = 1, syear = as.numeric(dim(fcst))[1], dim(fcst)[2:6]) - fcst <- s2dv::Reorder(fcst, c('dat', 'var', 'ensemble', 'syear', 'time', 'latitude', 'longitude')) + fcst <- s2dv::Reorder(fcst, c('dat', 'var', 'syear', 'time', 'latitude', 'longitude', 'ensemble')) # Manipulate time attr because Start() cannot read it correctly wrong_time_attr <- attr(fcst, 'Variables')$common$time # dim: [time], the first syear only @@ -231,11 +242,12 @@ load_datasets <- function(recipe_file) { tmp_time_attr <- attr(fcst, 'Variables')$common$time # change syear to c(sday, sweek, syear) - dim(fcst) <- c(dim(fcst)[1:3], sday = 1, sweek = 1, dim(fcst)[4:7]) - if (!identical(dim(tmp_time_attr), dim(fcst)[6:7])) { + # dim(fcst) should be [dat, var, sday, sweek, syear, time, latitude, longitude, ensemble] + dim(fcst) <- c(dim(fcst)[1:2], sday = 1, sweek = 1, dim(fcst)[3:7]) + if (!identical(dim(tmp_time_attr), dim(fcst)[c('syear', 'time')])) { stop("fcst has problem in matching data and time attr dimension.") } - dim(attr(fcst, 'Variables')$common$time) <- c(sday = 1, sweek = 1, dim(fcst)[6:7]) + dim(attr(fcst, 'Variables')$common$time) <- c(sday = 1, sweek = 1, dim(tmp_time_attr)) #TODO: as.s2dv_cube() needs to be improved to recognize "variable" is under $dat1 if (multi_path) { @@ -247,7 +259,8 @@ load_datasets <- function(recipe_file) { fcst <- as.s2dv_cube(fcst) ) - if (!identical(dim(hcst$data)[-6], dim(fcst$data)[-6])) { + # Only syear could be different + if 
(!identical(dim(hcst$data)[-5], dim(fcst$data)[-5])) { stop("hcst and fcst do not share the same dimension structure.") } @@ -334,20 +347,12 @@ load_datasets <- function(recipe_file) { # sday sweek syear time # 1 1 2 14 - -# # TODO: Reorder obs dims to match hcst dims? -# # Adds ensemble dim to obs (for consistency with hcst/fcst) -# default_dims <- c(dat = 1, var = 1, sweek = 1, -# sday = 1, syear = 1, time = 1, -# latitude = 1, longitude = 1, ensemble = 1) -# default_dims[names(dim(obs))] <- dim(obs) -# dim(obs) <- default_dims - - if (!identical(dim(obs), dim(hcst$data)[-3])) { + # Only ensemble dim could be different + if (!identical(dim(obs), dim(hcst$data)[-9])) { stop("obs and hcst dimensions do not match.") } # Add ensemble dim to obs - dim(obs) <- c(dim(obs)[1:2], ensemble = 1, dim(obs)[3:8]) + dim(obs) <- c(dim(obs), ensemble = 1) # Change class from startR_array to s2dv_cube suppressWarnings( @@ -356,19 +361,7 @@ load_datasets <- function(recipe_file) { #------------------------------------------- -# Step 4. Print the summary of data -#------------------------------------------- - - # Print a summary of the loaded data for the user, for each object - data_summary(hcst, store.freq) - data_summary(obs, store.freq) - if (!is.null(fcst)) { - data_summary(fcst, store.freq) - } - - -#------------------------------------------- -# Step 5. Verify the consistance btwn hcst and obs +# Step 4. Verify the consistance between data #------------------------------------------- # dimension if (any(!names(dim(obs$data)) %in% names(dim(hcst$data)))) { @@ -413,5 +406,53 @@ load_datasets <- function(recipe_file) { stop("hcst and fcst don't share the same longitude.") } + +#------------------------------------------- +# Step 5. 
Tune data +#------------------------------------------- + # Remove negative values in accumulative variables + dictionary <- read_yaml("conf/variable-dictionary.yml") + if (dictionary$vars[[variable]]$accum) { + info(logger, " Accumulated variable: setting negative values to zero.") + obs$data[obs$data < 0] <- 0 + hcst$data[hcst$data < 0] <- 0 + if (!is.null(fcst)) { + fcst$data[fcst$data < 0] <- 0 + } + } + # Convert precipitation to mm/day + ## TODO: Make a function? + if (variable == "prlr") { + # Verify that the units are m/s and the same in obs and hcst + if ((attr(obs$Variable, "variable")$units == + attr(hcst$Variable, "variable")$units) && + (attr(obs$Variable, "variable")$units == "m s-1")) { + + info(logger, "Converting precipitation from m/s to mm/day.") + obs$data <- obs$data*86400*1000 + attr(obs$Variable, "variable")$units <- "mm/day" + hcst$data <- hcst$data*86400*1000 + attr(hcst$Variable, "variable")$units <- "mm/day" + if (!is.null(fcst)) { + fcst$data <- fcst$data*86400*1000 + attr(fcst$Variable, "variable")$units <- "mm/day" + } + } + } + +#------------------------------------------- +# Step 6.
Print summary +#------------------------------------------- + + # Print a summary of the loaded data for the user, for each object + data_summary(hcst, store.freq) + data_summary(obs, store.freq) + if (!is.null(fcst)) { + data_summary(fcst, store.freq) + } + + print("##### DATA LOADING COMPLETED SUCCESSFULLY #####") + + return(list(hcst = hcst, fcst = fcst, obs = obs)) } diff --git a/modules/Loading/testing_recipes/recipe_5.yml b/modules/Loading/testing_recipes/recipe_system2c3s-prlr-nofcst.yml similarity index 100% rename from modules/Loading/testing_recipes/recipe_5.yml rename to modules/Loading/testing_recipes/recipe_system2c3s-prlr-nofcst.yml diff --git a/modules/Loading/testing_recipes/recipe_system5c3s-rsds.yml b/modules/Loading/testing_recipes/recipe_system5c3s-rsds.yml new file mode 100644 index 0000000000000000000000000000000000000000..ca52e8cc161ece1eb0a23e1be391df1b2318bf5f --- /dev/null +++ b/modules/Loading/testing_recipes/recipe_system5c3s-rsds.yml @@ -0,0 +1,46 @@ +Description: + Author: V. 
Agudetse + +Analysis: + Horizon: Seasonal + Variables: + name: rsds + freq: monthly_mean + Datasets: + System: + name: system5c3s + Multimodel: False + Reference: + name: era5 + Time: + sdate: '1101' + fcst_year: '2020' + hcst_start: '1993' + hcst_end: '2016' + ftime_min: 1 + ftime_max: 3 + Region: + latmin: -10 + latmax: 10 + lonmin: 0 + lonmax: 20 + Regrid: + method: bilinear + type: to_system + Workflow: + Calibration: + method: mse_min + Skill: + metric: RPS RPSS CRPS CRPSS FRPSS BSS10 BSS90 EnsCorr Corr + Probabilities: + percentiles: [[1/3, 2/3], [1/10, 9/10], [1/4, 2/4, 3/4]] + Indicators: + index: no + ncores: 1 + remove_NAs: yes + Output_format: S2S4E +Run: + Loglevel: INFO + Terminal: yes + output_dir: /esarchive/scratch/vagudets/repos/auto-s2s/out-logs/ + code_dir: /esarchive/scratch/vagudets/repos/auto-s2s/ diff --git a/modules/Loading/testing_recipes/recipe_2.yml b/modules/Loading/testing_recipes/recipe_system5c3s-tas.yml similarity index 100% rename from modules/Loading/testing_recipes/recipe_2.yml rename to modules/Loading/testing_recipes/recipe_system5c3s-tas.yml diff --git a/modules/Loading/testing_recipes/recipe_system7c3s-prlr.yml b/modules/Loading/testing_recipes/recipe_system7c3s-prlr.yml new file mode 100644 index 0000000000000000000000000000000000000000..197c109c0e5dbb7f2419131cc2f4a328995dc34f --- /dev/null +++ b/modules/Loading/testing_recipes/recipe_system7c3s-prlr.yml @@ -0,0 +1,46 @@ +Description: + Author: V. 
Agudetse + +Analysis: + Horizon: Seasonal + Variables: + name: prlr + freq: monthly_mean + Datasets: + System: + name: system7c3s + Multimodel: False + Reference: + name: era5 + Time: + sdate: '1101' + fcst_year: '2020' + hcst_start: '1993' + hcst_end: '2016' + ftime_min: 1 + ftime_max: 3 + Region: + latmin: -10 + latmax: 10 + lonmin: 0 + lonmax: 20 + Regrid: + method: bilinear + type: to_system + Workflow: + Calibration: + method: mse_min + Skill: + metric: RPS RPSS CRPS CRPSS FRPSS BSS10 BSS90 EnsCorr Corr + Probabilities: + percentiles: [[1/3, 2/3], [1/10, 9/10], [1/4, 2/4, 3/4]] + Indicators: + index: no + ncores: 1 + remove_NAs: no + Output_format: S2S4E +Run: + Loglevel: INFO + Terminal: yes + output_dir: /esarchive/scratch/vagudets/repos/auto-s2s/out-logs/ + code_dir: /esarchive/scratch/vagudets/repos/auto-s2s/ diff --git a/modules/Loading/testing_recipes/recipe_6.yml b/modules/Loading/testing_recipes/recipe_system7c3s-tas-specs.yml similarity index 100% rename from modules/Loading/testing_recipes/recipe_6.yml rename to modules/Loading/testing_recipes/recipe_system7c3s-tas-specs.yml diff --git a/modules/Loading/testing_recipes/recipe_4.yml b/modules/Loading/testing_recipes/recipe_system7c3s-tas.yml similarity index 98% rename from modules/Loading/testing_recipes/recipe_4.yml rename to modules/Loading/testing_recipes/recipe_system7c3s-tas.yml index 4e7896f526108a94abdda8672f000155d7522ccb..f83f91bb44f645ddbc84f6541f57fb82a09caa7b 100644 --- a/modules/Loading/testing_recipes/recipe_4.yml +++ b/modules/Loading/testing_recipes/recipe_system7c3s-tas.yml @@ -18,7 +18,7 @@ Analysis: hcst_start: '1993' hcst_end: '2016' ftime_min: 1 - ftime_max: 3 + ftime_max: 6 Region: latmin: -10 latmax: 10 diff --git a/modules/Loading/testing_recipes/recipe_1.yml b/modules/Loading/testing_recipes/recipe_tas-daily-regrid-to-reference.yml similarity index 100% rename from modules/Loading/testing_recipes/recipe_1.yml rename to 
modules/Loading/testing_recipes/recipe_tas-daily-regrid-to-reference.yml diff --git a/modules/Loading/testing_recipes/recipe_3.yml b/modules/Loading/testing_recipes/recipe_tas-daily-regrid-to-system.yml similarity index 100% rename from modules/Loading/testing_recipes/recipe_3.yml rename to modules/Loading/testing_recipes/recipe_tas-daily-regrid-to-system.yml diff --git a/modules/Saving/Saving.R b/modules/Saving/Saving.R index f50a06ff760742f199ccf3582dd21f688b4081ee..713741fb6435165f42d7d9a5b0dc3686c5edd857 100644 --- a/modules/Saving/Saving.R +++ b/modules/Saving/Saving.R @@ -76,18 +76,21 @@ save_data <- function(recipe, archive, data, } } -get_global_attributes <- function(recipe) { +get_global_attributes <- function(recipe, archive) { # Generates metadata of interest to add to the global attributes of the # netCDF files. parameters <- recipe$Analysis hcst_period <- paste0(parameters$Time$hcst_start, " to ", parameters$Time$hcst_end) current_time <- paste0(as.character(Sys.time()), " ", Sys.timezone()) + system_name <- parameters$Datasets$System$name + reference_name <- parameters$Datasets$Reference$name attrs <- list(reference_period = hcst_period, - institution = "BSC-CNS", - system = parameters$Datasets$System$name, - reference = parameters$Datasets$Reference$name, + institution_system = archive$System[[system_name]]$institution, + institution_reference = archive$Reference[[reference_name]]$institution, + system = system_name, + reference = reference_name, calibration_method = parameters$Workflow$Calibration$method, computed_on = current_time) @@ -162,7 +165,7 @@ save_forecast <- function(data_cube, variable <- data_cube$Variable$varName var.longname <- attr(data_cube$Variable, 'variable')$long_name - global_attributes <- get_global_attributes(recipe) + global_attributes <- get_global_attributes(recipe, archive) fcst.horizon <- tolower(recipe$Analysis$Horizon) store.freq <- recipe$Analysis$Variables$freq calendar <- 
archive$System[[global_attributes$system]]$calendar @@ -300,7 +303,7 @@ save_observations <- function(data_cube, variable <- data_cube$Variable$varName var.longname <- attr(data_cube$Variable, 'variable')$long_name - global_attributes <- get_global_attributes(recipe) + global_attributes <- get_global_attributes(recipe, archive) fcst.horizon <- tolower(recipe$Analysis$Horizon) store.freq <- recipe$Analysis$Variables$freq calendar <- archive$Reference[[global_attributes$reference]]$calendar @@ -445,7 +448,7 @@ save_metrics <- function(skill, } # Add global and variable attributes - global_attributes <- get_global_attributes(recipe) + global_attributes <- get_global_attributes(recipe, archive) attr(skill[[1]], 'global_attrs') <- global_attributes for (i in 1:length(skill)) { @@ -553,7 +556,7 @@ save_corr <- function(skill, } # Add global and variable attributes - global_attributes <- get_global_attributes(recipe) + global_attributes <- get_global_attributes(recipe, archive) attr(skill[[1]], 'global_attrs') <- global_attributes for (i in 1:length(skill)) { @@ -659,7 +662,7 @@ save_percentiles <- function(percentiles, } # Add global and variable attributes - global_attributes <- get_global_attributes(recipe) + global_attributes <- get_global_attributes(recipe, archive) attr(percentiles[[1]], 'global_attrs') <- global_attributes for (i in 1:length(percentiles)) { @@ -759,7 +762,7 @@ save_probabilities <- function(probs, variable <- data_cube$Variable$varName var.longname <- attr(data_cube$Variable, 'variable')$long_name - global_attributes <- get_global_attributes(recipe) + global_attributes <- get_global_attributes(recipe, archive) fcst.horizon <- tolower(recipe$Analysis$Horizon) store.freq <- recipe$Analysis$Variables$freq calendar <- archive$System[[global_attributes$system]]$calendar diff --git a/modules/Saving/export_2_nc-s2s4e.R b/modules/Saving/export_2_nc-s2s4e.R deleted file mode 100644 index 
abf526e5f105021b41ece65ff5b52be09b8e4f51..0000000000000000000000000000000000000000 --- a/modules/Saving/export_2_nc-s2s4e.R +++ /dev/null @@ -1,583 +0,0 @@ -library(easyNCDF) - -save_bias <- function(variable, - data, - fcst.sdate, - outfile, - leadtimes, - grid, - agg, - fcst.type) { - - lalo <- c('longitude', 'latitude') #decathlon subseasonal - - ## TODO: Sort out different aggregation cases - # if (tolower(agg) == "global") { - # data <- Reorder(data, c(lalo,'time')) - # } else { - # data <- Reorder(data, c('country', 'time')) - # } - - if (variable %in% c("tas", "tasmin", "tasmax")) { - obs <- data; units <- "ºC"; - var.longname <- "Temperature bias" - } else { - # Unit conversion - data.conv <- convert_data(list(fcst=data,test=data),variable,leadtimes,fcst.type,"forecast") - obs <- data.conv$data$fcst; units <- data.conv$units; - var.longname <- data.conv$var.longname - remove(data.conv) - } - - if (tolower(agg) == "country"){ - dims <- c('Country', 'time') - var.expname <- paste0(variable, '_country') - var.sdname <- paste0("Country-Aggregated ", var.longname) - } else { - dims <- c(lalo,'time') - var.expname <- get_outname(variable,VARS_DICT) - var.sdname <- var.longname - } - - metadata <- list(obs = list(name = var.expname, - standard_name = var.sdname, - units = units)) - attr(obs, 'variables') <- metadata - names(dim(obs)) <- dims - - times <- get_times(fcst.type, leadtimes, fcst.sdate) - time <- times$time - time_step <- times$time_step - - if (tolower(agg) == "country") { - - country <- get_countries(grid) - ArrayToNc(list(country, time, time_step, obs), outfile) - - } else { - - latlon <- get_latlon(grid$lat, grid$lon) - ArrayToNc(list(latlon$lat, latlon$lon, time, obs, time_step), outfile) - - } -} - -save_obs_country_file <- - function(variable, - obs, - fcst.sdate, - outfile, - fcst.type, - monthnames) { - - if (fcst.type == 'seasonal'){ - mask.path <- 'masks/mask_europe_system5_2.Rdata' - } else { - mask.path <- 
'masks/mask_europe_S2S_ecmwf.Rdata' - } - - load(mask.path) - - ifelse(exists("lonlat_dctln"), - lalo <- c('longitude','latitude'), #decathlon subseasonal - lalo <- c('latitude','longitude')) #no decathlon - - obs <- Reorder(obs, c(lalo,'time')) -# obs <- Reorder(obs, c('latitude','longitude','time')) - - obs.country <-Apply(data=list(pointdata=obs), -# target_dims=c('latitude','longitude'), - target_dims=lalo, - output_dims=c('country'), - mask.path=mask.path, - ncores=2, - split_factor=1, - fun = Country_mean)[[1]] - - vars <- yaml.load_file(VARS_DICT)$vars - units <- vars[[variable]]$units - var.longname <- vars[[variable]]$longname - - metadata <- list(obs.country = - list( - name = paste0(variable, "_country"), - standard_name = paste0(var.longname, " (Country-Aggregated)"), - units = units - ) - ) - - attr(obs.country, 'variables') <- metadata - names(dim(obs.country)) <- c('Country', 'time') - - times <- get_times(fcst.type, monthnames, fcst.sdate) - time <- times$time - time_step <- times$time_step - - country <- 1:length(europe.countries.iso) - dim(country) <- length(country) - metadata <- list( country = list( - standard_name = paste(europe.countries.iso, collapse=" "), - units = 'Country ISO 3166-1 alpha 3 Code')) #if region, these units are incorrect. 
- attr(country, 'variables') <- metadata - names(dim(country)) <- 'Country' - - - ArrayToNc(list(country,time,time_step,obs.country), - outfile) - - } - -save_obs <- function(variable, - data, - fcst.sdate, - outfile, - leadtimes, - grid, - agg, - fcst.type) { - - lalo <- c('longitude','latitude') #decathlon subseasonal - - if (tolower(agg) == "global"){ - data <- Reorder(data, c(lalo,'time')) - } else { - data <- Reorder(data, c('country', 'time')) - } - - data.conv <- convert_data(list(fcst=data,test=data),variable,leadtimes,fcst.type,"forecast") - obs <- data.conv$data$fcst; units <- data.conv$units; - var.longname <- data.conv$var.longname - remove(data.conv) - - if (tolower(agg) == "country"){ - dims <- c('Country', 'time') - var.expname <- paste0(variable, '_country') - var.sdname <- paste0("Country-Aggregated ", var.longname) - } else { - dims <- c(lalo,'time') - var.expname <- get_outname(variable,VARS_DICT) - var.sdname <- var.longname - } - - metadata <- list(obs = list(name = var.expname, - standard_name = var.sdname, - units = units)) - attr(obs, 'variables') <- metadata - names(dim(obs)) <- dims - - times <- get_times(fcst.type, leadtimes, fcst.sdate) - time <- times$time - time_step <- times$time_step - - if (tolower(agg) == "country") { - - country <- get_countries(grid) - ArrayToNc(list(country, time, time_step, obs), outfile) - - } else { - - latlon <- get_latlon(grid$lat, grid$lon) - ArrayToNc(list(latlon$lat, latlon$lon, time, obs, time_step), outfile) - - } -} - - -save_forecast <- function(variable, - fcst, - fcst.sdate, - outfile, - leadtimes, - grid, - agg, - fcst.type) { - - lalo <- c('longitude','latitude') #decathlon subseasonal - - if (tolower(agg) == "global"){ - fcst <- Reorder(fcst, c(lalo,'member', - 'time')) - } else { - fcst <- Reorder(fcst, c('country','member', 'time')) - } - - # Unit conversion - fcst.conv <- convert_data(list(fcst=fcst,test=fcst),variable,leadtimes,fcst.type,"forecast") - fcst <- fcst.conv$data$fcst; units <- 
fcst.conv$units; - var.longname <- fcst.conv$var.longname - remove(fcst.conv) - - if (tolower(agg) == "country"){ - dims <- c('Country', 'ensemble', 'time') - var.expname <- paste0(variable, '_country') - var.sdname <- paste0("Country-Aggregated ", var.longname) - } else { - dims <- c(lalo,'ensemble', 'time') - var.expname <- get_outname(variable,VARS_DICT) - var.sdname <- var.longname - } - - metadata <- list(fcst = list(name = var.expname, - standard_name = var.sdname, - units = units)) - attr(fcst, 'variables') <- metadata - names(dim(fcst)) <- dims - - times <- get_times(fcst.type, leadtimes, fcst.sdate) - time <- times$time - time_step <- times$time_step - - if (tolower(agg) == "country") { - - country <- get_countries(grid) - ArrayToNc(list(country, time, time_step, fcst), outfile) - - } else { - - latlon <- get_latlon(grid$lat, grid$lon) - ArrayToNc(list(latlon$lat, latlon$lon, time, fcst, time_step), outfile) - - } -} - -save_probs <- function(variable, - probs, - fcst.sdate, - outfile, - monthnames, - grid, - agg, - fcst.type) { - - lalo <- c('longitude','latitude') #decathlon subseasonal - - if (tolower(agg) == "global"){ - probs <- lapply(probs, function(x){ - Reorder(x, c('bin',lalo, 'time'))}) - } - - pbn <- Subset(probs$tercile, 'bin', list(1), drop='selected') - pn <- Subset(probs$tercile, 'bin', list(2), drop='selected') - pan <- Subset(probs$tercile, 'bin', list(3), drop='selected') - p10 <- Subset(probs$extreme, 'bin', list(1), drop='selected') - p90 <- Subset(probs$extreme, 'bin', list(3), drop='selected') - - pn.sdname <- paste('Probability below normal category ', sep=''); - pan.sdname <- paste('Probability above normal category ', sep=''); - pbn.sdname <- paste('Probability normal category ', sep=''); - p10.sdname <- paste('Probability below extreme category ', sep=''); - p90.sdname <- paste('Probability above extreme category ', sep=''); - - if (tolower(agg) == "country"){ - dims <- c('Country', 'time') - pn.sdanme <- 
paste0('Country-Aggregated ', pn.sdname) - pbn.sdanme <- paste0('Country-Aggregated ', pbn.sdname) - pan.sdanme <- paste0('Country-Aggregated ', pan.sdname) - p10.sdanme <- paste0('Country-Aggregated ', p10.sdname) - p90.sdanme <- paste0('Country-Aggregated ', p90.sdname) - } else { - dims <- c(lalo, 'time') - pn.sdanme <- paste0('Global ', pn.sdname) - pbn.sdanme <- paste0('Global ', pbn.sdname) - pan.sdanme <- paste0('Global ', pan.sdname) - p10.sdanme <- paste0('Global ', p10.sdname) - p90.sdanme <- paste0('Global ', p90.sdname) - } - - metadata <- list(pbn = list(name = 'prob_bn', - standard_name = pbn.sdname ), - pn = list(name = 'prob_n', - standard_name = pn.sdname), - pan = list(name = 'prob_an', - standard_name = pan.sdname), - p10 = list(name = 'prob_bp10', - standard_name = p10.sdname), - p90 = list(name = 'prob_ap90', - standard_name = p90.sdname)) - - attr(pbn, 'variables') <- metadata[1] - attr(pn, 'variables') <- metadata[2] - attr(pan, 'variables') <- metadata[3] - attr(p10, 'variables') <- metadata[4] - attr(p90, 'variables') <- metadata[5] - - names(dim(pbn)) <- dims - names(dim(pn)) <- dims - names(dim(pan)) <- dims - names(dim(p10)) <- dims - names(dim(p90)) <- dims - - times <- get_times(fcst.type, monthnames, fcst.sdate) - time <- times$time - time_step <- times$time_step - - if (tolower(agg) == "country") { - - country <- get_countries(grid) - ArrayToNc(list(country, time, pbn, pn, pan, p10, p90, time_step), outfile) - - } else { - - latlon <- get_latlon(grid$lat, grid$lon) - latitude <- latlon$lat; longitude <- latlon$lon - ArrayToNc(list(latitude, longitude, time, pbn, pn, pan, p10, p90, - time_step), outfile) - } - - } - -get_times <- function (fcst.type, leadtimes, sdate){ - - switch(tolower(fcst.type), - "seasonal" = {len <- length(leadtimes); ref <- 'months since '; - stdname <- paste(strtoi(leadtimes), collapse=", ")}, - "sub_obs" = {len <- 52; ref <- 'week of the year '; - stdname <- paste(strtoi(leadtimes), collapse=", ")}, - 
"subseasonal" = {len <- 4; ref <- 'weeks since '; - stdname <- ''} - ) - - time <- 1:len - dim(time) <- length(time) - #metadata <- list(time = list(standard_name = stdname, - metadata <- list(time = list( - units = paste0(ref, sdate, ' 00:00:00'))) - attr(time, 'variables') <- metadata - names(dim(time)) <- 'time' - - time_step <- 1 - dim(time_step) <- length(time_step) - metadata <- list(time_step = list(units = paste0( - ref, sdate, ' 00:00:00'))) - attr(time_step, 'variables') <- metadata - names(dim(time_step)) <- 'time_step' - - sdate <- 1:length(sdate) - dim(sdate) <- length(sdate) - metadata <- list(sdate = list(standard_name = paste(strtoi(sdate), collapse=", "), - units = paste0('Init date'))) - attr(sdate, 'variables') <- metadata - names(dim(sdate)) <- 'sdate' - - return(list(time_step=time_step, time=time, sdate=sdate)) - -} - -get_countries <- function(europe.countries.iso){ - - country <- 1:length(europe.countries.iso) - dim(country) <- length(country) - metadata <- list( country = list( - standard_name = paste(europe.countries.iso, collapse=" "), - units = 'Country ISO 3166-1 alpha 3 Code')) - attr(country, 'variables') <- metadata - names(dim(country)) <- 'Country' - return(country) - -} - -get_latlon <- function(lat, lon){ - - longitude <- lon - dim(longitude) <- length(longitude) - metadata <- list(longitude = list(units = 'degrees_east')) - attr(longitude, 'variables') <- metadata - names(dim(longitude)) <- 'longitude' - - latitude <- lat - dim(latitude) <- length(latitude) - metadata <- list(latitude = list(units = 'degrees_north')) - attr(latitude, 'variables') <- metadata - names(dim(latitude)) <- 'latitude' - - return(list(lat=latitude,lon=longitude)) - -} - -save_metrics <- function(variable, - skill, - fcst.sdate, - grid, - outfile, - monthnames, - fcst.type, - agg) -{ - - lalo <- c('longitude', 'latitude') - - ## TODO: Sort out aggregation - if (tolower(agg) == "global") { - skill <- lapply(skill, function(x){ - Reorder(x[[1]], c(lalo, 
'time'))}) - } - - for (i in 1:length(skill)) { - - metric <- names(skill[i]) - if (tolower(agg) == "country"){ - sdname <- paste0(names(metric), " region-aggregated metric") - dims <- c('Country', 'time') - } else { - sdname <- paste0(names(metric), " grid point metric") - dims <- c(lalo, 'time') - } - metadata <- list(name = metric, standard_name = sdname) - - attr(skill[i], 'variables') <- metadata - names(dim(skill[[i]])) <- dims - } - - times <- get_times(fcst.type, monthnames, fcst.sdate) - time <- times$time - time_step <- times$time_step - - if (tolower(agg) == "country") { - - country <- get_countries(grid) - ArrayToNc(append(country, time, skill, time_step), outfile) - - } else { - - latlon <- get_latlon(grid$lat, grid$lon) - vars <- list(latlon$lat, latlon$lon, time) - vars <- c(vars, skill, list(time_step)) - ArrayToNc(vars, outfile) - } -} - -convert_seasosnal_prlr <- function(data2convert, leadtimes,filetype){ - - ind <- 1:length(leadtimes) - #dates <- paste0(leadtimes,"01") - # computes the last day of the month - lastday <- sapply(ind, function(x) - {as.integer(substr((seq(as.Date(leadtimes[x],"%Y%m%d"), - length=2,by="months")-1)[2], - 9, 10))}) - - if (filetype == "terciles"){ - ter <- sapply(ind, function(x) { - Subset(data2convert$tercile, along='time', - indices=x,drop='selected')*1000*3600*24*lastday[x]}, - simplify='array') - ext <- sapply(ind, function(x) { - Subset(data2convert$extreme, along='time', - indices=x,drop='selected')*1000*3600*24*lastday[x]}, - simplify='array') - - data2convert <- list(tercile=ter, extreme=ext) - } else { - - ens <- sapply(ind, function(x) { - Subset(data2convert$fcst, along='time', - indices=x,drop='selected')*1000*3600*24*lastday[x]}, - simplify='array') - - data2convert <- list(fcst=ens) - } - - return(data2convert) -} - -convert_data <- function(data,variable, leadtimes, fcst.type,filetype){ - - vars <- yaml.load_file(VARS_DICT)$vars - units <- vars[[variable]]$units - var.longname <- 
vars[[variable]]$longname - - if (variable %in% c("tas","tasmin","tasmax")){ - data <- lapply(data, function(x){ x - 273.15}) - } else if (variable %in% c("psl")){ - data <- lapply(data, function(x){ x/100}) - } else { - print("WARNING: NO DATA CONVERSION APPLIED") - } - - - return(list(data=data, units=units, var.longname=var.longname)) - -} - - -## TODO: implement lists as in save_metrics -save_terciles <- function(variable, - terciles, - fcst.sdate, - grid, - outfile, - leadtimes, - fcst.type, - agg) { - - lalo <- c('longitude','latitude') #decathlon subseasonal - - if (tolower(agg) == "global"){ - terciles <- lapply(terciles, function(x){ - Reorder(x, c('bin',lalo, 'time'))}) - } - - terciles.conv <- convert_data(terciles,variable,leadtimes,fcst.type,"terciles") - terciles <- terciles.conv$data; units <- terciles.conv$units; - var.longname <- terciles.conv$var.longname - remove(terciles.conv) - - p33 <- Subset(terciles$tercile, 'bin', list(1), drop='selected') - - p66 <- Subset(terciles$tercile, 'bin', list(2), drop='selected') - p10 <- Subset(terciles$extreme, 'bin', list(1), drop='selected') - p90 <- Subset(terciles$extreme, 'bin', list(2), drop='selected') - - p33.sdname <- paste('Lower Tercile ', sep=''); - p66.sdname <- paste('Upper Tercile ', sep=''); - p10.sdname <- paste('Lower extreme', sep=''); - p90.sdname <- paste('Upper extreme', sep=''); - - if (tolower(agg) == "country"){ - dims <- c('Country', 'time') - p33.sdanme <- paste0('Country-Aggregated ', p33.sdname) - p66.sdanme <- paste0('Country-Aggregated ', p66.sdname) - p10.sdanme <- paste0('Country-Aggregated ', p10.sdname) - p90.sdanme <- paste0('Country-Aggregated ', p90.sdname) - } else { - dims <- c(lalo, 'time') - p33.sdanme <- paste0('Global ', p33.sdname) - p66.sdanme <- paste0('Global ', p66.sdname) - p10.sdanme <- paste0('Gloabl ', p10.sdname) - p90.sdanme <- paste0('Global ', p90.sdname) - } - - metadata <- list(pbn = list(name = 'p33', - standard_name = p33.sdname ), - pn = list(name = 
'p66', - standard_name = p66.sdname), - pan = list(name = 'p10', - standard_name = p10.sdname), - p10 = list(name = 'p90', - standard_name = p90.sdname)) - - attr(p33, 'variables') <- metadata[1] - attr(p66, 'variables') <- metadata[2] - attr(p10, 'variables') <- metadata[3] - attr(p90, 'variables') <- metadata[4] - - names(dim(p33)) <- dims - names(dim(p66)) <- dims - names(dim(p10)) <- dims - names(dim(p90)) <- dims - - times <- get_times(fcst.type, leadtimes, fcst.sdate) - time <- times$time - time_step <- times$time_step - - if (tolower(agg) == "country") { - - country <- get_countries(grid) - ArrayToNc(list(country, time, p33, p66, p10, p90, time_step), outfile) - - } else { - - latlon <- get_latlon(grid$lat, grid$lon) - ArrayToNc(list(latlon$lat, latlon$lon, time, p33, p66, p10, p90, time_step), outfile) - } -} diff --git a/modules/Skill/Skill.R b/modules/Skill/Skill.R index 5b6807b62338ea1fb55dc857d8f5ec02c6d9cc4e..fb5498e6c0c66f45b583d3d2d71fdb09f9e9e196 100644 --- a/modules/Skill/Skill.R +++ b/modules/Skill/Skill.R @@ -7,13 +7,15 @@ # - reliability diagram # - ask Carlos which decadal metrics he is currently using +source("modules/Skill/compute_quants.R") +source("modules/Skill/compute_probs.R") source("modules/Skill/s2s.metrics.R") ## TODO: Remove once the new version of s2dv is released -source("modules/Skill/CRPS.R") -source("modules/Skill/CRPSS.R") -source("modules/Skill/RandomWalkTest.R") -source("modules/Skill/Bias.R") -source("modules/Skill/AbsBiasSS.R") +source("modules/Skill/tmp/CRPS.R") +source("modules/Skill/tmp/CRPSS.R") +source("modules/Skill/tmp/RandomWalkTest.R") +source("modules/Skill/tmp/Bias.R") +source("modules/Skill/tmp/AbsBiasSS.R") ## TODO: Implement this in the future ## Which parameter are required? 
@@ -213,10 +215,8 @@ compute_probabilities <- function(data, recipe) { ncores <- recipe$Analysis$ncores } - ## TODO: Remove commented lines and include warning if quantile() - ## can not accept na.rm = FALSE if (is.null(recipe$Analysis$remove_NAs)) { - na.rm = T + na.rm = F } else { na.rm = recipe$Analysis$remove_NAs } @@ -230,30 +230,32 @@ compute_probabilities <- function(data, recipe) { for (element in recipe$Analysis$Workflow$Probabilities$percentiles) { # Parse thresholds in recipe thresholds <- sapply(element, function (x) eval(parse(text = x))) - # Compute quantiles and probability bins - probs <- Compute_probs(data$data, thresholds, - ncores = ncores, - na.rm = TRUE) - # Separate quantiles into individual arrays and name them percentile_xx - for (i in seq(1:dim(probs$quantiles)['bin'][[1]])) { - named_quantiles <- append(named_quantiles, - list(probs$quantiles[i, , , , , , ,])) - names(named_quantiles)[length(named_quantiles)] <- paste0("percentile_", - as.integer(thresholds[i]*100)) + quants <- compute_quants(data$data, thresholds, + ncores = ncores, + na.rm = na.rm) + probs <- compute_probs(data$data, quants, + ncores = ncores, + na.rm = na.rm) + for (i in seq(1:dim(quants)['bin'][[1]])) { + named_quantiles <- append(named_quantiles, + list(ClimProjDiags::Subset(quants, + 'bin', i))) + names(named_quantiles)[length(named_quantiles)] <- paste0("percentile_", + as.integer(thresholds[i]*100)) } - # Separate probability bins into individual arrays and name them: - # prob_b = prob. below, prob_a = prob. 
above, prob_xx_to_yy - for (i in seq(1:dim(probs$probs)['bin'][[1]])) { - if (i == 1) { - name_i <- paste0("prob_b", as.integer(thresholds[1]*100)) - } else if (i == dim(probs$probs)['bin'][[1]]) { - name_i <- paste0("prob_a", as.integer(thresholds[i-1]*100)) - } else { - name_i <- paste0("prob_", as.integer(thresholds[i-1]*100), "_to_", - as.integer(thresholds[i]*100)) - } - named_probs <- append(named_probs, list(probs$probs[i, , , , , , , ,])) - names(named_probs)[length(named_probs)] <- name_i + for (i in seq(1:dim(probs)['bin'][[1]])) { + if (i == 1) { + name_i <- paste0("prob_b", as.integer(thresholds[1]*100)) + } else if (i == dim(probs)['bin'][[1]]) { + name_i <- paste0("prob_a", as.integer(thresholds[i-1]*100)) + } else { + name_i <- paste0("prob_", as.integer(thresholds[i-1]*100), "_to_", + as.integer(thresholds[i]*100)) + } + named_probs <- append(named_probs, + list(ClimProjDiags::Subset(probs, + 'bin', i))) + names(named_probs)[length(named_probs)] <- name_i } } named_probs <- lapply(named_probs, function(x) {.drop_dims(x)}) diff --git a/modules/Skill/compute_probs.R b/modules/Skill/compute_probs.R new file mode 100644 index 0000000000000000000000000000000000000000..a662df1483569c2bc3bd38d71640473bd16484fc --- /dev/null +++ b/modules/Skill/compute_probs.R @@ -0,0 +1,34 @@ +## TODO: Document header +compute_probs <- function(data, quantiles, + ncores=1, quantile_dims=c('syear', 'ensemble'), + probs_dims=list('ensemble', 'bin'), + split_factor=1, na.rm=FALSE) { + + if (na.rm == FALSE) { + c2p <- function(x, t) { + # If the array contains any NA values, return NA + if (any(is.na(x))) { + rep(NA, dim(t)[['bin']] + 1) + } else { + colMeans(convert2prob(as.vector(x), threshold = as.vector(t))) + } + } + } else { + c2p <- function(x, t) { + if (any(!is.na(x))) { # If the array contains some non-NA values + colMeans(convert2prob(as.vector(x), threshold = as.vector(t))) + } else { # If the array contains NAs only + rep(NA, dim(t)[['bin']] + 1) # vector with as 
many NAs as prob bins. + } + } + } + + probs <- Apply(data = list(x = data, t = quantiles), + target_dims = probs_dims, + c2p, + output_dims = "bin", + split_factor = split_factor, + ncores = ncores)[[1]] + + return(probs) +} diff --git a/modules/Skill/compute_quants.R b/modules/Skill/compute_quants.R new file mode 100644 index 0000000000000000000000000000000000000000..60ad981fb377c928641a43996566cef1e8b4bbdf --- /dev/null +++ b/modules/Skill/compute_quants.R @@ -0,0 +1,30 @@ +## TODO: Document header + +compute_quants <- function(data, thresholds, + ncores=1, quantile_dims=c('syear', 'ensemble'), + probs_dims=list('ensemble', 'bin'), + split_factor=1, na.rm=FALSE) { + + if (na.rm == FALSE) { + get_quantiles <- function(x, t) { + if (any(is.na(x))) { + rep(NA, length(t)) + } else { + quantile(as.vector(x), t, na.rm = FALSE) + } + } + } else { + get_quantiles <- function(x, t) { + quantile(as.vector(x), t, na.rm = TRUE) + } + } + + quantiles <- Apply(data, + target_dims = quantile_dims, + function(x, t) {get_quantiles(as.vector(x), thresholds)}, + output_dims = "bin", + ncores = ncores, + split_factor = split_factor)[[1]] + + return(quantiles) +} diff --git a/modules/Skill/s2s.metrics.R b/modules/Skill/s2s.metrics.R index 7c0aa30e190ac7a00b10eeedbc74fa845047f446..04e9d801abd8efd0bdec6f39ec50b0d4fa1c34fc 100644 --- a/modules/Skill/s2s.metrics.R +++ b/modules/Skill/s2s.metrics.R @@ -1,7 +1,4 @@ -source("modules/Skill/s2s.probs.R") - - # MERGES verification dimns int single sdate dim along which the # verification metrics will be computed mergedims <- function(data, indims, outdim) { @@ -127,24 +124,36 @@ Compute_verif_metrics <- function(exp, obs, skill_metrics, } else if (metric == "frpss_sign") { - terciles_obs <- Compute_probs(obs, c(1/3, 2/3), - quantile_dims=c(time.dim), - ncores=ncores, - split_factor=1, - na.rm=na.rm) + terciles_obs <- compute_quants(obs, c(1/3, 2/3), + quantile_dims=c(time.dim), + ncores=ncores, + split_factor=1, + na.rm=na.rm) + + 
terciles_exp <- compute_quants(exp, c(1/3, 2/3), + quantile_dims=c(time.dim, 'ensemble'), + ncores=ncores, + split_factor=1, + na.rm=na.rm) + + probs_obs <- compute_probs(obs, terciles_obs, + quantile_dims=c(time.dim), + ncores=ncores, + split_factor=1, + na.rm=na.rm) - terciles_exp <- Compute_probs(exp, c(1/3, 2/3), - quantile_dims=c(time.dim, 'ensemble'), - ncores=ncores, - split_factor=1, - na.rm=na.rm) + probs_exp <- compute_probs(exp, terciles_exp, + quantile_dims=c(time.dim), + ncores=ncores, + split_factor=1, + na.rm=na.rm) - probs_clim = array(data = 1/3, dim = dim(terciles_obs$probs)) + probs_clim = array(data = 1/3, dim = dim(probs_obs)) frps <- NULL n_members <- dim(exp)[which(names(dim(exp)) == 'ensemble')][] frps$clim <- multiApply::Apply(data = list(probs_exp = probs_clim, - probs_obs = terciles_obs$probs), + probs_obs = probs_obs), target_dims = 'bin', fun = .rps_from_probs, n_categories = 3, @@ -152,8 +161,8 @@ Compute_verif_metrics <- function(exp, obs, skill_metrics, Fair = TRUE, ncores = ncores)$output1 - frps$exp <- multiApply::Apply(data = list(probs_exp = terciles_exp$probs, - probs_obs = terciles_obs$probs), + frps$exp <- multiApply::Apply(data = list(probs_exp = probs_exp, + probs_obs = probs_obs), target_dims = 'bin', fun = .rps_from_probs, n_categories = 3, diff --git a/modules/Skill/s2s.probs.R b/modules/Skill/s2s.probs.R deleted file mode 100644 index 889330453d15bb3e90e7f14de5eb5b456697643d..0000000000000000000000000000000000000000 --- a/modules/Skill/s2s.probs.R +++ /dev/null @@ -1,42 +0,0 @@ - - -Compute_probs <- function(data, thresholds, - ncores=1, quantile_dims=c('syear', 'ensemble'), - probs_dims=list('ensemble', 'bin'), - split_factor=1, na.rm=FALSE) { - - quantiles <- Apply(data, - quantile_dims, - function(x, na.rm) {quantile(as.vector(x), - thresholds,na.rm=na.rm)}, - output_dims="bin", - ncores=ncores, - na.rm=na.rm, - split_factor=split_factor)[[1]] - - if (na.rm == FALSE) { - c2p <- function(x, t) { - 
colMeans(convert2prob(as.vector(x), threshold = as.vector(t))) - } - } else { - c2p <- function(x, t) { - if (any(!is.na(x))){ - colMeans(convert2prob(as.vector(x), threshold = as.vector(t))) - } else { - rep(NA, dim(t)[['bin']] + 1) # vector with as many NAs as probability bins. - } - } - } - - probs <- Apply(data = list(x = data, t = quantiles), - target_dims = probs_dims, - c2p, - output_dims = "bin", - split_factor = split_factor, - ncores = ncores)[[1]] - - return(list(probs=probs, quantiles=quantiles)) - -} - - diff --git a/modules/Skill/AbsBiasSS.R b/modules/Skill/tmp/AbsBiasSS.R similarity index 100% rename from modules/Skill/AbsBiasSS.R rename to modules/Skill/tmp/AbsBiasSS.R diff --git a/modules/Skill/Bias.R b/modules/Skill/tmp/Bias.R similarity index 100% rename from modules/Skill/Bias.R rename to modules/Skill/tmp/Bias.R diff --git a/modules/Skill/CRPS.R b/modules/Skill/tmp/CRPS.R similarity index 100% rename from modules/Skill/CRPS.R rename to modules/Skill/tmp/CRPS.R diff --git a/modules/Skill/CRPSS.R b/modules/Skill/tmp/CRPSS.R similarity index 100% rename from modules/Skill/CRPSS.R rename to modules/Skill/tmp/CRPSS.R diff --git a/modules/Skill/RandomWalkTest.R b/modules/Skill/tmp/RandomWalkTest.R similarity index 100% rename from modules/Skill/RandomWalkTest.R rename to modules/Skill/tmp/RandomWalkTest.R diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R new file mode 100644 index 0000000000000000000000000000000000000000..b07d6f1f8bf2f7da22b8c2b7578ee0c35c111151 --- /dev/null +++ b/modules/Visualization/Visualization.R @@ -0,0 +1,328 @@ +## TODO: Remove once released in s2dv/CSTools +source("modules/Visualization/tmp/PlotMostLikelyQuantileMap.R") +source("modules/Visualization/tmp/PlotCombinedMap.R") +source("modules/Visualization/tmp/PlotLayout.R") + +## TODO: Add the possibility to read the data directly from netCDF +## TODO: Adapt to multi-model case +## TODO: Add param 'raw'? 
+ +plot_data <- function(recipe, + archive, + data, + calibrated_data = NULL, + skill_metrics = NULL, + probabilities = NULL, + significance = F) { + + # Try to produce and save several basic plots. + # recipe: the auto-s2s recipe as read by read_yaml() + # archive: the auto-s2s archive as read by read_yaml() + # data: list containing the hcst, obs and (optional) fcst s2dv_cube objects + # calibrated_data: list containing the calibrated hcst and (optional) fcst + # s2dv_cube objects + # skill_metrics: list of arrays containing the computed skill metrics + # significance: Bool. Whether to include significance dots where applicable + + outdir <- paste0(get_dir(recipe), "/plots/") + dir.create(outdir, showWarnings = FALSE, recursive = TRUE) + + if ((is.null(skill_metrics)) && (is.null(calibrated_data)) && + (is.null(data$fcst))) { + stop("The Visualization module has been called, but there is no data ", + "that can be plotted.") + } + + # Plot skill metrics + if (!is.null(skill_metrics)) { + plot_skill_metrics(recipe, archive, data$hcst, skill_metrics, outdir, + significance) + } + + # Plot forecast ensemble mean + if (!is.null(calibrated_data$fcst)) { + plot_ensemble_mean(recipe, archive, calibrated_data$fcst, outdir) + } else if (!is.null(data$fcst)) { + warning("Only the uncalibrated forecast was provided. Using this data ", + "to plot the forecast ensemble mean.") + plot_ensemble_mean(recipe, archive, data$fcst, outdir) + } + + # Plot Most Likely Terciles + if ((!is.null(probabilities)) && (!is.null(calibrated_data$fcst))) { + plot_most_likely_terciles(recipe, archive, calibrated_data$fcst, + probabilities$percentiles, outdir) + } else if ((!is.null(probabilities)) && (!is.null(data$fcst))) { + warning("Only the uncalibrated forecast was provided. 
Using this data ", + "to plot the most likely terciles.") + plot_most_likely_terciles(recipe, archive, data$fcst, + probabilities$percentiles, outdir) + } +} + +plot_skill_metrics <- function(recipe, archive, data_cube, skill_metrics, + outdir, significance = F) { + + # Abort if frequency is daily + if (recipe$Analysis$Variables$freq == "daily_mean") { + stop("Visualization functions not yet implemented for daily data.") + } + + latitude <- data_cube$lat + longitude <- data_cube$lon + system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name + hcst_period <- paste0(recipe$Analysis$Time$hcst_start, "-", + recipe$Analysis$Time$hcst_end) + init_month <- lubridate::month(as.numeric(substr(recipe$Analysis$Time$sdate, + start = 1, stop = 2)), + label = T, abb = T) + + # Group different metrics by type + skill_scores <- c("rpss", "bss90", "bss10", "frpss", "crpss", "mean_bias_ss", + "enscorr", "rpss_specs", "bss90_specs", "bss10_specs", + "enscorr_specs") + scores <- c("rps", "frps", "crps", "frps_specs") + + for (name in c(skill_scores, scores, "mean_bias", "enssprerr")) { + + if (name %in% names(skill_metrics)) { + # Define plot characteristics and metric name to display in plot + if (name %in% c("rpss", "bss90", "bss10", "frpss", "crpss", + "rpss_specs", "bss90_specs", "bss10_specs")) { + display_name <- toupper(strsplit(name, "_")[[1]][1]) + skill <- skill_metrics[[name]] + brks <- seq(-1, 1, by = 0.1) + col2 <- grDevices::hcl.colors(length(brks) - 1, "RdBu", rev = TRUE) + + } else if (name == "mean_bias_ss") { + display_name <- "Mean Bias Skill Score" + skill <- skill_metrics[[name]] + brks <- seq(-1, 1, by = 0.1) + col2 <- grDevices::hcl.colors(length(brks) - 1, "RdBu", rev = TRUE) + + } else if (name %in% c("enscorr", "enscorr_specs")) { + display_name <- "Ensemble Mean Correlation" + skill <- skill_metrics[[name]] + brks <- seq(-1, 1, by = 0.1) + col2 <- grDevices::hcl.colors(length(brks) - 1, "RdBu", rev = TRUE) + + } else if (name %in% scores) { 
+ skill <- skill_metrics[[name]] + display_name <- toupper(strsplit(name, "_")[[1]][1]) + brks <- seq(0, 1, by = 0.1) + col2 <- grDevices::hcl.colors(length(brks) - 1, "Reds") + + } else if (name == "enssprerr") { + ## TODO: Adjust colorbar parameters + skill <- skill_metrics[[name]] + display_name <- "Spread-to-Error Ratio" + brks <- pretty(0:max(skill, na.rm = T), n = 20, min.n = 10) + col2 <- grDevices::hcl.colors(length(brks) - 1, "RdBu", rev = TRUE) + + } else if (name == "mean_bias") { + skill <- skill_metrics[[name]] + display_name <- "Mean Bias" + max_value <- max(abs(skill)) + ugly_intervals <- seq(-max_value, max_value, (max_value*2)/10) + brks <- pretty(ugly_intervals, n = 20, min.n = 10) + col2 <- grDevices::hcl.colors(length(brks) - 1, "RdBu", rev = TRUE) + } + + options(bitmapType = "cairo") + + # Reorder dimensions + skill <- Reorder(skill, c("time", "longitude", "latitude")) + # If the significance has been requested and the variable has it, + # retrieve it and reorder its dimensions. 
+ significance_name <- paste0(name, "_significance") + if ((significance) && (significance_name %in% names(skill_metrics))) { + significance_name <- paste0(name, "_significance") + skill_significance <- skill_metrics[[significance_name]] + skill_significance <- Reorder(skill_significance, c("time", + "longitude", + "latitude")) + } else { + skill_significance <- NULL + } + # Define output file name and titles + outfile <- paste0(outdir, name, ".png") + toptitle <- paste(display_name, "-", data_cube$Variable$varName, + "-", system_name, "-", init_month, hcst_period) + months <- unique(lubridate::month(data_cube$Dates$start, + label = T, abb = F)) + titles <- as.vector(months) + # Plot + PlotLayout(PlotEquiMap, c('longitude', 'latitude'), + skill, longitude, latitude, + dots = skill_significance, + dot_symbol = 20, + toptitle = toptitle, + title_scale = 0.6, + titles = titles, + filled.continents=F, + brks = brks, + cols = col2, + fileout = outfile, + bar_label_digits = 3) + } + } + + print("##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####") +} + +plot_ensemble_mean <- function(recipe, archive, fcst, outdir) { + + # Abort if frequency is daily + if (recipe$Analysis$Variables$freq == "daily_mean") { + stop("Visualization functions not yet implemented for daily data.") + } + + latitude <- fcst$lat + longitude <- fcst$lon + system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name + variable <- recipe$Analysis$Variables$name + units <- attr(fcst$Variable, "variable")$units + start_date <- paste0(recipe$Analysis$Time$fcst_year, + recipe$Analysis$Time$sdate) + + # Compute ensemble mean + ensemble_mean <- s2dv::MeanDims(fcst$data, 'ensemble') + # Drop extra dims, add time dim if missing: + ensemble_mean <- drop(ensemble_mean) + + if (!("time" %in% names(dim(ensemble_mean)))) { + dim(ensemble_mean) <- c("time" = 1, dim(ensemble_mean)) + } + if (!'syear' %in% names(dim(ensemble_mean))) { + ensemble_mean <- Reorder(ensemble_mean, c("time", 
"longitude", "latitude")) + } else { + ensemble_mean <- Reorder(ensemble_mean, c("syear", "time", "longitude", "latitude")) + } + ## TODO: Redefine column colors, possibly depending on variable + if (variable == 'prlr') { + palette = "BrBG" + rev = F + } else { + palette = "RdBu" + rev = T + } + + brks <- pretty(range(ensemble_mean, na.rm = T), n = 15, min.n = 8) + col2 <- grDevices::hcl.colors(length(brks) - 1, palette, rev = rev) + # color <- colorRampPalette(col2)(length(brks) - 1) + options(bitmapType = "cairo") + + for (i_syear in start_date) { + # Define name of output file and titles + if (length(start_date) == 1) { + i_ensemble_mean <- ensemble_mean + outfile <- paste0(outdir, "forecast_ensemble_mean.png") + } else { + i_ensemble_mean <- ensemble_mean[which(start_date == i_syear), , , ] + outfile <- paste0(outdir, "forecast_ensemble_mean_", i_syear, ".png") + } + toptitle <- paste("Forecast Ensemble Mean -", variable, "-", system_name, + "- Initialization:", i_syear) + months <- lubridate::month(fcst$Dates$start[1, 1, which(start_date == i_syear), ], + label = T, abb = F) + titles <- as.vector(months) + # Plots + PlotLayout(PlotEquiMap, c('longitude', 'latitude'), + i_ensemble_mean, longitude, latitude, + filled.continents = F, + toptitle = toptitle, + title_scale = 0.6, + titles = titles, + units = units, + cols = col2, + brks = brks, + fileout = outfile, + bar_label_digits = 4) + } + + print("##### FCST ENSEMBLE MEAN PLOT SAVED TO OUTPUT DIRECTORY #####") +} + +plot_most_likely_terciles <- function(recipe, archive, + fcst, + percentiles, + outdir) { + + # Abort if frequency is daily + if (recipe$Analysis$Variables$freq == "daily_mean") { + stop("Visualization functions not yet implemented for daily data.") + } + + latitude <- fcst$lat + longitude <- fcst$lon + system_name <- archive$System[[recipe$Analysis$Datasets$System$name]]$name + variable <- recipe$Analysis$Variables$name + start_date <- paste0(recipe$Analysis$Time$fcst_year, + 
recipe$Analysis$Time$sdate) + if (is.null(recipe$Analysis$remove_NAs)) { + recipe$Analysis$remove_NAs <- FALSE + } + if (is.null(recipe$Analysis$ncores)) { + recipe$Analysis$ncores <- 1 + } + + # Compute probability bins for the forecast + if (is.null(percentiles$percentile_33) | is.null(percentiles$percentile_66)) { + stop("The quantile array does not contain the 33rd and 66th percentiles,", + " the most likely tercile map cannot be plotted.") + } + + terciles <- abind(percentiles$percentile_33, percentiles$percentile_66, + along = 0) + names(dim(terciles)) <- c("bin", names(dim(percentiles$percentile_33))) + probs_fcst <- compute_probs(fcst$data, terciles, + ncores = recipe$Analysis$ncores, + na.rm = recipe$Analysis$remove_NAs) + + # Drop extra dims, add time dim if missing: + probs_fcst <- drop(probs_fcst) + if (!("time" %in% names(dim(probs_fcst)))) { + dim(probs_fcst) <- c("time" = 1, dim(probs_fcst)) + } + if (!'syear' %in% names(dim(probs_fcst))) { + probs_fcst <- Reorder(probs_fcst, c("time", "bin", "longitude", "latitude")) + } else { + probs_fcst <- Reorder(probs_fcst, c("syear", "time", "bin", "longitude", "latitude")) + } + + for (i_syear in start_date) { + # Define name of output file and titles + if (length(start_date) == 1) { + i_probs_fcst <- probs_fcst + outfile <- paste0(outdir, "forecast_most_likely_tercile.png") + } else { + i_probs_fcst <- probs_fcst[which(start_date == i_syear), , , , ] + outfile <- paste0(outdir, "forecast_most_likely_tercile_", i_syear, ".png") + } + toptitle <- paste("Most Likely Tercile -", variable, "-", system_name, "-", + "Initialization:", i_syear) + months <- lubridate::month(fcst$Dates$start[1, 1, which(start_date == i_syear), ], + label = T, abb = F) + ## TODO: Ensure this works for daily and sub-daily cases + titles <- as.vector(months) + + # Plots + ## NOTE: PlotLayout() and PlotMostLikelyQuantileMap() are still being worked + ## on. 
+ suppressWarnings( + PlotLayout(PlotMostLikelyQuantileMap, c('bin', 'longitude', 'latitude'), + cat_dim = 'bin', + i_probs_fcst, longitude, latitude, + coast_width = 1.5, + title_scale = 0.6, + legend_scale = 0.8, #cex_bar_titles = 0.6, + toptitle = toptitle, + titles = titles, + fileout = outfile, + bar_label_digits = 2, + triangle_ends = c(F, F), width = 11, height = 8) + ) + } + + print("##### MOST LIKELY TERCILE PLOT SAVED TO OUTPUT DIRECTORY #####") +} diff --git a/modules/Visualization/tmp/PlotCombinedMap.R b/modules/Visualization/tmp/PlotCombinedMap.R new file mode 100644 index 0000000000000000000000000000000000000000..a7b5fc9701765a9969a29ff27373405a9a89198e --- /dev/null +++ b/modules/Visualization/tmp/PlotCombinedMap.R @@ -0,0 +1,608 @@ +#'Plot Multiple Lon-Lat Variables In a Single Map According to a Decision Function +#'@description Plot a number a two dimensional matrices with (longitude, latitude) dimensions on a single map with the cylindrical equidistant latitude and longitude projection. +#'@author Nicolau Manubens, \email{nicolau.manubens@bsc.es} +#'@author Veronica Torralba, \email{veronica.torralba@bsc.es} +#' +#'@param maps List of matrices to plot, each with (longitude, latitude) dimensions, or 3-dimensional array with the dimensions (longitude, latitude, map). Dimension names are required. +#'@param lon Vector of longitudes. Must match the length of the corresponding dimension in 'maps'. +#'@param lat Vector of latitudes. Must match the length of the corresponding dimension in 'maps'. +#'@param map_select_fun Function that selects, for each grid point, which value to take among all the provided maps. This function receives as input a vector of values for a same grid point for all the provided maps, and must return a single selected value (not its index!) or NA. For example, the \code{min} and \code{max} functions are accepted. +#'@param display_range Range of values to be displayed for all the maps. 
This must be a numeric vector c(range min, range max). The values in the parameter 'maps' can go beyond the limits specified in this range. If the selected value for a given grid point (according to 'map_select_fun') falls outside the range, it will be coloured with 'col_unknown_map'. +#'@param map_dim Optional name for the dimension of 'maps' along which the multiple maps are arranged. Only applies when 'maps' is provided as a 3-dimensional array. Takes the value 'map' by default. +#'@param brks Colour levels to be sent to PlotEquiMap. This parameter is optional and adjusted automatically by the function. +#'@param cols List of vectors of colours to be sent to PlotEquiMap for the colour bar of each map. This parameter is optional and adjusted automatically by the function (up to 5 maps). The colours provided for each colour bar will be automatically interpolated to match the number of breaks. Each item in this list can be named, and the name will be used as title for the corresponding colour bar (equivalent to the parameter 'bar_titles'). +#'@param col_unknown_map Colour to use to paint the grid cells for which a map is not possible to be chosen according to 'map_select_fun' or for those values that go beyond 'display_range'. Takes the value 'white' by default. +#'@param mask Optional numeric array with dimensions (latitude, longitude), with values in the range [0, 1], indicating the opacity of the mask over each grid point. Cells with a 0 will result in no mask, whereas cells with a 1 will result in a totally opaque superimposed pixel coloured in 'col_mask'. +#'@param col_mask Colour to be used for the superimposed mask (if specified in 'mask'). Takes the value 'grey' by default. +#'@param dots Array of same dimensions as 'var' or with dimensions +#' c(n, dim(var)), where n is the number of dot/symbol layers to add to the +#' plot. A value of TRUE at a grid cell will draw a dot/symbol on the +#' corresponding square of the plot. 
By default all layers provided in 'dots' +#' are plotted with dots, but a symbol can be specified for each of the +#' layers via the parameter 'dot_symbol'. +#'@param bar_titles Optional vector of character strings providing the titles to be shown on top of each of the colour bars. +#'@param legend_scale Scale factor for the size of the colour bar labels. Takes 1 by default. +#'@param cex_bar_titles Scale factor for the sizes of the bar titles. Takes 1.5 by default. +#'@param fileout File where to save the plot. If not specified (default) a graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf, bmp and tiff +#'@param width File width, in the units specified in the parameter size_units (inches by default). Takes 8 by default. +#'@param height File height, in the units specified in the parameter size_units (inches by default). Takes 5 by default. +#'@param size_units Units of the size of the device (file or window) to plot in. Inches ('in') by default. See ?Devices and the creator function of the corresponding device. +#'@param res Resolution of the device (file or window) to plot in. See ?Devices and the creator function of the corresponding device. +#'@param drawleg Where to draw the common colour bar. Can take values TRUE, +#' FALSE or:\cr +#' 'up', 'u', 'U', 'top', 't', 'T', 'north', 'n', 'N'\cr +#' 'down', 'd', 'D', 'bottom', 'b', 'B', 'south', 's', 'S' (default)\cr +#' 'right', 'r', 'R', 'east', 'e', 'E'\cr +#' 'left', 'l', 'L', 'west', 'w', 'W' +#'@param ... Additional parameters to be passed on to \code{PlotEquiMap}. 
+ +#'@seealso \code{PlotCombinedMap} and \code{PlotEquiMap} +#' +#'@importFrom s2dv PlotEquiMap ColorBar +#'@importFrom maps map +#'@importFrom graphics box image layout mtext par plot.new +#'@importFrom grDevices adjustcolor bmp colorRampPalette dev.cur dev.new dev.off hcl jpeg pdf png postscript svg tiff +#'@examples +#'# Simple example +#'x <- array(1:(20 * 10), dim = c(lat = 10, lon = 20)) / 200 +#'a <- x * 0.6 +#'b <- (1 - x) * 0.6 +#'c <- 1 - (a + b) +#'lons <- seq(0, 359.5, length = 20) +#'lats <- seq(-89.5, 89.5, length = 10) +#'PlotCombinedMap(list(a, b, c), lons, lats, +#' toptitle = 'Maximum map', +#' map_select_fun = max, +#' display_range = c(0, 1), +#' bar_titles = paste('% of belonging to', c('a', 'b', 'c')), +#' brks = 20, width = 10, height = 8) +#' +#'Lon <- c(0:40, 350:359) +#'Lat <- 51:26 +#'data <- rnorm(51 * 26 * 3) +#'dim(data) <- c(map = 3, lon = 51, lat = 26) +#'mask <- sample(c(0,1), replace = TRUE, size = 51 * 26) +#'dim(mask) <- c(lat = 26, lon = 51) +#'PlotCombinedMap(data, lon = Lon, lat = Lat, map_select_fun = max, +#' display_range = range(data), mask = mask, +#' width = 12, height = 8) +#' +#'@export +PlotCombinedMap <- function(maps, lon, lat, + map_select_fun, display_range, + map_dim = 'map', + brks = NULL, cols = NULL, + col_unknown_map = 'white', + mask = NULL, col_mask = 'grey', + dots = NULL, + bar_titles = NULL, legend_scale = 1, + cex_bar_titles = 1.5, + plot_margin = NULL, bar_margin = rep(0, 4), + fileout = NULL, width = 8, height = 5, + size_units = 'in', res = 100, drawleg = T, + ...) { + args <- list(...) 
+ + # If there is any filenames to store the graphics, process them + # to select the right device + if (!is.null(fileout)) { + deviceInfo <- .SelectDevice(fileout = fileout, width = width, height = height, + units = size_units, res = res) + saveToFile <- deviceInfo$fun + fileout <- deviceInfo$files + } + + # Check probs + error <- FALSE + if (is.list(maps)) { + if (length(maps) < 1) { + stop("Parameter 'maps' must be of length >= 1 if provided as a list.") + } + check_fun <- function(x) { + is.numeric(x) && (length(dim(x)) == 2) + } + if (!all(sapply(maps, check_fun))) { + error <- TRUE + } + ref_dims <- dim(maps[[1]]) + equal_dims <- all(sapply(maps, function(x) identical(dim(x), ref_dims))) + if (!equal_dims) { + stop("All arrays in parameter 'maps' must have the same dimension ", + "sizes and names when 'maps' is provided as a list of arrays.") + } + num_maps <- length(maps) + maps <- unlist(maps) + dim(maps) <- c(ref_dims, map = num_maps) + map_dim <- 'map' + } + if (!is.numeric(maps)) { + error <- TRUE + } + if (is.null(dim(maps))) { + error <- TRUE + } + if (length(dim(maps)) != 3) { + error <- TRUE + } + if (error) { + stop("Parameter 'maps' must be either a numeric array with 3 dimensions ", + " or a list of numeric arrays of the same size with the 'lon' and ", + "'lat' dimensions.") + } + dimnames <- names(dim(maps)) + + # Check map_dim + if (is.character(map_dim)) { + if (is.null(dimnames)) { + stop("Specified a dimension name in 'map_dim' but no dimension names provided ", + "in 'maps'.") + } + map_dim <- which(dimnames == map_dim) + if (length(map_dim) < 1) { + stop("Dimension 'map_dim' not found in 'maps'.") + } else { + map_dim <- map_dim[1] + } + } else if (!is.numeric(map_dim)) { + stop("Parameter 'map_dim' must be either a numeric value or a ", + "dimension name.") + } + if (length(map_dim) != 1) { + stop("Parameter 'map_dim' must be of length 1.") + } + map_dim <- round(map_dim) + + # Work out lon_dim and lat_dim + lon_dim <- NULL + if 
(!is.null(dimnames)) { + lon_dim <- which(dimnames %in% c('lon', 'longitude'))[1] + } + if (length(lon_dim) < 1) { + lon_dim <- (1:3)[-map_dim][1] + } + lon_dim <- round(lon_dim) + + lat_dim <- NULL + if (!is.null(dimnames)) { + lat_dim <- which(dimnames %in% c('lat', 'latitude'))[1] + } + if (length(lat_dim) < 1) { + lat_dim <- (1:3)[-map_dim][2] + } + lat_dim <- round(lat_dim) + + # Check lon + if (!is.numeric(lon)) { + stop("Parameter 'lon' must be a numeric vector.") + } + if (length(lon) != dim(maps)[lon_dim]) { + stop("Parameter 'lon' does not match the longitude dimension in 'maps'.") + } + + # Check lat + if (!is.numeric(lat)) { + stop("Parameter 'lat' must be a numeric vector.") + } + if (length(lat) != dim(maps)[lat_dim]) { + stop("Parameter 'lat' does not match the latitude dimension in 'maps'.") + } + + # Check map_select_fun + if (is.numeric(map_select_fun)) { + if (length(dim(map_select_fun)) != 2) { + stop("Parameter 'map_select_fun' must be an array with dimensions ", + "'lon' and 'lat' if provided as an array.") + } + if (!identical(dim(map_select_fun), dim(maps)[-map_dim])) { + stop("The dimensions 'lon' and 'lat' in the 'map_select_fun' array must ", + "have the same size, name and order as in the 'maps' parameter.") + } + } + if (!is.function(map_select_fun)) { + stop("The parameter 'map_select_fun' must be a function or a numeric array.") + } + + # Check display_range + if (!is.numeric(display_range) || length(display_range) != 2) { + stop("Parameter 'display_range' must be a numeric vector of length 2.") + } + + # Check brks + if (is.null(brks) || (is.numeric(brks) && length(brks) == 1)) { + num_brks <- 5 + if (is.numeric(brks)) { + num_brks <- brks + } + brks <- seq(from = display_range[1], to = display_range[2], length.out = num_brks) + } + if (!is.numeric(brks)) { + stop("Parameter 'brks' must be a numeric vector.") + } + + # Check cols + col_sets <- list(c("#A1D99B", "#74C476", "#41AB5D", "#238B45"), + c("#6BAED6FF", "#4292C6FF", 
"#2171B5FF", "#08519CFF"), + c("#FFEDA0FF", "#FED976FF", "#FEB24CFF", "#FD8D3CFF"), + c("#FC4E2AFF", "#E31A1CFF", "#BD0026FF", "#800026FF"), + c("#FCC5C0", "#FA9FB5", "#F768A1", "#DD3497")) + if (is.null(cols)) { + if (length(col_sets) >= dim(maps)[map_dim]) { + chosen_sets <- 1:(dim(maps)[map_dim]) + chosen_sets <- chosen_sets + floor((length(col_sets) - length(chosen_sets)) / 2) + } else { + chosen_sets <- array(1:length(col_sets), dim(maps)[map_dim]) + } + cols <- col_sets[chosen_sets] + } else { + if (!is.list(cols)) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (!all(sapply(cols, is.character))) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (length(cols) != dim(maps)[map_dim]) { + stop("Parameter 'cols' must be a list of the same length as the number of ", + "maps in 'maps'.") + } + } + for (i in 1:length(cols)) { + if (length(cols[[i]]) != (length(brks) - 1)) { + cols[[i]] <- colorRampPalette(cols[[i]])(length(brks) - 1) + } + } + + # Check bar_titles + if (is.null(bar_titles)) { + if (!is.null(names(cols))) { + bar_titles <- names(cols) + } else { + bar_titles <- paste0("Map ", 1:length(cols)) + } + } else { + if (!is.character(bar_titles)) { + stop("Parameter 'bar_titles' must be a character vector.") + } + if (length(bar_titles) != length(cols)) { + stop("Parameter 'bar_titles' must be of the same length as the number of ", + "maps in 'maps'.") + } + } + + # Check legend_scale + if (!is.numeric(legend_scale)) { + stop("Parameter 'legend_scale' must be numeric.") + } + + # Check col_unknown_map + if (!is.character(col_unknown_map)) { + stop("Parameter 'col_unknown_map' must be a character string.") + } + + # Check col_mask + if (!is.character(col_mask)) { + stop("Parameter 'col_mask' must be a character string.") + } + + # Check mask + if (!is.null(mask)) { + if (!is.numeric(mask)) { + stop("Parameter 'mask' must be numeric.") + } + if (length(dim(mask)) != 2) { + stop("Parameter 'mask' must have two 
dimensions.") + } + if ((dim(mask)[1] != dim(maps)[lat_dim]) || + (dim(mask)[2] != dim(maps)[lon_dim])) { + stop("Parameter 'mask' must have dimensions c(lat, lon).") + } + } + # Check dots + if (!is.null(dots)) { + if (length(dim(dots)) != 2) { + stop("Parameter 'dots' must have two dimensions.") + } + if ((dim(dots)[1] != dim(maps)[lat_dim]) || + (dim(dots)[2] != dim(maps)[lon_dim])) { + stop("Parameter 'dots' must have dimensions c(lat, lon).") + } + } + + #---------------------- + # Identify the most likely map + #---------------------- + brks_norm <- seq(0, 1, length.out = length(brks)) + if (is.function(map_select_fun)) { + range_width <- display_range[2] - display_range[1] + ml_map <- apply(maps, c(lat_dim, lon_dim), function(x) { + if (any(is.na(x))) { + res <- NA + } else { + res <- which(x == map_select_fun(x)) + if (length(res) > 0) { + res <- res[1] + if (map_select_fun(x) < display_range[1] || + map_select_fun(x) > display_range[2]) { + res <- -0.5 + } else { + res <- res + (map_select_fun(x) - display_range[1]) / range_width + if (map_select_fun(x) == display_range[1]) { + res <- res + brks_norm[2] / (num_brks * 2) + } + } + } else { + res <- -0.5 + } + } + res + }) + } else { + stop("Providing 'map_select_fun' as array not implemented yet.") + ml_map <- map_select_fun + } + nmap <- dim(maps)[map_dim] + nlat <- length(lat) + nlon <- length(lon) + + #---------------------- + # Set latitudes from minimum to maximum + #---------------------- + if (lat[1] > lat[nlat]){ + lat <- lat[nlat:1] + indices <- list(nlat:1, TRUE) + ml_map <- do.call("[", c(list(x = ml_map), indices)) + if (!is.null(mask)){ + mask <- mask[nlat:1, ] + } + if (!is.null(dots)){ + dots <- dots[nlat:1,] + } + } + + #---------------------- + # Set layout and parameters + #---------------------- + # Open connection to graphical device + if (!is.null(fileout)) { + saveToFile(fileout) + } else if (names(dev.cur()) == 'null device') { + dev.new(units = size_units, res = res, width = width, 
height = height) + } + #NOTE: I think plot.new() is not necessary in any case. +# plot.new() + par(font.main = 1) + # If colorbars need to be plotted, re-define layout. + if (drawleg) { + layout(matrix(c(rep(1, nmap),2:(nmap + 1)), 2, nmap, byrow = TRUE), heights = c(6, 1.5)) + } + + #---------------------- + # Set colors and breaks and then PlotEquiMap + #---------------------- + tcols <- c(col_unknown_map, cols[[1]]) + for (k in 2:nmap) { + tcols <- append(tcols, c(col_unknown_map, cols[[k]])) + } + + tbrks <- c(-1, brks_norm + rep(1:nmap, each = length(brks))) + + if (is.null(plot_margin)) { + plot_margin <- c(5, 4, 4, 2) + 0.1 # default of par()$mar + } + + PlotEquiMap(var = ml_map, lon = lon, lat = lat, + brks = tbrks, cols = tcols, drawleg = FALSE, + filled.continents = FALSE, dots = dots, mar = plot_margin, ...) + + #---------------------- + # Add overplot on top + #---------------------- + if (!is.null(mask)) { + dims_mask <- dim(mask) + latb <- sort(lat, index.return = TRUE) + dlon <- lon[2:dims_mask[2]] - lon[1:(dims_mask[2] - 1)] + wher <- which(dlon > (mean(dlon) + 1)) + if (length(wher) > 0) { + lon[(wher + 1):dims_mask[2]] <- lon[(wher + 1):dims_mask[2]] - 360 + } + lonb <- sort(lon, index.return = TRUE) + + cols_mask <- sapply(seq(from = 0, to = 1, length.out = 10), + function(x) adjustcolor(col_mask, alpha.f = x)) + image(lonb$x, latb$x, t(mask)[lonb$ix, latb$ix], + axes = FALSE, col = cols_mask, + breaks = seq(from = 0, to = 1, by = 0.1), + xlab='', ylab='', add = TRUE, xpd = TRUE) + if (!exists('coast_color')) { + coast_color <- 'black' + } + if (min(lon) < 0) { + map('world', interior = FALSE, add = TRUE, lwd = 1, col = coast_color) # Low resolution world map (lon -180 to 180). + } else { + map('world2', interior = FALSE, add = TRUE, lwd = 1, col = coast_color) # Low resolution world map (lon 0 to 360). 
+ } + box() + } + + #---------------------- + # Add colorbars + #---------------------- + if ('toptitle' %in% names(args)) { + size_title <- 1 + if ('title_scale' %in% names(args)) { + size_title <- args[['title_scale']] + } + old_mar <- par('mar') + old_mar[3] <- old_mar[3] - (2 * size_title + 1) + par(mar = old_mar) + } + + if (drawleg) { + for (k in 1:nmap) { + ColorBar(brks = brks, cols = cols[[k]], vertical = FALSE, + draw_separators = TRUE, extra_margin = c(2, 0, 2, 0), + label_scale = legend_scale * 1.5) + if (!is.null(bar_titles)) { + mtext(bar_titles[[k]], 3, line = -3, cex = cex_bar_titles) + } + #TODO: Change to below code. Plot title together. extra_margin needs to be adjusted. +# ColorBar(brks = brks, cols = cols[[k]], vertical = FALSE, +# draw_separators = TRUE, extra_margin = c(1, 0, 1, 0), +# label_scale = legend_scale * 1.5, title = bar_titles[[k]], title_scale = cex_bar_titles) + } + } + + # If the graphic was saved to file, close the connection with the device + if (!is.null(fileout)) dev.off() +} + +# Color bar for PlotMostLikelyQuantileMap +multi_ColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE, subsampleg = NULL, + bar_limits = NULL, var_limits = NULL, + triangle_ends = NULL, plot = TRUE, + draw_separators = FALSE, + bar_titles = NULL, title_scale = 1, label_scale = 1, extra_margin = rep(0, 4), + ...) 
{ + + minimum_value <- ceiling(1 / nmap * 10 * 1.1) * 10 + display_range = c(minimum_value, 100) + + # Check brks + if (is.null(brks) || (is.numeric(brks) && length(brks) == 1)) { + num_brks <- 5 + if (is.numeric(brks)) { + num_brks <- brks + } + brks <- seq(from = display_range[1], to = display_range[2], length.out = num_brks) + } + if (!is.numeric(brks)) { + stop("Parameter 'brks' must be a numeric vector.") + } + # Check cols + col_sets <- list(c("#A1D99B", "#74C476", "#41AB5D", "#238B45"), + c("#6BAED6FF", "#4292C6FF", "#2171B5FF", "#08519CFF"), + c("#FFEDA0FF", "#FED976FF", "#FEB24CFF", "#FD8D3CFF"), + c("#FC4E2AFF", "#E31A1CFF", "#BD0026FF", "#800026FF"), + c("#FCC5C0", "#FA9FB5", "#F768A1", "#DD3497")) + if (is.null(cols)) { + if (length(col_sets) >= nmap) { + chosen_sets <- 1:nmap + chosen_sets <- chosen_sets + floor((length(col_sets) - length(chosen_sets)) / 2) + } else { + chosen_sets <- array(1:length(col_sets), nmap) + } + cols <- col_sets[chosen_sets] + } else { + if (!is.list(cols)) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (!all(sapply(cols, is.character))) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (length(cols) != nmap) { + stop("Parameter 'cols' must be a list of the same length as the number of ", + "maps in 'maps'.") + } + } + for (i in 1:length(cols)) { + if (length(cols[[i]]) != (length(brks) - 1)) { + cols[[i]] <- grDevices::colorRampPalette(cols[[i]])(length(brks) - 1) + } + } + + # Check bar_titles + if (is.null(bar_titles)) { + if (nmap == 3) { + bar_titles <- c("Below normal (%)", "Normal (%)", "Above normal (%)") + } else if (nmap == 5) { + bar_titles <- c("Low (%)", "Below normal (%)", + "Normal (%)", "Above normal (%)", "High (%)") + } else { + bar_titles <- paste0("Cat. 
", 1:nmap, " (%)") + } + } + + if (plot) { + for (k in 1:nmap) { + s2dv::ColorBar(brks = brks, cols = cols[[k]], vertical = FALSE, subsampleg = subsampleg, + bar_limits = bar_limits, var_limits = var_limits, + triangle_ends = triangle_ends, plot = TRUE, + draw_separators = draw_separators, + title = bar_titles[[k]], title_scale = title_scale, + label_scale = label_scale, extra_margin = extra_margin) + } + } else { + #TODO: col_inf and col_sup + return(list(brks = brks, cols = cols)) + } + +} + +#TODO: use s2dv:::.SelectDevice and remove this function here? +.SelectDevice <- function(fileout, width, height, units, res) { + # This function is used in the plot functions to check the extension of the + # files where the graphics will be stored and select the right R device to + # save them. + # If the vector of filenames ('fileout') has files with different + # extensions, then it will only accept the first one, changing all the rest + # of the filenames to use that extension. + + # We extract the extension of the filenames: '.png', '.pdf', ... 
+ ext <- regmatches(fileout, regexpr("\\.[a-zA-Z0-9]*$", fileout)) + + if (length(ext) != 0) { + # If there is an extension specified, select the correct device + ## units of width and height set to accept inches + if (ext[1] == ".png") { + saveToFile <- function(fileout) { + png(filename = fileout, width = width, height = height, res = res, units = units) + } + } else if (ext[1] == ".jpeg") { + saveToFile <- function(fileout) { + jpeg(filename = fileout, width = width, height = height, res = res, units = units) + } + } else if (ext[1] %in% c(".eps", ".ps")) { + saveToFile <- function(fileout) { + postscript(file = fileout, width = width, height = height) + } + } else if (ext[1] == ".pdf") { + saveToFile <- function(fileout) { + pdf(file = fileout, width = width, height = height) + } + } else if (ext[1] == ".svg") { + saveToFile <- function(fileout) { + svg(filename = fileout, width = width, height = height) + } + } else if (ext[1] == ".bmp") { + saveToFile <- function(fileout) { + bmp(filename = fileout, width = width, height = height, res = res, units = units) + } + } else if (ext[1] == ".tiff") { + saveToFile <- function(fileout) { + tiff(filename = fileout, width = width, height = height, res = res, units = units) + } + } else { + warning("file extension not supported, it will be used '.eps' by default.") + ## In case there is only one filename + fileout[1] <- sub("\\.[a-zA-Z0-9]*$", ".eps", fileout[1]) + ext[1] <- ".eps" + saveToFile <- function(fileout) { + postscript(file = fileout, width = width, height = height) + } + } + # Change filenames when necessary + if (any(ext != ext[1])) { + warning(paste0("some extensions of the filenames provided in 'fileout' are not ", ext[1],". 
The extensions are being converted to ", ext[1], ".")) + fileout <- sub("\\.[a-zA-Z0-9]*$", ext[1], fileout) + } + } else { + # Default filenames when there is no specification + warning("there are no extensions specified in the filenames, default to '.eps'") + fileout <- paste0(fileout, ".eps") + saveToFile <- postscript + } + + # return the correct function with the graphical device, and the correct + # filenames + list(fun = saveToFile, files = fileout) +} + diff --git a/modules/Visualization/tmp/PlotLayout.R b/modules/Visualization/tmp/PlotLayout.R new file mode 100644 index 0000000000000000000000000000000000000000..e5ae98007526ca845338e8788c41b9f531d23f69 --- /dev/null +++ b/modules/Visualization/tmp/PlotLayout.R @@ -0,0 +1,732 @@ +#'Arrange and Fill Multi-Pannel Layouts With Optional Colour Bar +#' +#'This function takes an array or list of arrays and loops over each of them +#'to plot all the sub-arrays they contain on an automatically generated +#'multi-pannel layout. A different plot function (not necessarily from +#'s2dv) can be applied over each of the provided arrays. The input +#'dimensions of each of the functions have to be specified, either with the +#'names or the indices of the corresponding input dimensions. It is possible +#'to draw a common colour bar at any of the sides of the multi-pannel for all +#'the s2dv plots that use a colour bar. Common plotting arguments +#'for all the arrays in 'var' can be specified via the '...' parameter, and +#'specific plotting arguments for each array can be fully adjusted via +#''special_args'. It is possible to draw titles for each of the figures, +#'layout rows, layout columns and for the whole figure. A number of parameters +#'is provided in order to adjust the position, size and colour of the +#'components. 
Blank cells can be forced to appear and later be filled in
+#'manually with customized plots.\cr
+#'This function pops up a blank new device and fills it in, so it cannot be
+#'nested in complex layouts.
+#'
+#'@param fun Plot function (or name of the function) to be called on the
+#' arrays provided in 'var'. If multiple arrays are provided in 'var', a
+#' vector of as many function names (character strings!) can be provided in
+#' 'fun', one for each array in 'var'.
+#'@param plot_dims Numeric or character string vector with identifiers of the
+#' input plot dimensions of the plot function specified in 'fun'. If
+#' character labels are provided, names(dim(var)) or attr('dimensions', var)
+#' will be checked to locate the dimensions. As many plots as
+#' prod(dim(var)[-plot_dims]) will be generated. If multiple arrays are
+#' provided in 'var', 'plot_dims' can be sent a list with a vector of plot
+#' dimensions for each. If a single vector is provided, it will be used for
+#' all the arrays in 'var'.
+#'@param var Multi-dimensional array with at least the dimensions expected by
+#' the specified plot function in 'fun'. The dimensions required by the
+#' function must be specified in 'plot_dims'. The dimensions can be
+#' disordered and will be reordered automatically. Dimensions can optionally
+#' be labelled in order to refer to them with names in 'plot_dims'. All the
+#' available plottable sub-arrays will be automatically plotted and arranged
+#' in consecutive cells of an automatically arranged layout. A list of
+#' multiple (super-)arrays can be specified. The process will be repeated for
+#' each of them, by default applying the same plot function to all of them
+#' or, if properly specified in 'fun', a different plot function will be
+#' applied to each of them. NAs can be passed to the list: a NA will yield a
+#' blank cell in the layout, which can be populated after
+#' (see .SwitchToFigure). 
+#'@param \dots Parameters to be sent to the plotting function 'fun'. If +#' multiple arrays are provided in 'var' and multiple functions are provided +#' in 'fun', the parameters provided through \dots will be sent to all the +#' plot functions, as common parameters. To specify concrete arguments for +#' each of the plot functions see parameter 'special_args'. +#'@param special_args List of sub-lists, each sub-list having specific extra +#' arguments for each of the plot functions provided in 'fun'. If you want to +#' fix a different value for each plot in the layout you can do so by +#' a) splitting your array into a list of sub-arrays (each with the data for +#' one plot) and providing it as parameter 'var', +#' b) providing a list of named sub-lists in 'special_args', where the names +#' of each sub-list match the names of the parameters to be adjusted, and +#' each value in a sub-list contains the value of the corresponding parameter. +#' For example, if the plots are two maps with different arguments, the +#' structure would be like:\cr +#' var:\cr +#' List of 2\cr +#' $ : num [1:360, 1:181] 1 3.82 5.02 6.63 8.72 ...\cr +#' $ : num [1:360, 1:181] 2.27 2.82 4.82 7.7 10.32 ...\cr +#' special_args:\cr +#' List of 2\cr +#' $ :List of 2\cr +#' ..$ arg1: ...\cr +#' ..$ arg2: ...\cr +#' $ :List of 1\cr +#' ..$ arg1: ...\cr +#'@param nrow Numeric value to force the number of rows in the automatically +#' generated layout. If higher than the required, this will yield blank cells +#' in the layout (which can then be populated). If lower than the required +#' the function will stop. By default it is configured to arrange the layout +#' in a shape as square as possible. Blank cells can be manually populated +#' after with customized plots (see SwitchTofigure). +#'@param ncol Numeric value to force the number of columns in the +#' automatically generated layout. If higher than the required, this will +#' yield blank cells in the layout (which can then be populated). 
If lower
+#' than the required the function will stop. By default it is configured to
+#' arrange the layout in a shape as square as possible. Blank cells can be
+#' manually populated after with customized plots (see .SwitchToFigure).
+#'@param toptitle Top title for the multi-pannel. Blank by default.
+#'@param row_titles Character string vector with titles for each of the rows
+#' in the layout. Blank by default.
+#'@param col_titles Character string vector with titles for each of the
+#' columns in the layout. Blank by default.
+#'@param bar_scale Scale factor for the common colour bar. Takes 1 by default.
+#'@param title_scale Scale factor for the multi-pannel title. Takes 1 by
+#' default.
+#'@param title_margin_scale Scale factor for the margins surrounding the top
+#' title. Takes 1 by default.
+#'@param title_left_shift_scale When plotting row titles, a shift is added
+#' to the horizontal positioning of the top title in order to center it to
+#' the region of the figures (without taking row titles into account). This
+#' shift can be reduced. A value of 0 will remove the shift completely,
+#' centering the title to the total width of the device. This parameter will
+#' be disregarded if no 'row_titles' are provided.
+#'@param subtitle_scale Scale factor for the row titles and column titles
+#' (specified in 'row_titles' and 'col_titles'). Takes 1 by default.
+#'@param subtitle_margin_scale Scale factor for the margins surrounding the
+#' subtitles. Takes 1 by default.
+#'@param units Title at the top of the colour bar, most commonly the units of
+#' the variable provided in parameter 'var'.
+#'@param brks,cols,bar_limits,triangle_ends Usually only providing 'brks' is
+#' enough to generate the desired colour bar. These parameters allow to
+#' define n breaks that define n - 1 intervals to classify each of the values
+#' in 'var'. The corresponding grid cell of a given value in 'var' will be
+#' coloured in function of the interval it belongs to. 
These parameters are +#' sent to \code{ColorBar()} to generate the breaks and colours. Additional +#' colours for values beyond the limits of the colour bar are also generated +#' and applied to the plot if 'bar_limits' or 'brks' and 'triangle_ends' are +#' properly provided to do so. See ?ColorBar for a full explanation. +#'@param col_inf,col_sup Colour identifiers to colour the values in 'var' that +#' go beyond the extremes of the colour bar and to colour NA values, +#' respectively. 'colNA' takes 'white' by default. 'col_inf' and 'col_sup' +#' will take the value of 'colNA' if not specified. See ?ColorBar for a full +#' explanation on 'col_inf' and 'col_sup'. +#'@param color_fun,subsampleg,bar_extra_labels,draw_bar_ticks,draw_separators,triangle_ends_scale,bar_label_digits,bar_label_scale,units_scale,bar_tick_scale,bar_extra_margin Set of parameters to control the visual aspect of the drawn colour bar. See ?ColorBar for a full explanation. +#'@param drawleg Where to draw the common colour bar. Can take values TRUE, +#' FALSE or:\cr +#' 'up', 'u', 'U', 'top', 't', 'T', 'north', 'n', 'N'\cr +#' 'down', 'd', 'D', 'bottom', 'b', 'B', 'south', 's', 'S' (default)\cr +#' 'right', 'r', 'R', 'east', 'e', 'E'\cr +#' 'left', 'l', 'L', 'west', 'w', 'W' +#'@param titles Character string vector with titles for each of the figures in +#' the multi-pannel, from top-left to bottom-right. Blank by default. +#'@param bar_left_shift_scale When plotting row titles, a shift is added to +#' the horizontal positioning of the colour bar in order to center it to the +#' region of the figures (without taking row titles into account). This shift +#' can be reduced. A value of 0 will remove the shift completely, centering +#' the colour bar to the total width of the device. This parameter will be +#' disregarded if no 'row_titles' are provided. +#'@param extra_margin Extra margins to be added around the layout, in the +#' format c(y1, x1, y2, x2). The units are margin lines. 
Takes rep(0, 4)
+#' by default.
+#'@param layout_by_rows Logical indicating whether the panels should be filled
+#' by columns (FALSE) or by rows (TRUE, default).
+#'@param fileout File where to save the plot. If not specified (default) a
+#' graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf,
+#' bmp and tiff.
+#'@param width Width in inches of the multi-pannel. 7 by default, or 11 if
+#' 'fileout' has been specified.
+#'@param height Height in inches of the multi-pannel. 7 by default, or 11 if
+#' 'fileout' has been specified.
+#'@param size_units Units of the size of the device (file or window) to plot
+#' in. Inches ('in') by default. See ?Devices and the creator function of
+#' the corresponding device.
+#'@param res Resolution of the device (file or window) to plot in. See
+#' ?Devices and the creator function of the corresponding device.
+#'@param close_device Whether to close the graphics device after plotting
+#' the layout and a 'fileout' has been specified. This is useful to avoid
+#' closing the device when saving the layout into a file and willing to add
+#' extra elements or figures. Takes TRUE by default. Disregarded if no
+#' 'fileout' has been specified.
+#'
+#'@return
+#'\item{brks}{
+#' Breaks used for colouring the map (and legend if drawleg = TRUE).
+#'}
+#'\item{cols}{
+#' Colours used for colouring the map (and legend if drawleg = TRUE).
+#' Always of length length(brks) - 1.
+#'}
+#'\item{col_inf}{
+#' Colour used to draw the lower triangle end in the colour bar
+#' (NULL if not drawn at all).
+#'}
+#'\item{col_sup}{
+#' Colour used to draw the upper triangle end in the colour bar
+#' (NULL if not drawn at all).
+#'}
+#'\item{layout_matrix}{
+#' Underlying matrix of the layout. Useful to later set any of the layout
+#' cells as current figure to add plot elements. See .SwitchToFigure. 
+#'} +#' +#'@examples +#'# See examples on Load() to understand the first lines in this example +#' \dontrun{ +#'data_path <- system.file('sample_data', package = 's2dv') +#'expA <- list(name = 'experiment', path = file.path(data_path, +#' 'model/$EXP_NAME$/$STORE_FREQ$_mean/$VAR_NAME$_3hourly', +#' '$VAR_NAME$_$START_DATE$.nc')) +#'obsX <- list(name = 'observation', path = file.path(data_path, +#' '$OBS_NAME$/$STORE_FREQ$_mean/$VAR_NAME$', +#' '$VAR_NAME$_$YEAR$$MONTH$.nc')) +#' +#'# Now we are ready to use Load(). +#'startDates <- c('19851101', '19901101', '19951101', '20001101', '20051101') +#'sampleData <- Load('tos', list(expA), list(obsX), startDates, +#' leadtimemin = 1, leadtimemax = 4, output = 'lonlat', +#' latmin = 27, latmax = 48, lonmin = -12, lonmax = 40) +#' } +#' \dontshow{ +#'startDates <- c('19851101', '19901101', '19951101', '20001101', '20051101') +#'sampleData <- s2dv:::.LoadSampleData('tos', c('experiment'), +#' c('observation'), startDates, +#' leadtimemin = 1, +#' leadtimemax = 4, +#' output = 'lonlat', +#' latmin = 27, latmax = 48, +#' lonmin = -12, lonmax = 40) +#' } +#'PlotLayout(PlotEquiMap, c('lat', 'lon'), sampleData$mod[1, , 1, 1, , ], +#' sampleData$lon, sampleData$lat, +#' toptitle = 'Predicted tos for Nov 1960 from 1st Nov', +#' titles = paste('Member', 1:15)) +#' +#'@importFrom grDevices dev.cur dev.new dev.off +#'@export +PlotLayout <- function(fun, plot_dims, var, ..., special_args = NULL, + nrow = NULL, ncol = NULL, toptitle = NULL, + row_titles = NULL, col_titles = NULL, bar_scale = 1, + title_scale = 1, title_margin_scale = 1, + title_left_shift_scale = 1, + subtitle_scale = 1, subtitle_margin_scale = 1, + brks = NULL, cols = NULL, drawleg = 'S', titles = NULL, + subsampleg = NULL, bar_limits = NULL, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, + color_fun = clim.colors, + draw_bar_ticks = TRUE, draw_separators = FALSE, + triangle_ends_scale = 1, bar_extra_labels = NULL, + units = NULL, units_scale = 1, 
bar_label_scale = 1, + bar_tick_scale = 1, bar_extra_margin = rep(0, 4), + bar_left_shift_scale = 1, bar_label_digits = 4, + extra_margin = rep(0, 4), layout_by_rows = TRUE, + fileout = NULL, width = NULL, height = NULL, + size_units = 'in', res = 100, close_device = TRUE) { + # If there is any filenames to store the graphics, process them + # to select the right device + if (!is.null(fileout)) { + deviceInfo <- .SelectDevice(fileout = fileout, width = width, height = height, units = size_units, res = res) + saveToFile <- deviceInfo$fun + fileout <- deviceInfo$files + } + + is_single_na <- function(x) ifelse(length(x) > 1, FALSE, is.na(x)) + # Check var + if (!is.list(var) & (is.array(var) || (is_single_na(var)))) { + var <- list(var) + } else if (is.list(var)) { + if (!all(sapply(var, is.array) | sapply(var, is_single_na))) { + stop("Parameter 'var' must be an array or a list of arrays (or NA values).") + } + } else { + stop("Parameter 'var' must be an array or a list of arrays.") + } + + # Check fun + if (length(fun) == 1) { + if (is.function(fun)) { + fun <- as.character(substitute(fun)) + } + if (is.character(fun)) { + fun <- rep(fun, length(var)) + } + } + if (!is.character(fun) || (length(fun) != length(var))) { + stop("Parameter 'fun' must be a single function or a vector of function names, one for each array provided in parameter 'var'.") + } + + # Check special_args + if (!is.null(special_args)) { + if (!is.list(special_args) || any(!sapply(special_args, is.list))) { + stop("Parameter 'special_args' must be a list of lists.") + } else if (length(special_args) != length(var)) { + stop("Parameter 'special_args' must contain a list of special arguments for each array provided in 'var'.") + } + } + + # Check plot_dims + if (is.character(plot_dims) || is.numeric(plot_dims)) { + plot_dims <- replicate(length(var), plot_dims, simplify = FALSE) + } + if (!is.list(plot_dims) || !all(sapply(plot_dims, is.character) | sapply(plot_dims, is.numeric)) || + 
(length(plot_dims) != length(var))) { + stop("Parameter 'plot_dims' must contain a single numeric or character vector with dimension identifiers or a vector for each array provided in parameter 'var'.") + } + + # Check nrow + if (!is.null(nrow)) { + if (!is.numeric(nrow)) { + stop("Parameter 'nrow' must be numeric or NULL.") + } + nrow <- round(nrow) + } + + # Check ncol + if (!is.null(ncol)) { + if (!is.numeric(ncol)) { + stop("Parameter 'ncol' must be numeric or NULL.") + } + ncol <- round(ncol) + } + # Check layout_by_rows + if (!is.logical(layout_by_rows)) { + stop("Parameter 'layout_by_rows' must be logical.") + } + + # Check toptitle + if (is.null(toptitle) || is.na(toptitle)) { + toptitle <- '' + } + if (!is.character(toptitle)) { + stop("Parameter 'toptitle' must be a character string.") + } + + # Check row_titles + if (!is.null(row_titles)) { + if (!is.character(row_titles)) { + stop("Parameter 'row_titles' must be a vector of character strings.") + } + } + + # Check col_titles + if (!is.null(row_titles)) { + if (!is.character(row_titles)) { + stop("Parameter 'row_titles' must be a vector of character strings.") + } + } + + # Check drawleg + if (is.character(drawleg)) { + if (drawleg %in% c('up', 'u', 'U', 'top', 't', 'T', 'north', 'n', 'N')) { + drawleg <- 'N' + } else if (drawleg %in% c('down', 'd', 'D', 'bottom', 'b', 'B', 'south', 's', 'S')) { + drawleg <- 'S' + } else if (drawleg %in% c('right', 'r', 'R', 'east', 'e', 'E')) { + drawleg <- 'E' + } else if (drawleg %in% c('left', 'l', 'L', 'west', 'w', 'W')) { + drawleg <- 'W' + } else { + stop("Parameter 'drawleg' must be either TRUE, FALSE or a valid identifier of a position (see ?PlotMultiMap).") + } + } else if (!is.logical(drawleg)) { + stop("Parameter 'drawleg' must be either TRUE, FALSE or a valid identifier of a position (see ?PlotMultiMap).") + } + if (drawleg != FALSE && all(sapply(var, is_single_na)) && + (is.null(brks) || length(brks) < 2)) { + stop("Either data arrays in 'var' or breaks in 
'brks' must be provided if 'drawleg' is requested.") + } + + # Check the rest of parameters (unless the user simply wants to build an empty layout) + var_limits <- NULL + if (!all(sapply(var, is_single_na))) { + var_limits <- c(min(unlist(var), na.rm = TRUE), max(unlist(var), na.rm = TRUE)) + if ((any(is.infinite(var_limits)) || var_limits[1] == var_limits[2])) { + stop("Arrays in parameter 'var' must contain at least 2 different values.") + } + } + colorbar <- ColorBar(brks, cols, FALSE, subsampleg, bar_limits, + var_limits, triangle_ends, col_inf, col_sup, color_fun, + plot = FALSE, draw_bar_ticks, + draw_separators, triangle_ends_scale, bar_extra_labels, + units, units_scale, bar_label_scale, bar_tick_scale, + bar_extra_margin, bar_label_digits) + + # Check bar_scale + if (!is.numeric(bar_scale)) { + stop("Parameter 'bar_scale' must be numeric.") + } + + # Check bar_left_shift_scale + if (!is.numeric(bar_left_shift_scale)) { + stop("Parameter 'bar_left_shift_scale' must be numeric.") + } + + # Check title_scale + if (!is.numeric(title_scale)) { + stop("Parameter 'title_scale' must be numeric.") + } + + # Check title_margin_scale + if (!is.numeric(title_margin_scale)) { + stop("Parameter 'title_margin_scale' must be numeric.") + } + + # Check title_left_shift_scale + if (!is.numeric(title_left_shift_scale)) { + stop("Parameter 'title_left_shift_scale' must be numeric.") + } + + # Check subtitle_scale + if (!is.numeric(subtitle_scale)) { + stop("Parameter 'subtite_scale' must be numeric.") + } + + # Check subtitle_margin_scale + if (!is.numeric(subtitle_margin_scale)) { + stop("Parameter 'subtite_margin_scale' must be numeric.") + } + + # Check titles + if (!all(sapply(titles, is.character))) { + stop("Parameter 'titles' must be a vector of character strings.") + } + + # Check extra_margin + if (!is.numeric(extra_margin) || length(extra_margin) != 4) { + stop("Parameter 'extra_margin' must be a numeric vector with 4 elements.") + } + + # Check width + if 
(is.null(width)) { + if (is.null(fileout)) { + width <- 7 + } else { + width <- 11 + } + } + if (!is.numeric(width)) { + stop("Parameter 'width' must be numeric.") + } + + # Check height + if (is.null(height)) { + if (is.null(fileout)) { + height <- 7 + } else { + height <- 8 + } + } + if (!is.numeric(height)) { + stop("Parameter 'height' must be numeric.") + } + + # Check close_device + if (!is.logical(close_device)) { + stop("Parameter 'close_device' must be logical.") + } + + # Count the total number of maps and reorder each array of maps to have the lat and lon dimensions at the end. + n_plots <- 0 + plot_array_i <- 1 + for (plot_array in var) { + if (is_single_na(plot_array)) { + n_plots <- n_plots + 1 + } else { + dim_ids <- plot_dims[[plot_array_i]] + if (is.character(dim_ids)) { + dimnames <- NULL + if (!is.null(names(dim(plot_array)))) { + dimnames <- names(dim(plot_array)) + } else if (!is.null(attr(plot_array, 'dimensions'))) { + dimnames <- attr(plot_array, 'dimensions') + } + if (!is.null(dimnames)) { + if (any(!sapply(dim_ids, `%in%`, dimnames))) { + stop("All arrays provided in parameter 'var' must have all the dimensions in 'plot_dims'.") + } + dim_ids <- sapply(dim_ids, function(x) which(dimnames == x)[1]) + var[[plot_array_i]] <- Reorder(var[[plot_array_i]], c((1:length(dim(plot_array)))[-dim_ids], dim_ids)) + } else { + .warning(paste0("Assuming the ", plot_array_i, "th array provided in 'var' has 'plot_dims' as last dimensions (right-most).")) + dims <- tail(c(rep(1, length(dim_ids)), dim(plot_array)), length(dim_ids)) + dim_ids <- tail(1:length(dim(plot_array)), length(dim_ids)) + if (length(dim(var[[plot_array_i]])) < length(dims)) { + dim(var[[plot_array_i]]) <- dims + } + } + } else if (any(dim_ids > length(dim(plot_array)))) { + stop("Parameter 'plot_dims' contains dimension identifiers out of range.") + } + n_plots <- n_plots + prod(dim(plot_array)[-dim_ids]) + #n_plots <- n_plots + prod(head(c(rep(1, length(dim_ids)), dim(plot_array)), 
length(dim(plot_array)))) + if (length(dim(var[[plot_array_i]])) == length(dim_ids)) { + dim(var[[plot_array_i]]) <- c(1, dim(var[[plot_array_i]])) + dim_ids <- dim_ids + 1 + } + plot_dims[[plot_array_i]] <- dim_ids + } + plot_array_i <- plot_array_i + 1 + } + if (is.null(nrow) && is.null(ncol)) { + ncol <- ceiling(sqrt(n_plots)) + nrow <- ceiling(n_plots/ncol) + } else if (is.null(ncol)) { + ncol <- ceiling(n_plots/nrow) + } else if (is.null(nrow)) { + nrow <- ceiling(n_plots/ncol) + } else if (nrow * ncol < n_plots) { + stop("There are more arrays to plot in 'var' than cells defined by 'nrow' x 'ncol'.") + } + + if (is.logical(drawleg) && drawleg) { + if (nrow > ncol) { + drawleg <- 'S' + } else { + drawleg <- 'E' + } + } + vertical <- drawleg %in% c('E', 'W') + + # Open connection to graphical device + if (!is.null(fileout)) { + saveToFile(fileout) + } else if (names(dev.cur()) == 'null device') { + dev.new(units = size_units, res = res, width = width, height = height) + } else if (prod(par('mfrow')) > 1) { + dev.new(units = units, res = res, width = width, height = height) + } + + # Take size of device and set up layout: + # --------------------------------------------- + # |0000000000000000000000000000000000000000000| + # |0000000000000000 TOP TITLE 0000000000000000| + # |0000000000000000000000000000000000000000000| + # |-------------------------------------------| + # |00000|0000000000000000000000000000000000000| + # |00000|000000000000 ROW TITLES 0000000000000| + # |00000|0000000000000000000000000000000000000| + # |00000|-------------------------------------| + # |0 0|222222222222222222|333333333333333333| + # |0 C 0|222222222222222222|333333333333333333| + # |0 O 0|222222222222222222|333333333333333333| + # |0 L 0|2222 FIGURE 1 2222|3333 FIGURE 2 3333| + # |0 0|222222222222222222|333333333333333333| + # |0 T 0|222222222222222222|333333333333333333| + # |0 I 0|222222222222222222|333333333333333333| + # |0 T 0|-------------------------------------| + # |0 L 
0|444444444444444444|555555555555555555| + # |0 S 0|444444444444444444|555555555555555555| + # |0 0|444444444444444444|555555555555555555| + # |00000|4444 FIGURE 3 4444|5555 FIGURE 4 5555| + # |00000|444444444444444444|555555555555555555| + # |00000|444444444444444444|555555555555555555| + # |00000|444444444444444444|555555555555555555| + # |-------------------------------------------| + # |1111111111111111111111111111111111111111111| + # |1111111111111111 COLOR BAR 1111111111111111| + # |1111111111111111111111111111111111111111111| + # --------------------------------------------- + device_size <- par('din') + device_size[1] <- device_size[1] - sum(extra_margin[c(2, 4)]) + device_size[2] <- device_size[2] - sum(extra_margin[c(1, 3)]) + cs <- char_size <- par('csi') + title_cex <- 2.5 * title_scale + title_margin <- 0.5 * title_cex * title_margin_scale + subtitle_cex <- 1.5 * subtitle_scale + subtitle_margin <- 0.5 * sqrt(nrow * ncol) * subtitle_cex * subtitle_margin_scale + mat_layout <- 1:(nrow * ncol) + if (drawleg != FALSE) { + if (fun == 'PlotMostLikelyQuantileMap') { #multi_colorbar + multi_colorbar <- TRUE + cat_dim <- list(...)$cat_dim + nmap <- as.numeric(dim(var[[1]])[cat_dim]) + mat_layout <- mat_layout + nmap + } else { + multi_colorbar <- FALSE + mat_layout <- mat_layout + 1 + } + } + mat_layout <- matrix(mat_layout, nrow, ncol, byrow = layout_by_rows) + fsu <- figure_size_units <- 10 # unitless + widths <- rep(fsu, ncol) + heights <- rep(fsu, nrow) + # Useless +# n_figures <- nrow * ncol + + if (drawleg != FALSE) { + if (drawleg == 'N') { + mat_layout <- rbind(rep(1, dim(mat_layout)[2]), mat_layout) + heights <- c(round(bar_scale * 2 * nrow), heights) + } else if (drawleg == 'S') { + if (multi_colorbar) { + new_mat_layout <- c() + for (i_col in 1:ncol) { + new_mat_layout <- c(new_mat_layout, rep(mat_layout[, i_col], nmap)) + } + new_mat_layout <- matrix(new_mat_layout, nrow, nmap * ncol) + colorbar_row <- rep(1:nmap, each = ncol) + mat_layout <- 
rbind(new_mat_layout, as.numeric(colorbar_row)) + widths <- rep(widths, nmap) + } else { + mat_layout <- rbind(mat_layout, rep(1, dim(mat_layout)[2])) + } + heights <- c(heights, round(bar_scale * 2 * nrow)) + } else if (drawleg == 'W') { + mat_layout <- cbind(rep(1, dim(mat_layout)[1]), mat_layout) + widths <- c(round(bar_scale * 3 * ncol), widths) + } else if (drawleg == 'E') { + mat_layout <- cbind(mat_layout, rep(1, dim(mat_layout)[1])) + widths <- c(widths, round(bar_scale * 3 * ncol)) + } + # Useless +# n_figures <- n_figures + 1 + } + + # row and col titles + if (length(row_titles) > 0) { + mat_layout <- cbind(rep(0, dim(mat_layout)[1]), mat_layout) + widths <- c(((subtitle_cex + subtitle_margin / 2) * cs / device_size[1]) * ncol * fsu, widths) + } + if (length(col_titles) > 0) { + mat_layout <- rbind(rep(0, dim(mat_layout)[2]), mat_layout) + heights <- c(((subtitle_cex + subtitle_margin) * cs / device_size[2]) * nrow * fsu, heights) + } + # toptitle + if (toptitle != '') { + mat_layout <- rbind(rep(0, dim(mat_layout)[2]), mat_layout) + heights <- c(((title_cex + title_margin) * cs / device_size[2]) * nrow * fsu, heights) + } + par(oma = extra_margin) + layout(mat_layout, widths, heights) + # Draw the color bar + if (drawleg != FALSE) { + if (length(row_titles) > 0) { + bar_extra_margin[2] <- bar_extra_margin[2] + (subtitle_cex + subtitle_margin / 2) * + bar_left_shift_scale + } + + if (multi_colorbar) { # multiple colorbar + if (!is.null(list(...)$bar_titles)) { + bar_titles <- list(...)$bar_titles + } else { + bar_titles <- NULL + } + multi_ColorBar(nmap = nmap, + brks = brks, cols = cols, vertical = vertical, subsampleg = subsampleg, + bar_limits = bar_limits, var_limits = var_limits, + triangle_ends = triangle_ends, plot = TRUE, + draw_separators = draw_separators, + bar_titles = bar_titles, title_scale = units_scale, + label_scale = bar_label_scale, extra_margin = bar_extra_margin) + + } else { # one colorbar + ColorBar(brks = colorbar$brks, cols = 
colorbar$cols, vertical = vertical, subsampleg = subsampleg, + bar_limits = bar_limits, var_limits = var_limits, + triangle_ends = triangle_ends, col_inf = colorbar$col_inf, + col_sup = colorbar$col_sup, color_fun = color_fun, plot = TRUE, draw_ticks = draw_bar_ticks, + draw_separators = draw_separators, triangle_ends_scale = triangle_ends_scale, + extra_labels = bar_extra_labels, + title = units, title_scale = units_scale, label_scale = bar_label_scale, tick_scale = bar_tick_scale, + extra_margin = bar_extra_margin, label_digits = bar_label_digits) + + } + } + + # Draw titles + if (toptitle != '' || length(col_titles) > 0 || length(row_titles) > 0) { + plot(0, type = 'n', ann = FALSE, axes = FALSE, xaxs = 'i', yaxs = 'i', + xlim = c(0, 1), ylim = c(0, 1)) + width_lines <- par('fin')[1] / par('csi') + plot_lines <- par('pin')[1] / par('csi') + plot_range <- par('xaxp')[2] - par('xaxp')[1] + size_units_per_line <- plot_range / plot_lines + if (toptitle != '') { + title_x_center <- par('xaxp')[1] - par('mar')[2] * size_units_per_line + + ncol * width_lines * size_units_per_line / 2 + if (length(row_titles) > 0) { + title_x_center <- title_x_center - (1 - title_left_shift_scale) * + (subtitle_cex + subtitle_margin) / 2 * size_units_per_line + } + title_y_center <- par('mar')[3] + (title_margin + title_cex) / 2 + if (length(col_titles > 0)) { + title_y_center <- title_y_center + (subtitle_margin + subtitle_cex) + } + mtext(toptitle, cex = title_cex, line = title_y_center, at = title_x_center, + padj = 0.5) + } + if (length(col_titles) > 0) { + t_x_center <- par('xaxp')[1] - par('mar')[2] * size_units_per_line + for (t in 1:ncol) { + mtext(col_titles[t], cex = subtitle_cex, + line = par('mar')[3] + (subtitle_margin + subtitle_cex) / 2, + at = t_x_center + (t - 0.5) * width_lines * size_units_per_line, + padj = 0.5) + } + } + height_lines <- par('fin')[2] / par('csi') + plot_lines <- par('pin')[2] / par('csi') + plot_range <- par('yaxp')[2] - par('yaxp')[1] + 
size_units_per_line <- plot_range / plot_lines + if (length(row_titles) > 0) { + t_y_center <- par('yaxp')[1] - par('mar')[1] * size_units_per_line + for (t in 1:nrow) { + mtext(row_titles[t], cex = subtitle_cex, + line = par('mar')[2] + (subtitle_margin + subtitle_cex) / 2, + at = t_y_center - (t - 1.5) * height_lines * size_units_per_line, + padj = 0.5, side = 2) + } + } + par(new = TRUE) + } + + array_number <- 1 + plot_number <- 1 + # For each array provided in var + lapply(var, function(x) { + if (is_single_na(x)) { + if (!all(sapply(var[array_number:length(var)], is_single_na))) { + plot.new() + par(new = FALSE) + } + plot_number <<- plot_number + 1 + } else { + if (is.character(plot_dims[[array_number]])) { + plot_dim_indices <- which(names(dim(x)) %in% plot_dims[[array_number]]) + } else { + plot_dim_indices <- plot_dims[[array_number]] + } + # For each of the arrays provided in that array + apply(x, (1:length(dim(x)))[-plot_dim_indices], + function(y) { + # Do the plot. colorbar is not drew. + fun_args <- c(list(y, toptitle = titles[plot_number], drawleg = FALSE), list(...), + special_args[[array_number]]) +# funct <- fun[[array_number]] + if (fun[[array_number]] %in% c('PlotEquiMap', 'PlotStereoMap', 'PlotSection')) { + fun_args <- c(fun_args, list(brks = colorbar$brks, cols = colorbar$cols, + col_inf = colorbar$col_inf, + col_sup = colorbar$col_sup)) + } else if (fun[[array_number]] %in% 'PlotMostLikelyQuantileMap') { + #TODO: pre-generate colorbar params? 
like above + fun_args <- c(fun_args, list(brks = brks, cols = cols)) + } + do.call(fun[[array_number]], fun_args) + plot_number <<- plot_number + 1 + }) + } + array_number <<- array_number + 1 + }) + + # If the graphic was saved to file, close the connection with the device + if (!is.null(fileout) && close_device) dev.off() + + invisible(list(brks = colorbar$brks, cols = colorbar$cols, + col_inf = colorbar$col_inf, col_sup = colorbar$col_sup, + layout_matrix = mat_layout)) +} diff --git a/modules/Visualization/tmp/PlotMostLikelyQuantileMap.R b/modules/Visualization/tmp/PlotMostLikelyQuantileMap.R new file mode 100644 index 0000000000000000000000000000000000000000..9f9f1914d8de30f26adc1b285a3c60a41f264b6a --- /dev/null +++ b/modules/Visualization/tmp/PlotMostLikelyQuantileMap.R @@ -0,0 +1,196 @@ +#'Plot Maps of Most Likely Quantiles +#' +#'@author Veronica Torralba, \email{veronica.torralba@bsc.es}, Nicolau Manubens, \email{nicolau.manubens@bsc.es} +#'@description This function receives as main input (via the parameter \code{probs}) a collection of longitude-latitude maps, each containing the probabilities (from 0 to 1) of the different grid cells of belonging to a category. As many categories as maps provided as inputs are understood to exist. The maps of probabilities must be provided on a common rectangular regular grid, and a vector with the longitudes and a vector with the latitudes of the grid must be provided. The input maps can be provided in two forms, either as a list of multiple two-dimensional arrays (one for each category) or as a three-dimensional array, where one of the dimensions corresponds to the different categories. +#' +#'@param probs a list of bi-dimensional arrays with the named dimensions 'latitude' (or 'lat') and 'longitude' (or 'lon'), with equal size and in the same order, or a single tri-dimensional array with an additional dimension (e.g. 'bin') for the different categories. 
The arrays must contain probability values between 0 and 1, and the probabilities for all categories of a grid cell should not exceed 1 when added. +#'@param lon a numeric vector with the longitudes of the map grid, in the same order as the values along the corresponding dimension in \code{probs}. +#'@param lat a numeric vector with the latitudes of the map grid, in the same order as the values along the corresponding dimension in \code{probs}. +#'@param cat_dim the name of the dimension along which the different categories are stored in \code{probs}. This only applies if \code{probs} is provided in the form of 3-dimensional array. The default expected name is 'bin'. +#'@param bar_titles vector of character strings with the names to be drawn on top of the color bar for each of the categories. As many titles as categories provided in \code{probs} must be provided. +#'@param col_unknown_cat character string with a colour representation of the colour to be used to paint the cells for which no category can be clearly assigned. Takes the value 'white' by default. +#'@param drawleg Where to draw the common colour bar. Can take values TRUE, +#' FALSE or:\cr +#' 'up', 'u', 'U', 'top', 't', 'T', 'north', 'n', 'N'\cr +#' 'down', 'd', 'D', 'bottom', 'b', 'B', 'south', 's', 'S' (default)\cr +#' 'right', 'r', 'R', 'east', 'e', 'E'\cr +#' 'left', 'l', 'L', 'west', 'w', 'W' +#'@param ... additional parameters to be sent to \code{PlotCombinedMap} and \code{PlotEquiMap}. 
+#'@seealso \code{PlotCombinedMap} and \code{PlotEquiMap} +#' +#'@importFrom maps map +#'@importFrom graphics box image layout mtext par plot.new +#'@importFrom grDevices adjustcolor bmp colorRampPalette dev.cur dev.new dev.off hcl jpeg pdf png postscript svg tiff +#'@examples +#'# Simple example +#'x <- array(1:(20 * 10), dim = c(lat = 10, lon = 20)) / 200 +#'a <- x * 0.6 +#'b <- (1 - x) * 0.6 +#'c <- 1 - (a + b) +#'lons <- seq(0, 359.5, length = 20) +#'lats <- seq(-89.5, 89.5, length = 10) +#'PlotMostLikelyQuantileMap(list(a, b, c), lons, lats, +#' toptitle = 'Most likely tercile map', +#' bar_titles = paste('% of belonging to', c('a', 'b', 'c')), +#' brks = 20, width = 10, height = 8) +#' +#'# More complex example +#'n_lons <- 40 +#'n_lats <- 20 +#'n_timesteps <- 100 +#'n_bins <- 4 +#' +#'# 1. Generation of sample data +#'lons <- seq(0, 359.5, length = n_lons) +#'lats <- seq(-89.5, 89.5, length = n_lats) +#' +#'# This function builds a 3-D gaussian at a specified point in the map. +#'make_gaussian <- function(lon, sd_lon, lat, sd_lat) { +#' w <- outer(lons, lats, function(x, y) dnorm(x, lon, sd_lon) * dnorm(y, lat, sd_lat)) +#' min_w <- min(w) +#' w <- w - min_w +#' w <- w / max(w) +#' w <- t(w) +#' names(dim(w)) <- c('lat', 'lon') +#' w +#'} +#' +#'# This function generates random time series (with values ranging 1 to 5) +#'# according to 2 input weights. +#'gen_data <- function(w1, w2, n) { +#' r <- sample(1:5, n, +#' prob = c(.05, .9 * w1, .05, .05, .9 * w2), +#' replace = TRUE) +#' r <- r + runif(n, -0.5, 0.5) +#' dim(r) <- c(time = n) +#' r +#'} +#' +#'# We build two 3-D gaussians. +#'w1 <- make_gaussian(120, 80, 20, 30) +#'w2 <- make_gaussian(260, 60, -10, 40) +#' +#'# We generate sample data (with dimensions time, lat, lon) according +#'# to the generated gaussians +#'sample_data <- multiApply::Apply(list(w1, w2), NULL, +#' gen_data, n = n_timesteps)$output1 +#' +#'# 2. 
Binning sample data +#'prob_thresholds <- 1:n_bins / n_bins +#'prob_thresholds <- prob_thresholds[1:(n_bins - 1)] +#'thresholds <- quantile(sample_data, prob_thresholds) +#' +#'binning <- function(x, thresholds) { +#' n_samples <- length(x) +#' n_bins <- length(thresholds) + 1 +#' +#' thresholds <- c(thresholds, max(x)) +#' result <- 1:n_bins +#' lower_threshold <- min(x) - 1 +#' for (i in 1:n_bins) { +#' result[i] <- sum(x > lower_threshold & x <= thresholds[i]) / n_samples +#' lower_threshold <- thresholds[i] +#' } +#' +#' dim(result) <- c(bin = n_bins) +#' result +#'} +#' +#'bins <- multiApply::Apply(sample_data, 'time', binning, thresholds)$output1 +#' +#'# 3. Plotting most likely quantile/bin +#'PlotMostLikelyQuantileMap(bins, lons, lats, +#' toptitle = 'Most likely quantile map', +#' bar_titles = paste('% of belonging to', letters[1:n_bins]), +#' mask = 1 - (w1 + w2 / max(c(w1, w2))), +#' brks = 20, width = 10, height = 8) +#' +#'@export +PlotMostLikelyQuantileMap <- function(probs, lon, lat, cat_dim = 'bin', + bar_titles = NULL, + col_unknown_cat = 'white', drawleg = T, + ...) 
{ + # Check probs + error <- FALSE + if (is.list(probs)) { + if (length(probs) < 1) { + stop("Parameter 'probs' must be of length >= 1 if provided as a list.") + } + check_fun <- function(x) { + is.numeric(x) && (length(dim(x)) == 2) + } + if (!all(sapply(probs, check_fun))) { + error <- TRUE + } + ref_dims <- dim(probs[[1]]) + equal_dims <- all(sapply(probs, function(x) identical(dim(x), ref_dims))) + if (!equal_dims) { + stop("All arrays in parameter 'probs' must have the same dimension ", + "sizes and names when 'probs' is provided as a list of arrays.") + } + num_probs <- length(probs) + probs <- unlist(probs) + dim(probs) <- c(ref_dims, map = num_probs) + cat_dim <- 'map' + } + if (!is.numeric(probs)) { + error <- TRUE + } + if (is.null(dim(probs))) { + error <- TRUE + } + if (length(dim(probs)) != 3) { + error <- TRUE + } + if (error) { + stop("Parameter 'probs' must be either a numeric array with 3 dimensions ", + " or a list of numeric arrays of the same size with the 'lon' and ", + "'lat' dimensions.") + } + dimnames <- names(dim(probs)) + + # Check cat_dim + if (is.character(cat_dim)) { + if (is.null(dimnames)) { + stop("Specified a dimension name in 'cat_dim' but no dimension names provided ", + "in 'probs'.") + } + cat_dim <- which(dimnames == cat_dim) + if (length(cat_dim) < 1) { + stop("Dimension 'cat_dim' not found in 'probs'.") + } + cat_dim <- cat_dim[1] + } else if (!is.numeric(cat_dim)) { + stop("Parameter 'cat_dim' must be either a numeric value or a ", + "dimension name.") + } + if (length(cat_dim) != 1) { + stop("Parameter 'cat_dim' must be of length 1.") + } + cat_dim <- round(cat_dim) + nprobs <- dim(probs)[cat_dim] + + # Check bar_titles + if (is.null(bar_titles)) { + if (nprobs == 3) { + bar_titles <- c("Below normal (%)", "Normal (%)", "Above normal (%)") + } else if (nprobs == 5) { + bar_titles <- c("Low (%)", "Below normal (%)", + "Normal (%)", "Above normal (%)", "High (%)") + } else { + bar_titles <- paste0("Cat. 
", 1:nprobs, " (%)") + } + } + + minimum_value <- ceiling(1 / nprobs * 10 * 1.1) * 10 + + # By now, the PlotCombinedMap function is included below in this file. + # In the future, PlotCombinedMap will be part of s2dverification and will + # be properly imported. + PlotCombinedMap(probs * 100, lon, lat, map_select_fun = max, + display_range = c(minimum_value, 100), + map_dim = cat_dim, + bar_titles = bar_titles, + col_unknown_map = col_unknown_cat, + drawleg = drawleg, ...) +} diff --git a/modules/test_decadal.R b/modules/test_decadal.R index 8077b7e200142a638568e5e30ff60ac71d209407..01cf2d9201191eab287ab5629a520a01261f73a2 100644 --- a/modules/test_decadal.R +++ b/modules/test_decadal.R @@ -3,6 +3,7 @@ source("modules/Loading/Loading_decadal.R") source("modules/Calibration/Calibration.R") source("modules/Skill/Skill.R") source("modules/Saving/Saving.R") +source("modules/Visualization/Visualization.R") recipe_file <- "modules/Loading/testing_recipes/recipe_decadal.yml" recipe <- read_yaml(recipe_file) @@ -10,11 +11,20 @@ archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive_decadal.yml"))$ar # Load datasets data <- load_datasets(recipe_file) + # Calibrate datasets calibrated_data <- calibrate_datasets(data, recipe) + # Compute skill metrics skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe) + # Compute percentiles and probability bins probabilities <- compute_probabilities(calibrated_data$hcst, recipe) + # Export all data to netCDF save_data(recipe, archive, data, calibrated_data, skill_metrics, probabilities) + +# Plot data +plot_data(recipe, archive, data, calibrated_data, skill_metrics, + probabilities, significance = T) + diff --git a/modules/test_victoria.R b/modules/test_seasonal.R similarity index 74% rename from modules/test_victoria.R rename to modules/test_seasonal.R index eb5933b3547a7d7e2ff10e8d251aee0bed0a538f..5f59794f4e6fc6678119acd2d1979f27ec869663 100644 --- a/modules/test_victoria.R +++ b/modules/test_seasonal.R 
@@ -3,8 +3,9 @@ source("modules/Loading/Loading.R") source("modules/Calibration/Calibration.R") source("modules/Skill/Skill.R") source("modules/Saving/Saving.R") +source("modules/Visualization/Visualization.R") -recipe_file <- "modules/Loading/testing_recipes/recipe_4.yml" +recipe_file <- "modules/Loading/testing_recipes/recipe_system7c3s-tas.yml" recipe <- read_yaml(recipe_file) archive <- read_yaml(paste0(recipe$Run$code_dir, "conf/archive.yml"))$archive @@ -18,3 +19,6 @@ skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe) probabilities <- compute_probabilities(calibrated_data$hcst, recipe) # Export all data to netCDF save_data(recipe, archive, data, calibrated_data, skill_metrics, probabilities) +# Plot data +plot_data(recipe, archive, data, calibrated_data, skill_metrics, + probabilities, significance = T) diff --git a/tests/testthat/test-decadal_daily_1.R b/tests/testthat/test-decadal_daily_1.R index 720662d49a844ab4c4fc95e44bf4fe7094b95efe..a78fd1350ae3f8629929c81d4c529b0daa4f511b 100644 --- a/tests/testthat/test-decadal_daily_1.R +++ b/tests/testthat/test-decadal_daily_1.R @@ -65,11 +65,11 @@ names(data$obs) ) expect_equal( dim(data$hcst$data), -c(dat = 1, var = 1, ensemble = 3, sday = 1, sweek = 1, syear = 3, time = 90, latitude = 7, longitude = 11) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 3, time = 90, latitude = 7, longitude = 11, ensemble = 3) ) expect_equal( dim(data$fcst$data), -c(dat = 1, var = 1, ensemble = 3, sday = 1, sweek = 1, syear = 2, time = 90, latitude = 7, longitude = 11) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 2, time = 90, latitude = 7, longitude = 11, ensemble = 3) ) expect_equal( dim(data$hcst$Dates$start), @@ -77,12 +77,12 @@ c(sday = 1, sweek = 1, syear = 3, time = 90) ) # hcst data expect_equal( -as.vector(drop(data$hcst$data)[1, 2:3, 1:3, 1, 1]), +as.vector(aperm(drop(data$hcst$data), c(5, 1:4))[1, 2:3, 1:3, 1, 1]), c(298.5787, 293.6479, 298.5042, 293.7802, 297.8072, 293.0764), tolerance 
= 0.0001 ) expect_equal( -as.vector(drop(data$hcst$data)[2, , 89:90, 1, 1]), +as.vector(aperm(drop(data$hcst$data), c(5, 1:4))[2, , 89:90, 1, 1]), c(301.6978, 308.9792, 308.4501, 302.1620, 307.6034, 307.6388), tolerance = 0.0001 ) @@ -99,12 +99,12 @@ tolerance = 0.0001 # fcst data expect_equal( -as.vector(drop(data$fcst$data)[1, , 1:3, 1, 1]), +as.vector(aperm(drop(data$fcst$data), c(5, 1:4))[1, , 1:3, 1, 1]), c(295.0745, 291.1006, 296.2279, 291.6309, 295.3123, 290.8995), tolerance = 0.0001 ) expect_equal( -as.vector(drop(data$fcst$data)[2, , 89:90, 1, 1]), +as.vector(aperm(drop(data$fcst$data), c(5, 1:4))[2, , 89:90, 1, 1]), c(305.3428, 305.0657, 305.5445, 305.5681), tolerance = 0.0001 ) diff --git a/tests/testthat/test-decadal_monthly_1.R b/tests/testthat/test-decadal_monthly_1.R index 6f43aaa1a2782b9511a246a25c72f8b4eac63486..7bb5031e3470854b269dcde36c76135a396f59ad 100644 --- a/tests/testthat/test-decadal_monthly_1.R +++ b/tests/testthat/test-decadal_monthly_1.R @@ -6,6 +6,7 @@ source("modules/Loading/Loading_decadal.R") source("modules/Calibration/Calibration.R") source("modules/Skill/Skill.R") source("modules/Saving/Saving.R") +source("modules/Visualization/Visualization.R") recipe_file <- "tests/recipes/recipe-decadal_monthly_1.yml" recipe <- read_yaml(recipe_file) @@ -35,6 +36,14 @@ save_data(recipe = recipe, data = data, calibrated_data = calibrated_data, skill_metrics = skill_metrics, probabilities = probs, archive = archive) ))}) +# Plotting +suppressWarnings({invisible(capture.output( +plot_data(recipe = recipe, archive = archive, data = data, + calibrated_data = calibrated_data, skill_metrics = skill_metrics, + probabilities = probs, significance = T) +))}) + + outdir <- get_dir(recipe) #====================================== @@ -75,18 +84,18 @@ names(data$obs) ) expect_equal( dim(data$hcst$data), -c(dat = 1, var = 1, ensemble = 2, sday = 1, sweek = 1, syear = 4, time = 3, latitude = 5, longitude = 4) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 
4, time = 3, latitude = 5, longitude = 4, ensemble = 2) ) expect_equal( dim(data$fcst$data), -c(dat = 1, var = 1, ensemble = 2, sday = 1, sweek = 1, syear = 1, time = 3, latitude = 5, longitude = 4) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 1, time = 3, latitude = 5, longitude = 4, ensemble = 2) ) expect_equal( dim(data$hcst$Dates$start), c(sday = 1, sweek = 1, syear = 4, time = 3) ) expect_equal( -as.vector(drop(data$hcst$data)[,1:2,1,2,3]), +as.vector(aperm(drop(data$hcst$data), c(5, 1:4))[, 1:2,1,2,3]), c(291.3831, 291.6227, 292.3012, 290.9779), tolerance = 0.0001 ) @@ -140,11 +149,11 @@ class(calibrated_data$fcst), ) expect_equal( dim(calibrated_data$hcst$data), -c(dat = 1, var = 1, ensemble = 2, sday = 1, sweek = 1, syear = 4, time = 3, latitude = 5, longitude = 4) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 4, time = 3, latitude = 5, longitude = 4, ensemble = 2) ) expect_equal( dim(calibrated_data$fcst$data), -c(dat = 1, var = 1, ensemble = 2, sday = 1, sweek = 1, syear = 1, time = 3, latitude = 5, longitude = 4) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 1, time = 3, latitude = 5, longitude = 4, ensemble = 2) ) expect_equal( mean(calibrated_data$fcst$data), @@ -157,7 +166,7 @@ mean(calibrated_data$hcst$data), tolerance = 0.0001 ) expect_equal( -as.vector(drop(calibrated_data$hcst$data)[1, , 2, 3, 4]), +as.vector(aperm(drop(calibrated_data$hcst$data), c(5, 1:4))[1, , 2, 3, 4]), c(286.3895, 286.6408, 290.6652, 288.3759), tolerance = 0.0001 ) @@ -251,7 +260,7 @@ test_that("4. 
Saving", { expect_equal( list.files(outdir), -c("tas_19911101.nc", "tas_19921101.nc", "tas_19931101.nc", "tas_19941101.nc", "tas_20211101.nc", +c("plots", "tas_19911101.nc", "tas_19921101.nc", "tas_19931101.nc", "tas_19941101.nc", "tas_20211101.nc", "tas-obs_19911101.nc", "tas-obs_19921101.nc", "tas-obs_19931101.nc", "tas-obs_19941101.nc", "tas-percentiles_month11.nc", "tas-probs_19911101.nc", "tas-probs_19921101.nc", "tas-probs_19931101.nc", "tas-probs_19941101.nc", "tas-skill_month11.nc") @@ -261,7 +270,17 @@ c("tas_19911101.nc", "tas_19921101.nc", "tas_19931101.nc", "tas_19941101.nc", "t #) +}) + + +test_that("5. Visualization", { +expect_equal( +list.files(paste0(outdir, "/plots/")), +c("forecast_ensemble_mean.png", "forecast_most_likely_tercile.png", + "rpss.png") +) + }) # Delete files -unlink(paste0(outdir, list.files(outdir))) +unlink(paste0(outdir, list.files(outdir, recursive = TRUE))) diff --git a/tests/testthat/test-decadal_monthly_2.R b/tests/testthat/test-decadal_monthly_2.R index 8fd3525d17f1997e760d95246564d54450a48c49..ac4f2fffd610799375a5787e4559f82c904b3feb 100644 --- a/tests/testthat/test-decadal_monthly_2.R +++ b/tests/testthat/test-decadal_monthly_2.R @@ -22,7 +22,7 @@ suppressWarnings({invisible(capture.output( ))}) # Compute skill metrics -suppressWarnings({invisible(capture.output( +suppressMessages({invisible(capture.output( skill_metrics <- compute_skill_metrics(calibrated_data$hcst, data$obs, recipe) ))}) suppressWarnings({invisible(capture.output( @@ -67,11 +67,11 @@ names(data$obs) ) expect_equal( dim(data$hcst$data), -c(dat = 1, var = 1, ensemble = 3, sday = 1, sweek = 1, syear = 3, time = 14, latitude = 8, longitude = 5) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 3, time = 14, latitude = 8, longitude = 5, ensemble = 3) ) expect_equal( dim(data$fcst$data), -c(dat = 1, var = 1, ensemble = 3, sday = 1, sweek = 1, syear = 2, time = 14, latitude = 8, longitude = 5) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 2, time = 14, 
latitude = 8, longitude = 5, ensemble = 3) ) expect_equal( dim(data$hcst$Dates$start), @@ -83,7 +83,7 @@ c(sday = 1, sweek = 1, syear = 3, time = 14) #) # hcst data expect_equal( -as.vector(drop(data$hcst$data)[1, , 1:2, 2, 2]), +as.vector(aperm(drop(data$hcst$data), c(5, 1:4))[1,, 1:2, 2, 2]), c(272.8613, 271.0689, 270.8007, 273.5594, 272.1561, 272.8729), tolerance = 0.0001 ) @@ -99,7 +99,7 @@ tolerance = 0.0001 ) # fcst data expect_equal( -as.vector(drop(data$fcst$data)[1, , 1:2, 2, 2]), +as.vector(aperm(drop(data$fcst$data), c(5, 1:4))[1, , 1:2, 2, 2]), c(271.7708, 271.8424, 272.4980, 273.5842), tolerance = 0.0001 ) diff --git a/tests/testthat/test-decadal_monthly_3.R b/tests/testthat/test-decadal_monthly_3.R index b6be7f2b815b20130f6a30eb289bf844e0ff7ad2..21665f6e8dba9b8fb2de86f49d359392e9351c2c 100644 --- a/tests/testthat/test-decadal_monthly_3.R +++ b/tests/testthat/test-decadal_monthly_3.R @@ -63,7 +63,7 @@ names(data$obs) ) expect_equal( dim(data$hcst$data), -c(dat = 1, var = 1, ensemble = 3, sday = 1, sweek = 1, syear = 4, time = 3, latitude = 25, longitude = 16) +c(dat = 1, var = 1, sday = 1, sweek = 1, syear = 4, time = 3, latitude = 25, longitude = 16, ensemble = 3) ) expect_equal( dim(data$hcst$Dates$start), @@ -71,7 +71,7 @@ c(sday = 1, sweek = 1, syear = 4, time = 3) ) # hcst data expect_equal( -as.vector(drop(data$hcst$data)[3, , 2, 2, 2]), +as.vector(aperm(drop(data$hcst$data), c(5, 1:4))[3, , 2, 2, 2]), c(278.4305, 279.5065, 280.4110, 278.7608), tolerance = 0.0001 ) @@ -113,7 +113,7 @@ names(calibrated_data), c("hcst", "fcst") ) expect_equal( -as.vector(drop(calibrated_data$hcst$data)[3, , 2, 2, 2]), +as.vector(aperm(drop(calibrated_data$hcst$data), c(5, 1:4))[3, , 2, 2, 2]), c(279.0648, 281.0578, 282.6535, 280.3137), tolerance = 0.0001 ) diff --git a/tests/testthat/test-seasonal_monthly.R b/tests/testthat/test-seasonal_monthly.R index 0f9149b9cc1a8b54be6123d265827e0a6b199993..90938d6201c62a4b0db6efff287aed8719d8cf72 100644 --- 
a/tests/testthat/test-seasonal_monthly.R +++ b/tests/testthat/test-seasonal_monthly.R @@ -4,6 +4,7 @@ source("modules/Loading/Loading.R") source("modules/Calibration/Calibration.R") source("modules/Skill/Skill.R") source("modules/Saving/Saving.R") +source("modules/Visualization/Visualization.R") recipe_file <- "tests/recipes/recipe-seasonal_monthly_1.yml" recipe <- read_yaml(recipe_file) @@ -33,6 +34,12 @@ save_data(recipe = recipe, data = data, calibrated_data = calibrated_data, skill_metrics = skill_metrics, probabilities = probs, archive = archive) ))}) +# Plotting +suppressWarnings({invisible(capture.output( +plot_data(recipe = recipe, archive = archive, data = data, + calibrated_data = calibrated_data, skill_metrics = skill_metrics, + probabilities = probs, significance = T) +))}) outdir <- get_dir(recipe) # ------- TESTS -------- @@ -208,7 +215,7 @@ test_that("4. Saving", { expect_equal( list.files(outdir), -c("tas_19931101.nc", "tas_19941101.nc", "tas_19951101.nc", "tas_19961101.nc", "tas_20201101.nc", +c("plots", "tas_19931101.nc", "tas_19941101.nc", "tas_19951101.nc", "tas_19961101.nc", "tas_20201101.nc", "tas-corr_month11.nc", "tas-obs_19931101.nc", "tas-obs_19941101.nc", "tas-obs_19951101.nc", "tas-obs_19961101.nc", "tas-percentiles_month11.nc", "tas-probs_19931101.nc", "tas-probs_19941101.nc", "tas-probs_19951101.nc", "tas-probs_19961101.nc", "tas-skill_month11.nc") @@ -216,5 +223,14 @@ c("tas_19931101.nc", "tas_19941101.nc", "tas_19951101.nc", "tas_19961101.nc", "t }) +test_that("5. 
Visualization", { +expect_equal( +list.files(paste0(outdir, "/plots/")), +c("crpss.png", "enscorr_specs.png", "enscorr.png", "forecast_ensemble_mean.png", + "forecast_most_likely_tercile.png", "rpss.png") +) + +}) + # Delete files -unlink(paste0(outdir, list.files(outdir))) +unlink(paste0(outdir, list.files(outdir, recursive = TRUE))) diff --git a/tools/libs.R b/tools/libs.R index 2b29835927f2443e7f5e8f67cacc345db0436633..a0767f76c79105d9e60cc88cc76bde875136abc4 100644 --- a/tools/libs.R +++ b/tools/libs.R @@ -12,6 +12,8 @@ library(easyNCDF) library(CSTools) library(lubridate) library(PCICt) +library(RColorBrewer) +library(grDevices) # # library(parallel) # library(pryr) # To check mem usage.