import json
import os
import string

import matplotlib.pyplot as plt
import numpy as np
from matplotlib.gridspec import GridSpec
def compute():
"""Build figure 2: cumulative warming distributions (constrained vs. raw CMIP6) for the Western and Eastern Mediterranean, one row per constraint method, annotated with the RPSS and residual-correlation (ResCor) scores."""
path = "/esarchive/scratch/jcos/constraints_paper_figures_data_scritps/files"
alphabet = np.array([f"{i})" for i in string.ascii_lowercase])[:10].reshape(5,2)
regions = ["WMed", "EMed"]
methods = ["CWIP", "OBS_NAtl", "OBS_Glob", "DP_NAtl", "DP_Glob"]
bold=[[False, False],[False, True],[False, True],[True, False],[True, True]]
signis=[[-0.04, -0.10],[-0.08, 0.08],[-0.04, 0.20],[0.15, 0.05],[0.26, 0.21]]
bold_corr=[[True, False],[False, True],[False, True],[False, False],[False, False]]
signis_corr=[[-0.60, 0.03],[0.25, 0.39],[0.34, 0.82],[0.12, -0.30],[0.28, -0.17]]
fig = plt.figure(figsize=(8, 13), constrained_layout=True, dpi=100)
top, bottom, left, right = 0.9, 0.01, 0.2, 0.99
wspace, hspace = 0.15, 0.25
gs = GridSpec(
5,
2,
figure=fig,
hspace=hspace,
wspace=wspace,
top=top,
bottom=bottom,
left=left,
right=right,
)
for j, region in enumerate(regions):
for i, method in enumerate(methods):
file = os.path.join(path, f"{method}_y1-20_{region}_CDF.json")
with open(file, "r") as f:
cdf_dict = json.load(f)
ax = fig.add_subplot(gs[i, j])
cstr = cdf_dict["CSTR"]
raw = cdf_dict["RAW"]
plt.plot(cstr[0], cstr[1], color="red", label="constr. CMIP6")
plt.plot(raw[0], raw[1], color="blue", label="raw CMIP6")
plt.ylim(0,100.5)
xmax = np.max([cstr[0][-1], raw[0][-1]])
xmin = np.min([cstr[0][0], raw[0][0]])
for frac in (0.1, 0.25, 0.5, 0.75, 0.9):
    ax.hlines(y=100*frac, xmin=xmin, xmax=xmax, color="grey", alpha=0.5, clip_on=False, zorder=100)
if i != 0:
ax.hlines(y=100*-0.14,xmin=cstr[0][0],xmax=cstr[0][-1],color="red", clip_on=False, zorder=100)
ax.hlines(y=100*-0.16,xmin=raw[0][0],xmax=raw[0][-1],color="blue", clip_on=False, zorder=100)
plt.xlim(xmin,xmax)
plt.yticks([0,10,25,50,75,90,100])
if i == 4:
plt.xlabel("Temperature anomaly (K)", labelpad=10)
if j == 0:
plt.ylabel("Cumulative probability (%)", labelpad=-1, fontsize="medium")
if j == 0:
m = f"{method.split('_')[-1]} {method.split('_')[0]}"
if "CWIP" in m:
m=m.split(" ")[0].replace("CWIP", "ClimWIP")
ax.text(-0.25, 0.5, m, transform=ax.transAxes, rotation="vertical", verticalalignment="center", horizontalalignment="center", fontsize="large")
if i == 0:
ax.text(0.5, 1.1, region, transform=ax.transAxes, verticalalignment="center", horizontalalignment="center", fontsize="large")
ax.text(0.03, 0.925, alphabet[i,j], transform=ax.transAxes)
if bold[i][j]:
ax.text(0.67, 0.53, r"RPSS: $\bf{%.2f}$" % signis[i][j], transform=ax.transAxes)
else:
ax.text(0.67, 0.53, 'RPSS: %.2f' % signis[i][j], transform=ax.transAxes)
if bold_corr[i][j]:
ax.text(0.625, 0.43, r"ResCor: $\bf{%.2f}$" % signis_corr[i][j], transform=ax.transAxes)
else:
ax.text(0.625, 0.43, 'ResCor: %.2f' % signis_corr[i][j], transform=ax.transAxes)
if i == 0 and j == 0:
plt.legend(fontsize="small", loc="lower right").set_zorder(101)
plt.savefig(os.path.join(path.replace("/files", "/plots"), "figure2.png"), bbox_inches='tight')
plt.savefig(os.path.join(path.replace("/files", "/plots"), "figure2.eps"), bbox_inches='tight')
# best_ind_50 = np.argmin(np.absolute(best_increments-50))
# best_ind_10 = np.argmin(np.absolute(best_increments-10))
# best_ind_90 = np.argmin(np.absolute(best_increments-90))
# tas_proj = np.array(best_tas)[[best_ind_10, best_ind_50, best_ind_90]]
# plt.title(f"{region} area averaged 2015-2036 warming distributions\n ref. 1981-2010")
# plt.savefig(os.path.join(self.cfg[names.PLOT_DIR], f"1-20_{region}_{sel_region}_CDF.png"), bbox_inches='tight')
# with open(os.path.join(self.cfg[names.PLOT_DIR], f"1-20_{region}_{sel_region}_CDF.txt"), "w") as f:
# np.savetxt(f, tas_proj)
def weighted_quantiles(
values, sample_weight, quantiles=[0.05, 0.25, 0.5, 0.75, 0.95]
):
"""Very close to numpy.percentile, but supports weights.

param values: numpy.array with the data
param sample_weight: array-like of the same length as `values`
param quantiles: array-like with the quantiles needed, each in [0, 1]
return: numpy.array with the computed quantiles (requires more than two values)
"""
values = np.ma.array(values)
if len(values) != values.count():
# There are masked values, don't compute.
return np.ma.masked_array(values, mask=True)
quantiles = np.array(quantiles)
if sample_weight is None:
sample_weight = np.ones(len(values))
sample_weight = np.array(sample_weight, dtype=float)
assert np.all(quantiles >= 0) and np.all(
quantiles <= 1
), "quantiles should be in [0, 1]"
# normalise the weights so they sum to one
sample_weight /= np.sum(sample_weight)
sorter = np.argsort(values)
values = values[sorter]
sample_weight = sample_weight[sorter]
weighted_quantiles = np.ma.cumsum(sample_weight) - 0.5 * sample_weight
weighted_quantiles -= weighted_quantiles[0]
weighted_quantiles /= weighted_quantiles[-1]
return np.interp(quantiles, weighted_quantiles, values)
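# Hedged usage sketch (this helper is not called elsewhere in this script; the
# numbers are illustrative only): a heavily weighted member pulls the weighted
# median towards itself,
#   weighted_quantiles(np.array([1.0, 2.0, 3.0, 4.0]), np.array([1.0, 1.0, 1.0, 3.0]), quantiles=[0.5])
#   -> array([3.0])   (with equal weights the same call returns ~2.5)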
def main():
compute()
if __name__ == "__main__":
main()
import json
import os
import string
from pathlib import Path

import cartopy.crs as ccrs
import iris
import iris.plot as iplt
import matplotlib.colors as colors
import matplotlib.path as mpath
import matplotlib.pyplot as plt
import numpy as np
from esmvalcore.preprocessor import extract_region
from matplotlib.gridspec import GridSpec
def build_plot():
"""Build figure 1: Mediterranean skill maps (ACC, ΔACC, ResCor and RPSS; rows) for the five constraint methods (columns), one figure per evaluation period."""
path = "/esarchive/scratch/jcos/constraints_paper_figures_data_scritps/files"
alphabet = np.array([f"{i})" for i in string.ascii_lowercase])[:20].reshape(4,5)
alphabet2 = np.array([f"{i})" for i in string.ascii_lowercase][:10]+[f"{i})" for i in string.ascii_lowercase][:10]).reshape(4,5)
# evals = ["1970-2000"] #, "1961-2000"]
evals = ["1970-2000", "1961-2000"]
metrics = ["CORR", "DIFFCORR", "RESCORR", "RPSS"]
label_format = ["ACC", r"$\Delta$ACC", "ResCor", "RPSS"]
labels = {k: v for k, v in zip(metrics, label_format)}
methods = ["CWIP", "OBS_NAtl", "OBS_Glob", "DP_NAtl", "DP_Glob"]
reg = "Med"
rd = {"Med": [-10,30,50,15], "WMed": [-10,30,25,15], "EMed": [15,30,25,15], "Glob": [-179,-89,358,178], "NAm": [-120,30,45,15]}
proj, projection, path_ext, plotextend = define_projection([rd[reg][0]-5, rd[reg][0]+rd[reg][2]+5, rd[reg][1]-5, rd[reg][1]+rd[reg][3]+5])
top, bottom, left, right = 0.95, 0.01, 0.02, 0.92
wspace, hspace = 0.01, 0.01
for eval in evals:
fig = plt.figure(figsize=(11, 4.5), constrained_layout=True, dpi=100)
gs = GridSpec(
4,
5,
figure=fig,
hspace=hspace,
wspace=wspace,
top=top,
bottom=bottom,
left=left,
right=right,
)
cb_heights = [6/8, 4/8, 2/8, 0/8]
for i, metric in enumerate(metrics):
for j, method in enumerate(methods):
file = os.path.join(path, f"{method}_y1-20_{metric}_jja_PC_eval-{eval}.nc")
file_signi = os.path.join(path, f"{method}_y1-20_{metric}_jja_PC_eval-{eval}_signi.nc")
ax = fig.add_subplot(gs[i, j], projection=proj)
ax.set_facecolor("silver")
ax.set_boundary(path_ext, transform=ccrs.PlateCarree())
ax.set_extent(plotextend, crs=ccrs.PlateCarree())
med_region_encloser(rd[reg])
ax.coastlines("50m", linewidth=0.8)
if eval == "1961-2000":
alphabet=alphabet2
ax.text(0.03, 0.8, alphabet[i,j], transform=ax.transAxes)
cube = iris.load_cube(file)
cube = regrid_longitude_coord(cube)
signi = iris.load_cube(file_signi)
signi = regrid_longitude_coord(signi)
if i == 1:
bounds = np.linspace(-0.1, 0.1, 21)
if np.max(extract_region(cube,-10,40,30,45).data)>0.5 or np.min(extract_region(cube,-10,40,30,45).data)<-0.5:
bounds = np.linspace(-1, 1, 21)
elif i == 3:
bounds = np.linspace(-0.45, 0.45, 13)
else:
bounds = np.linspace(-1, 1, 21)
cmap = plt.cm.RdYlBu_r
norm = colors.BoundaryNorm(boundaries=bounds, ncolors=256)
fill = iplt.pcolormesh(cube, norm=norm, coords=("longitude", "latitude"), cmap=cmap)
# pos, neg = save_amount_of_significant_points(cube, signi)
hatches = [4*"/",None]
if len(signi.shape) == 3:
signi=signi[-1]
iplt.contourf(signi, colors="none", levels=[0,0.5,1], hatches=hatches)
ys = ax.get_position().extents
if j == 4:
height=1.6/8
pos = ys[1]
colorbar_axes = plt.gcf().add_axes([0.925, pos+0.025, 0.02, height])
cb = plt.colorbar(fill, colorbar_axes, orientation="vertical")
if j == 0:
ax.text(-0.05, 0.525, labels[metric], transform=ax.transAxes, rotation="vertical", verticalalignment="center", horizontalalignment="center")
if i == 0:
m = f"{method.split('_')[-1]} {method.split('_')[0]}"
if "CWIP" in m:
m=m.split(" ")[0].replace("CWIP", "ClimWIP")
ax.text(0.5, 1, m, transform=ax.transAxes, verticalalignment="center", horizontalalignment="center")
ax = fig.add_subplot(gs[:, :])
ax.axis("off")
plt.savefig(os.path.join(path.replace("/files", "/plots"), f"figure1_{eval}.png"), bbox_inches='tight')
plt.savefig(os.path.join(path.replace("/files", "/plots"), f"figure1_{eval}.eps"), bbox_inches='tight')
def regrid_longitude_coord(cube):
"""Shift the cube longitudes from the 0/360 convention to -180/180 and sort them."""
coord = cube.coord("longitude")
lon_extent = iris.coords.CoordExtent(coord, -180., 180., True, False)
cube = cube.intersection(lon_extent)
return cube
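# Hedged example of the shift above: longitude points [0, 90, 180, 270] on a 0-360
# grid come back reordered as [-180, -90, 0, 90], so the Mediterranean domain stays
# contiguous around the Greenwich meridian when plotted.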
def save_amount_of_significant_points(cube, signi, cfg):
"""Count significant positive/negative skill points over the Mediterranean box and append them to a shared JSON file; cfg replaces the self.cfg references left over from the diagnostic-class version of this helper."""
cube = extract_region(cube, -10, 40, 30, 45)
signi = extract_region(signi, -10, 40, 30, 45)
positives = len(np.ma.where((signi.data == 1) & (cube.data > 0))[0])
negatives = len(np.ma.where((signi.data == 1) & (cube.data < 0))[0])
cube_key = (f"{cfg['selection_criteria']}.{cfg['cstr_yavg']}."
f"{cube.attributes['variable']}.{cube.attributes['fcst_years']}y.{cube.attributes['region']}")
json_file = os.path.join(Path(cfg["work_dir"]).parent.absolute(), "significant_mediterranean_skill_points.json")
if not os.path.isfile(json_file):
signi_dict = {}
else:
with open(json_file, "r") as f:
signi_dict = json.load(f)
signi_dict[cube_key] = {"positives": positives, "negatives": negatives}
with open(json_file, "w") as f:
json.dump(signi_dict, f)
return positives, negatives
def region_to_square(region, dimension):
"""Definition of the region polygon"""
if dimension == "latitude":
return [
region["start_latitude"],
region["start_latitude"],
region["end_latitude"],
region["end_latitude"],
region["start_latitude"],
]
elif dimension == "longitude":
return [
region["start_longitude"],
region["end_longitude"],
region["end_longitude"],
region["start_longitude"],
region["start_longitude"],
]
else:
raise ValueError(f"unknown dimension: {dimension}")
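# Hedged example: for region = {"start_longitude": -10, "end_longitude": 40,
# "start_latitude": 30, "end_latitude": 45} the two calls return
# [-10, 40, 40, -10, -10] and [30, 30, 45, 45, 30], i.e. five corner points that
# trace the box and close it on the starting corner.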
def define_projection(region):
"""Define the LambertConformal projection and the boundary path used to clip each map panel."""
region = {
"start_longitude": region[0],
"end_longitude": region[1],
"start_latitude": region[2],
"end_latitude": region[3],
}
projection = "LambertConformal"
plotextend = [
region["start_longitude"],
region["end_longitude"],
region["start_latitude"],
region["end_latitude"],
]
if projection == "LambertConformal":
# plotextend has to be a little larger so everything is on there
plotextend = [
plotextend[0] - 1.0,
plotextend[1] + 1.0,
plotextend[2] - 1.0,
plotextend[3] + 1.0,
]
# path to cut out is exact though
lons = region_to_square(region, "longitude")
lats = region_to_square(region, "latitude")
path_ext = [[lon, lat] for lon, lat in zip(lons, lats)]
path_ext = mpath.Path(path_ext).interpolated(20)
# Southern Hemisphere
if region["start_latitude"] <= 0 and region["end_latitude"] <= 0:
proj = ccrs.LambertConformal(
central_longitude=np.sum(plotextend[:2]) / 2.0,
central_latitude=np.sum(plotextend[2:]) / 2.0,
cutoff=+30,
standard_parallels=(-33, -45),
)
# Northern Hemisphere
else:
proj = ccrs.LambertConformal(
central_longitude=np.sum(plotextend[:2]) / 2.0,
central_latitude=np.sum(plotextend[2:]) / 2.0,
)
return proj, projection, path_ext, plotextend
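# Minimal usage sketch, mirroring the call in build_plot for the "Med" domain
# (values only illustrate the [start_lon, end_lon, start_lat, end_lat] order):
#   proj, projection, path_ext, plotextend = define_projection([-15, 45, 25, 50])
#   ax = plt.axes(projection=proj)
#   ax.set_boundary(path_ext, transform=ccrs.PlateCarree())
#   ax.set_extent(plotextend, crs=ccrs.PlateCarree())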
def med_region_encloser(region):
"""Draw a dashed box over the Mediterranean region defined as "REGION" in the setup.yml file."""
reg=[region[0], region[0]+region[2], region[1], region[1]+region[3]]
lon_steps = 11
lat_steps = 9
lon_pad = 0.5
lat_pad = 0
box_x_points = np.concatenate(
(
np.ones(lat_steps) * reg[0] + lon_pad,
np.linspace(reg[0], reg[1], lon_steps)[1:-1],
np.ones(lat_steps) * reg[1] - lon_pad,
np.linspace(reg[1], reg[0], lon_steps)[1:-1],
[reg[0] + lon_pad],
)
)
box_y_points = np.concatenate(
(
np.linspace(reg[2], reg[3], lat_steps),
np.ones(lat_steps) * reg[3] - lat_pad,
np.linspace(reg[3], reg[2], lat_steps),
np.ones(lat_steps) * reg[2] + lat_pad,
[reg[2] + lat_pad],
)
)
plt.plot(
box_x_points,
box_y_points,
linewidth=2.1,
color="black",
linestyle="dashed",
transform=ccrs.PlateCarree(),
zorder=5
)
def main():
build_plot()
if __name__ == "__main__":
main()
AER,"Research and Climate Group, Atmospheric and Environmental Research, 131 Hartwell Avenue, Lexington, MA 02421, USA"
AS-RCEC,"Research Center for Environmental Changes, Academia Sinica, Nankang, Taipei 11529, Taiwan"
AWI,"Alfred Wegener Institute, Helmholtz Centre for Polar and Marine Research, Am Handelshafen 12, 27570 Bremerhaven, Germany"
BCC,"Beijing Climate Center, Beijing 100081, China"
CAMS,"Chinese Academy of Meteorological Sciences, Beijing 100081, China"
CAS,"Chinese Academy of Sciences, Beijing 100029, China"
CCCma,"Canadian Centre for Climate Modelling and Analysis, Environment and Climate Change Canada, Victoria, BC V8P 5C2, Canada"
CCCR-IITM,"Centre for Climate Change Research, Indian Institute of Tropical Meteorology Pune, Maharashtra 411 008, India"
CMCC,"Fondazione Centro Euro-Mediterraneo sui Cambiamenti Climatici, Lecce 73100, Italy"
CNRM-CERFACS,"CNRM (Centre National de Recherches Meteorologiques, Toulouse 31057, France), CERFACS (Centre Europeen de Recherche et de Formation Avancee en Calcul Scientifique, Toulouse 31057, France)"
CSIRO,"Commonwealth Scientific and Industrial Research Organisation, Aspendale, Victoria 3195, Australia"
CSIRO-ARCCSS,"CSIRO (Commonwealth Scientific and Industrial Research Organisation, Aspendale, Victoria 3195, Australia), ARCCSS (Australian Research Council Centre of Excellence for Climate System Science). Mailing address: CSIRO, c/o Simon J. Marsland, 107-121 Station Street, Aspendale, Victoria 3195, Australia"
CSIRO-COSIMA,"CSIRO (Commonwealth Scientific and Industrial Research Organisation, Australia), COSIMA (Consortium for Ocean-Sea Ice Modelling in Australia). Mailing address: CSIRO, c/o Simon J. Marsland, 107-121 Station Street, Aspendale, Victoria 3195, Australia"
DKRZ,"Deutsches Klimarechenzentrum, Hamburg 20146, Germany"
DWD,"Deutscher Wetterdienst, Offenbach am Main 63067, Germany"
E3SM-Project,"LLNL (Lawrence Livermore National Laboratory, Livermore, CA 94550, USA); ANL (Argonne National Laboratory, Argonne, IL 60439, USA); BNL (Brookhaven National Laboratory, Upton, NY 11973, USA); LANL (Los Alamos National Laboratory, Los Alamos, NM 87545, USA); LBNL (Lawrence Berkeley National Laboratory, Berkeley, CA 94720, USA); ORNL (Oak Ridge National Laboratory, Oak Ridge, TN 37831, USA); PNNL (Pacific Northwest National Laboratory, Richland, WA 99352, USA); SNL (Sandia National Laboratories, Albuquerque, NM 87185, USA). Mailing address: LLNL Climate Program, c/o David C. Bader, Principal Investigator, L-103, 7000 East Avenue, Livermore, CA 94550, USA"
EC-Earth-Consortium,"AEMET, Spain; BSC, Spain; CNR-ISAC, Italy; DMI, Denmark; ENEA, Italy; FMI, Finland; Geomar, Germany; ICHEC, Ireland; ICTP, Italy; IDL, Portugal; IMAU, The Netherlands; IPMA, Portugal; KIT, Karlsruhe, Germany; KNMI, The Netherlands; Lund University, Sweden; Met Eireann, Ireland; NLeSC, The Netherlands; NTNU, Norway; Oxford University, UK; surfSARA, The Netherlands; SMHI, Sweden; Stockholm University, Sweden; Unite ASTR, Belgium; University College Dublin, Ireland; University of Bergen, Norway; University of Copenhagen, Denmark; University of Helsinki, Finland; University of Santiago de Compostela, Spain; Uppsala University, Sweden; Utrecht University, The Netherlands; Vrije Universiteit Amsterdam, the Netherlands; Wageningen University, The Netherlands. Mailing address: EC-Earth consortium, Rossby Center, Swedish Meteorological and Hydrological Institute/SMHI, SE-601 76 Norrkoping, Sweden"
ECMWF,"European Centre for Medium-Range Weather Forecasts, Reading RG2 9AX, UK"
FIO-QLNM,"FIO (First Institute of Oceanography, Ministry of Natural Resources, Qingdao 266061, China), QNLM (Qingdao National Laboratory for Marine Science and Technology, Qingdao 266237, China)"
HAMMOZ-Consortium,"ETH Zurich, Switzerland; Max Planck Institut fur Meteorologie, Germany; Forschungszentrum Julich, Germany; University of Oxford, UK; Finnish Meteorological Institute, Finland; Leibniz Institute for Tropospheric Research, Germany; Center for Climate Systems Modeling (C2SM) at ETH Zurich, Switzerland"
INM,"Institute for Numerical Mathematics, Russian Academy of Science, Moscow 119991, Russia"
IPSL,"Institut Pierre Simon Laplace, Paris 75252, France"
KIOST,"Korea Institute of Ocean Science and Technology, Busan 49111, Republic of Korea"
LLNL,"Lawrence Livermore National Laboratory, Livermore, CA 94550, USA. Mailing address: LLNL Climate Program, c/o Stephen A. Klein, Principal Investigator, L-103, 7000 East Avenue, Livermore, CA 94550, USA"
MESSy-Consortium,"The Modular Earth Submodel System (MESSy) Consortium, represented by the Institute for Physics of the Atmosphere, Deutsches Zentrum fur Luft- und Raumfahrt (DLR), Wessling, Bavaria 82234, Germany"
MIROC,"JAMSTEC (Japan Agency for Marine-Earth Science and Technology, Kanagawa 236-0001, Japan), AORI (Atmosphere and Ocean Research Institute, The University of Tokyo, Chiba 277-8564, Japan), NIES (National Institute for Environmental Studies, Ibaraki 305-8506, Japan), and R-CCS (RIKEN Center for Computational Science, Hyogo 650-0047, Japan)"
MOHC,"Met Office Hadley Centre, Fitzroy Road, Exeter, Devon, EX1 3PB, UK"
MPI-M,"Max Planck Institute for Meteorology, Hamburg 20146, Germany"
MRI,"Meteorological Research Institute, Tsukuba, Ibaraki 305-0052, Japan"
NASA-GISS,"Goddard Institute for Space Studies, New York, NY 10025, USA"
NASA-GSFC,"NASA Goddard Space Flight Center, Greenbelt, MD 20771, USA"
NCAR,"National Center for Atmospheric Research, Climate and Global Dynamics Laboratory, 1850 Table Mesa Drive, Boulder, CO 80305, USA"
NCC,"NorESM Climate modeling Consortium consisting of CICERO (Center for International Climate and Environmental Research, Oslo 0349), MET-Norway (Norwegian Meteorological Institute, Oslo 0313), NERSC (Nansen Environmental and Remote Sensing Center, Bergen 5006), NILU (Norwegian Institute for Air Research, Kjeller 2027), UiB (University of Bergen, Bergen 5007), UiO (University of Oslo, Oslo 0313) and UNI (Uni Research, Bergen 5008), Norway. Mailing address: NCC, c/o MET-Norway, Henrik Mohns plass 1, Oslo 0313, Norway"
NERC,"Natural Environment Research Council, STFC-RAL, Harwell, Oxford, OX11 0QX, UK"
NIMS-KMA,"National Institute of Meteorological Sciences/Korea Meteorological Administration, Climate Research Division, Seoho-bukro 33, Seogwipo-si, Jejudo 63568, Republic of Korea"
NIWA,"National Institute of Water and Atmospheric Research, Hataitai, Wellington 6021, New Zealand"
NOAA-GFDL,"National Oceanic and Atmospheric Administration, Geophysical Fluid Dynamics Laboratory, Princeton, NJ 08540, USA"
NTU,"National Taiwan University, Taipei 10650, Taiwan"
NUIST,"Nanjing University of Information Science and Technology, Nanjing, 210044, China"
PCMDI,"Program for Climate Model Diagnosis and Intercomparison, Lawrence Livermore National Laboratory, Livermore, CA 94550, USA"
PNNL-WACCEM,"PNNL (Pacific Northwest National Laboratory), Richland, WA 99352, USA"
RTE-RRTMGP-Consortium,"AER (Atmospheric and Environmental Research, Lexington, MA 02421, USA); UColorado (University of Colorado, Boulder, CO 80309, USA). Mailing address: AER c/o Eli Mlawer, 131 Hartwell Avenue, Lexington, MA 02421, USA"
RUBISCO,"ORNL (Oak Ridge National Laboratory, Oak Ridge, TN 37831, USA); ANL (Argonne National Laboratory, Argonne, IL 60439, USA); BNL (Brookhaven National Laboratory, Upton, NY 11973, USA); LANL (Los Alamos National Laboratory, Los Alamos, NM 87545); LBNL (Lawrence Berkeley National Laboratory, Berkeley, CA 94720, USA); NAU (Northern Arizona University, Flagstaff, AZ 86011, USA); NCAR (National Center for Atmospheric Research, Boulder, CO 80305, USA); UCI (University of California Irvine, Irvine, CA 92697, USA); UM (University of Michigan, Ann Arbor, MI 48109, USA). Mailing address: ORNL Climate Change Science Institute, c/o Forrest M. Hoffman, Laboratory Research Manager, Building 4500N Room F106, 1 Bethel Valley Road, Oak Ridge, TN 37831-6301, USA"
SNU,"Seoul National University, Seoul 08826, Republic of Korea"
THU,"Department of Earth System Science, Tsinghua University, Beijing 100084, China"
UA,"Department of Geosciences, University of Arizona, Tucson, AZ 85721, USA"
UCI,"Department of Earth System Science, University of California Irvine, Irvine, CA 92697, USA"
UCSB,"Bren School of Environmental Science and Management, University of California, Santa Barbara. Mailing address: c/o Samantha Stevenson, 2400 Bren Hall, University of California Santa Barbara, Santa Barbara, CA 93106, USA"
UHH,"Universitat Hamburg, Hamburg 20148, Germany"
ACCESS-CM2,['CSIRO-ARCCSS'],"['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
ACCESS-ESM1-5,['CSIRO'],"['r10i1p1f1', 'r4i1p1f1', 'r5i1p1f1', 'r6i1p1f1', 'r8i1p1f1', 'r9i1p1f1']"
BCC-CSM2-MR,['BCC'],['r1i1p1f1']
CAMS-CSM1-0,['CAMS'],"['r1i1p1f1', 'r2i1p1f1']"
CAS-ESM2-0,['CAS'],"['r1i1p1f1', 'r3i1p1f1']"
CESM2-WACCM,['NCAR'],"['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
CMCC-CM2-SR5,['CMCC'],['r1i1p1f1']
CMCC-ESM2,['CMCC'],['r1i1p1f1']
CNRM-CM6-1-HR,['CNRM-CERFACS'],['r1i1p1f2']
CNRM-CM6-1,['CNRM-CERFACS'],"['r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2']"
CNRM-ESM2-1,['CNRM-CERFACS'],"['r10i1p1f2', 'r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2', 'r7i1p1f2', 'r8i1p1f2', 'r9i1p1f2']"
CanESM5-CanOE,['CCCma'],"['r1i1p2f1', 'r2i1p2f1', 'r3i1p2f1']"
CanESM5,['CCCma'],"['r10i1p2f1', 'r11i1p2f1', 'r12i1p2f1', 'r13i1p2f1', 'r14i1p2f1', 'r15i1p2f1', 'r16i1p2f1', 'r17i1p2f1', 'r18i1p2f1', 'r19i1p2f1', 'r1i1p2f1', 'r20i1p2f1', 'r21i1p2f1', 'r22i1p2f1', 'r23i1p2f1', 'r24i1p2f1', 'r25i1p2f1', 'r2i1p2f1', 'r3i1p2f1', 'r4i1p2f1', 'r5i1p2f1', 'r6i1p2f1', 'r7i1p2f1', 'r8i1p2f1', 'r9i1p2f1']"
EC-Earth3,['EC-Earth-Consortium'],"['r10i1p1f1', 'r12i1p1f1', 'r14i1p1f1', 'r17i1p1f1', 'r19i1p1f1', 'r21i1p1f1', 'r22i1p1f1', 'r23i1p1f1', 'r24i1p1f1', 'r25i1p1f1', 'r2i1p1f1', 'r7i1p1f1']"
FGOALS-f3-L,['CAS'],['r1i1p1f1']
FGOALS-g3,['CAS'],"['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
FIO-ESM-2-0,['FIO-QLNM'],"['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
GISS-E2-1-G,['NASA-GISS'],"['r10i1p1f2', 'r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2', 'r7i1p1f2', 'r8i1p1f2', 'r9i1p1f2']"
HadGEM3-GC31-LL,"['MOHC', 'NERC']",['r1i1p1f3']
INM-CM5-0,['INM'],['r1i1p1f1']
IPSL-CM6A-LR,['IPSL'],"['r10i1p1f1', 'r11i1p1f1', 'r14i1p1f1', 'r1i1p1f1', 'r22i1p1f1', 'r25i1p1f1', 'r2i1p1f1', 'r3i1p1f1', 'r4i1p1f1', 'r5i1p1f1', 'r6i1p1f1']"
KIOST-ESM,['KIOST'],['r1i1p1f1']
MIROC-ES2L,['MIROC'],"['r10i1p1f2', 'r11i1p1f2', 'r12i1p1f2', 'r13i1p1f2', 'r14i1p1f2', 'r15i1p1f2', 'r16i1p1f2', 'r17i1p1f2', 'r18i1p1f2', 'r19i1p1f2', 'r1i1p1f2', 'r20i1p1f2', 'r21i1p1f2', 'r22i1p1f2', 'r23i1p1f2', 'r24i1p1f2', 'r25i1p1f2', 'r26i1p1f2', 'r27i1p1f2', 'r28i1p1f2', 'r29i1p1f2', 'r2i1p1f2', 'r30i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2', 'r7i1p1f2', 'r8i1p1f2', 'r9i1p1f2']"
MIROC6,['MIROC'],"['r10i1p1f1', 'r11i1p1f1', 'r12i1p1f1', 'r13i1p1f1', 'r14i1p1f1', 'r15i1p1f1', 'r16i1p1f1', 'r17i1p1f1', 'r18i1p1f1', 'r19i1p1f1', 'r1i1p1f1', 'r20i1p1f1', 'r21i1p1f1', 'r22i1p1f1', 'r23i1p1f1', 'r24i1p1f1', 'r25i1p1f1', 'r26i1p1f1', 'r27i1p1f1', 'r28i1p1f1', 'r29i1p1f1', 'r2i1p1f1', 'r30i1p1f1', 'r31i1p1f1', 'r32i1p1f1', 'r33i1p1f1', 'r34i1p1f1', 'r35i1p1f1', 'r36i1p1f1', 'r37i1p1f1', 'r38i1p1f1', 'r39i1p1f1', 'r3i1p1f1', 'r40i1p1f1', 'r41i1p1f1', 'r42i1p1f1', 'r43i1p1f1', 'r44i1p1f1', 'r45i1p1f1', 'r46i1p1f1', 'r47i1p1f1', 'r48i1p1f1', 'r49i1p1f1', 'r4i1p1f1', 'r50i1p1f1', 'r5i1p1f1', 'r6i1p1f1', 'r7i1p1f1', 'r8i1p1f1', 'r9i1p1f1']"
MPI-ESM1-2-LR,"['MPI-M', 'AWI']","['r10i1p1f1', 'r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1', 'r4i1p1f1', 'r5i1p1f1', 'r6i1p1f1', 'r7i1p1f1', 'r8i1p1f1', 'r9i1p1f1']"
MRI-ESM2-0,['MRI'],['r1i1p1f1']
NESM3,['NUIST'],"['r1i1p1f1', 'r2i1p1f1']"
NorESM2-LM,['NCC'],"['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
NorESM2-MM,['NCC'],['r1i1p1f1']
UKESM1-0-LL,"['MOHC', 'NERC', 'NIMS-KMA', 'NIWA']","['r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r8i1p1f2']"
"ACCESS-CM2, ['CSIRO-ARCCSS'], ['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
"ACCESS-ESM1-5, ['CSIRO'], ['r10i1p1f1', 'r4i1p1f1', 'r5i1p1f1', 'r6i1p1f1', 'r8i1p1f1', 'r9i1p1f1']"
"BCC-CSM2-MR, ['BCC'], ['r1i1p1f1']"
"CAMS-CSM1-0, ['CAMS'], ['r1i1p1f1', 'r2i1p1f1']"
"CAS-ESM2-0, ['CAS'], ['r1i1p1f1', 'r3i1p1f1']"
"CESM2-WACCM, ['NCAR'], ['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
"CMCC-CM2-SR5, ['CMCC'], ['r1i1p1f1']"
"CMCC-ESM2, ['CMCC'], ['r1i1p1f1']"
"CNRM-CM6-1-HR, ['CNRM-CERFACS'], ['r1i1p1f2']"
"CNRM-CM6-1, ['CNRM-CERFACS'], ['r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2']"
"CNRM-ESM2-1, ['CNRM-CERFACS'], ['r10i1p1f2', 'r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2', 'r7i1p1f2', 'r8i1p1f2', 'r9i1p1f2']"
"CanESM5-CanOE, ['CCCma'], ['r1i1p2f1', 'r2i1p2f1', 'r3i1p2f1']"
"CanESM5, ['CCCma'], ['r10i1p2f1', 'r11i1p2f1', 'r12i1p2f1', 'r13i1p2f1', 'r14i1p2f1', 'r15i1p2f1', 'r16i1p2f1', 'r17i1p2f1', 'r18i1p2f1', 'r19i1p2f1', 'r1i1p2f1', 'r20i1p2f1', 'r21i1p2f1', 'r22i1p2f1', 'r23i1p2f1', 'r24i1p2f1', 'r25i1p2f1', 'r2i1p2f1', 'r3i1p2f1', 'r4i1p2f1', 'r5i1p2f1', 'r6i1p2f1', 'r7i1p2f1', 'r8i1p2f1', 'r9i1p2f1']"
"EC-Earth3, ['EC-Earth-Consortium'], ['r10i1p1f1', 'r12i1p1f1', 'r14i1p1f1', 'r17i1p1f1', 'r19i1p1f1', 'r21i1p1f1', 'r22i1p1f1', 'r23i1p1f1', 'r24i1p1f1', 'r25i1p1f1', 'r2i1p1f1', 'r7i1p1f1']"
"FGOALS-f3-L, ['CAS'], ['r1i1p1f1']"
"FGOALS-g3, ['CAS'], ['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
"FIO-ESM-2-0, ['FIO-QLNM'], ['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
"GISS-E2-1-G, ['NASA-GISS'], ['r10i1p1f2', 'r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2', 'r7i1p1f2', 'r8i1p1f2', 'r9i1p1f2']"
"HadGEM3-GC31-LL, ['MOHC', 'NERC'], ['r1i1p1f3']"
"INM-CM5-0, ['INM'], ['r1i1p1f1']"
"IPSL-CM6A-LR, ['IPSL'], ['r10i1p1f1', 'r11i1p1f1', 'r14i1p1f1', 'r1i1p1f1', 'r22i1p1f1', 'r25i1p1f1', 'r2i1p1f1', 'r3i1p1f1', 'r4i1p1f1', 'r5i1p1f1', 'r6i1p1f1']"
"KIOST-ESM, ['KIOST'], ['r1i1p1f1']"
"MIROC-ES2L, ['MIROC'], ['r10i1p1f2', 'r11i1p1f2', 'r12i1p1f2', 'r13i1p1f2', 'r14i1p1f2', 'r15i1p1f2', 'r16i1p1f2', 'r17i1p1f2', 'r18i1p1f2', 'r19i1p1f2', 'r1i1p1f2', 'r20i1p1f2', 'r21i1p1f2', 'r22i1p1f2', 'r23i1p1f2', 'r24i1p1f2', 'r25i1p1f2', 'r26i1p1f2', 'r27i1p1f2', 'r28i1p1f2', 'r29i1p1f2', 'r2i1p1f2', 'r30i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r5i1p1f2', 'r6i1p1f2', 'r7i1p1f2', 'r8i1p1f2', 'r9i1p1f2']"
"MIROC6, ['MIROC'], ['r10i1p1f1', 'r11i1p1f1', 'r12i1p1f1', 'r13i1p1f1', 'r14i1p1f1', 'r15i1p1f1', 'r16i1p1f1', 'r17i1p1f1', 'r18i1p1f1', 'r19i1p1f1', 'r1i1p1f1', 'r20i1p1f1', 'r21i1p1f1', 'r22i1p1f1', 'r23i1p1f1', 'r24i1p1f1', 'r25i1p1f1', 'r26i1p1f1', 'r27i1p1f1', 'r28i1p1f1', 'r29i1p1f1', 'r2i1p1f1', 'r30i1p1f1', 'r31i1p1f1', 'r32i1p1f1', 'r33i1p1f1', 'r34i1p1f1', 'r35i1p1f1', 'r36i1p1f1', 'r37i1p1f1', 'r38i1p1f1', 'r39i1p1f1', 'r3i1p1f1', 'r40i1p1f1', 'r41i1p1f1', 'r42i1p1f1', 'r43i1p1f1', 'r44i1p1f1', 'r45i1p1f1', 'r46i1p1f1', 'r47i1p1f1', 'r48i1p1f1', 'r49i1p1f1', 'r4i1p1f1', 'r50i1p1f1', 'r5i1p1f1', 'r6i1p1f1', 'r7i1p1f1', 'r8i1p1f1', 'r9i1p1f1']"
"MPI-ESM1-2-LR, ['MPI-M', 'AWI'], ['r10i1p1f1', 'r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1', 'r4i1p1f1', 'r5i1p1f1', 'r6i1p1f1', 'r7i1p1f1', 'r8i1p1f1', 'r9i1p1f1']"
"MRI-ESM2-0, ['MRI'], ['r1i1p1f1']"
"NESM3, ['NUIST'], ['r1i1p1f1', 'r2i1p1f1']"
"NorESM2-LM, ['NCC'], ['r1i1p1f1', 'r2i1p1f1', 'r3i1p1f1']"
"NorESM2-MM, ['NCC'], ['r1i1p1f1']"
"UKESM1-0-LL, ['MOHC', 'NERC', 'NIMS-KMA', 'NIWA'], ['r1i1p1f2', 'r2i1p1f2', 'r3i1p1f2', 'r4i1p1f2', 'r8i1p1f2']"
"""
scripts:
- /esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/scripts/plot_pattern_correlation_maps.py
- plot_2methods_regional_timeseries -> S6 & 7, remove RPSS boxes and so on
- /esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/shared_constraints_diagnostics/member_selection_heatmap.py
- heatmap S2 (inplace) add redbox, subtract first decade
- /esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/scripts/plot_relres.py
- S4 & 5 (inplace?)
S3 (done)
S1 probably done in inkscape
"""
import json
import logging
import os
import sys
import warnings
from datetime import datetime

import matplotlib.patches as patches
import matplotlib.pyplot as plt
import numpy as np
import yaml

import esmvaltool.diag_scripts.shared as e
from esmvaltool.diag_scripts.shared import names, io
current = os.path.dirname(os.path.realpath(__file__))
parent_directory = os.path.dirname(current)
sys.path.append(parent_directory)
logger = logging.getLogger(__name__)
FORMAT = "%(asctime)-15s %(message)s"
logging.basicConfig(format=FORMAT)
startTime = datetime.now()
warnings.filterwarnings("ignore")
config_loops_file = "/esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/config_loops.yml"
with open(config_loops_file, "r") as f:
config_loops = yaml.full_load(f)
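# The nested loops in main() below expect config_loops.yml to provide four lists;
# a hypothetical minimal example (keys taken from main(), values illustrative only):
#   criteria_cstr: ["jja_PC"]
#   year_averages: ["1-9y"]
#   subsample_size: [10]
#   criteria_collect: ["jja_PC"]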
class IntermediaryPlots(object):
def __init__(self, config, crit, cstr_yavg, crit_collect, i, N):
self.cfg = config
self.cfg["earl_anomaly"] = False
self.cfg["selection_criteria"] = crit
self.cfg["collect_criteria"] = crit_collect
self.cfg["cstr_yavg"] = cstr_yavg
self.cfg["subsample_size"] = N
self.setup_dirs()
if i != 0:
self.cfg["regions"] = {k: v for k, v in self.cfg["regions"].items() if "corr" in k}
def setup_dirs(self):
criteria_label = self.cfg["selection_criteria"]
year_averaging = self.cfg["cstr_yavg"]
self.cfg["out_plot_dir"] = os.path.join(self.cfg[names.PLOT_DIR],f"{criteria_label}", year_averaging)
if not os.path.isdir(self.cfg["out_plot_dir"]):
os.makedirs(self.cfg["out_plot_dir"])
self.cfg["out_work_dir"] = os.path.join(self.cfg[names.WORK_DIR],f"{criteria_label}", year_averaging)
if not os.path.isdir(self.cfg["out_work_dir"]):
os.makedirs(self.cfg["out_work_dir"])
if "_eval-" in self.cfg["selection_criteria"]:
if self.cfg["selection_criteria"].endswith("_X"):
attributes = self.cfg["selection_criteria"].split("_")
criteria_label = ""
for attr in attributes:
if "eval-" not in attr:
criteria_label += f"{attr}_"
criteria_label = criteria_label[:-1]
else:
criteria_label = self.cfg["selection_criteria"].split("_eval-")[0]
new_idata = []
for path in self.cfg["input_files"]:
new_path = path
if not path.endswith('metadata.yml'):
new_path = os.path.join(path, criteria_label, year_averaging)
new_idata.append(new_path)
self.cfg["input_files"] = new_idata
logger.info(self.cfg["input_files"])
def main(self):
self.plot_heatmap()
# self.plot_correlation_timeseries()
def plot_heatmap(self):
"""Plot, for each region, a heatmap of which members enter the Best-N selection at every start date."""
reverse = "RMSE" not in self.cfg["selection_criteria"]
var = self.cfg['input_files'][0].split("/")[-3].split("_")[0]
recipe_name = self.cfg['input_files'][0].split("/")[-6]
if "observation" in self.cfg["recipe"]:
method = "OBS"
elif "decadal" in self.cfg["recipe"]:
method = "DP"
for region in self.cfg["regions"]:
# if region != "NAtl":
if region != "Glob":
continue
if "corr" in region:
region = self.cfg["regions"][region].replace("CSTR-YRS",f"{self.cfg['cstr_yavg']}").replace("CRIT-CLCT",f"{self.cfg['collect_criteria']}")
region = region.split("OBS_")[-1].split(".nc")[0]
m_files = io.get_all_ancestor_files(self.cfg, f"{region}_*.json")
m_files.sort()
if len(m_files) == 0:
continue
formatter = {"RMSE": "RMSE", "PC": "Uncentered pat. corr.", "PCiris": "Centered pat. corr."}
N=self.cfg["subsample_size"]
corrs_array = []
sds = []
for i,m_sd in enumerate(m_files):
logger.info(f"loading json correlation file: {m_sd}")
with open(m_sd, "r") as f:
corr = json.load(f)
corrs = list(corr.values())
corr = self.purge_dataset("CESM2", corr)
if i == 0:
selection = np.zeros((len(m_files),len(list(corr.keys()))))
members = list(corr.keys())
members = sorted([m for m in corr if not m.startswith("OBS")])
selected = [k for k, v in sorted(corr.items(), key=lambda item: item[1], reverse=reverse)][:N]
seldict = {m: 0 for m in members}
for d in seldict:
if d in selected:
seldict[d] = 1
selection[i] = list(seldict.values())
corrs.sort(reverse=reverse)
corrs_array.append(corrs)
sds.append(int(m_sd.split(".json")[0].split("_")[-1]))
canesm = [i for i, m in enumerate(members) if "CanESM5" in m]
members = [m.replace("_", " ") for m in members]
dates = np.arange(1961, 2001)
fig, ax = plt.subplots(figsize=(12,28/210*len(members)), dpi=150)
fill = plt.pcolormesh(dates[9:], members, selection.transpose()[:,9:], cmap=plt.cm.Greys, vmin=0, vmax=1, edgecolors="black")
ymin, ymax = plt.ylim()
# plt.vlines(1969.5, ymin, ymax, color = "red", linewidth=2)
# plt.colorbar(fill, location="right", orientation="vertical", shrink=0.3, cmap=plt.cm.Greys, label="Weight")
plt.title(f"Selection of the Best{self.cfg['subsample_size']} through\n"
f"{method}-sel. {self.cfg['cstr_yavg']} {formatter[self.cfg['selection_criteria']]}\n"
f"in region: {region.split('_signi')[0]}\n", fontsize="x-large", pad=-10)
plt.tight_layout()
minimal_region = region
if "corr" in minimal_region:
minimal_region=minimal_region.partition(".")[-1].rpartition("_signi_corr-mask")[0]
if "decadal" in self.cfg["out_plot_dir"]:
method = "DP"
rect = patches.Rectangle((1969.6, canesm[0]+ymin), 30.8, canesm[-1]-canesm[0]-ymin*2, linewidth=3, edgecolor='r', facecolor='none')
ax.add_patch(rect)
ax.text(-0.2, 1.02, "A)", transform=ax.transAxes, fontsize="xx-large")
else:
method = "OBS"
ax.text(-0.2, 1.02, "B)", transform=ax.transAxes, fontsize="xx-large")
plt.savefig(os.path.join("/esarchive/scratch/jcos/constraints_paper_figures_data_scritps/plots", f"{method}_selection_heatmap_best{self.cfg['subsample_size']}_{minimal_region}.png"))
plt.savefig(os.path.join("/esarchive/scratch/jcos/constraints_paper_figures_data_scritps/plots", f"{method}_selection_heatmap_best{self.cfg['subsample_size']}_{minimal_region}.pdf"), format="pdf", bbox_inches='tight')
def purge_dataset(self, datasets, dictionary):
"""Remove the given dataset(s) from the dictionary.

datasets: dataset name (string) or list of names
dictionary: mapping whose keys are dataset names or "dataset_member" aliases
"""
if not isinstance(datasets, list):
datasets = [datasets]
keys_list = list(dictionary.keys())
purgeable_keys = []
for dataset in datasets:
full_name_dataset_keys=[k for k in keys_list if dataset == k]
part_name_dataset_keys=[k for k in keys_list if dataset in k.split("_")]
dataset_keys=full_name_dataset_keys+part_name_dataset_keys
purgeable_keys += dataset_keys
return {k: v for k, v in dictionary.items() if k not in purgeable_keys}
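# Hedged usage sketch of the purge above (aliases are hypothetical): matching is on
# whole underscore-separated tokens, so a dataset name only removes its own members,
#   corr = {"CESM2_r1i1p1f1": 0.4, "CESM2-WACCM_r1i1p1f1": 0.5, "MIROC6_r1i1p1f1": 0.3}
#   self.purge_dataset("CESM2", corr)  # -> {"CESM2-WACCM_r1i1p1f1": 0.5, "MIROC6_r1i1p1f1": 0.3}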
def main():
print("----------- MAIN ----------")
for crit in config_loops["criteria_cstr"]:
for cstr_yavg in config_loops["year_averages"]:
for N in config_loops["subsample_size"]:
for i, crit_collect in enumerate(config_loops["criteria_collect"]):
with e.run_diagnostic() as config:
IntermediaryPlots(config, crit, cstr_yavg, crit_collect, i, N).main()
if __name__ == "__main__":
main()
print(f" TIME SPENT: {datetime.now() - startTime}")
import glob
import os

import iris
import matplotlib.pyplot as plt
import numpy as np
from esmvalcore.preprocessor import area_statistics, extract_point, extract_region
"""
scripts:
- /esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/scripts/plot_pattern_correlation_maps.py
- plot_2methods_regional_timeseries -> S6 & 7, remove RPSS boxes and so on
- /esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/shared_constraints_diagnostics/member_selection_heatmap.py
- heatmap S2 (inplace) add redbox, subtract first decade
- /esarchive/scratch/jcos/es-esmvaltool/common-tools/projections/diagnostics/scripts/plot_relres.py
- S4 & 5 (inplace?)
S3 (done)
S1 probably done in inkscape
"""
member_alias = iris.load_cube("/esarchive/scratch/jcos/output/recipe_decadal_constrains_20220718_053727/work/constr_s1968/s1968/PC/1-9y/members_field_Glob_1968.nc").coord("member_alias").points
canesm_ind = [i for i, alias in enumerate(member_alias) if "CanESM" in alias]
series = np.arange(1961,2001)
period = [1961,2000]
indices = [i for i in np.where(series>=period[0])[0] if i in np.where(series<=period[1])[0]]
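# Note on how plot_regional_timeseries() reads the rpss dictionary below (inferred
# from the code further down): for the OBS/DP entries, index 0 is the RPSS value
# printed on the panel, index 1 (0/1) switches the annotation to bold when the score
# is significant, and the last two values are drawn as short horizontal reference
# levels at the left (1961-2000) and right (1970-2000) edges of the series. The
# 2-element "ERA5" and "ALL" entries are used only for those reference levels.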
rpss = {"Eastern":
{"1961-2000": {"OBS": {"Glob": [-0.08,0,-0.50,0.07], "NAtl": [-0.15,1,-0.46,0.06]},
"DP": {"Glob": [0.14,1,-0.59,0.06], "NAtl": [0.04,0,-0.49,0.05]},
"ERA5": [-0.56,0.05], "ALL": [-0.44,0.05]},
"1970-2000": {"OBS": {"Glob": [0.20,1,-0.3,0.22], "NAtl": [0.08,1,-0.3,0.22]},
"DP": {"Glob": [0.21,1,-0.36,0.21], "NAtl": [0.05,0,-0.32,0.20]},
"ERA5": [-0.36,0.21], "ALL": [-0.28,0.18]}},
"Western":
{"1961-2000": {"OBS": {"Glob": [0.04,1,-0.46,0.05], "NAtl": [-0.14,0,-0.41,0.06]},
"DP": {"Glob": [0.4,1,-0.59,0.04], "NAtl": [0.06,1,-0.49,0.04]},
"ERA5": [-0.73,0.09], "ALL": [-0.41,0.05]},
"1970-2000": {"OBS": {"Glob": [-0.04,0,-0.28,0.2], "NAtl": [-0.08,0,-0.27,0.2]},
"DP": {"Glob": [0.26,1,-0.34,0.17], "NAtl": [0.14,1,-0.30,0.17]},
"ERA5": [-0.34,0.19], "ALL": [-0.25,0.16]}}}
def plot_regional_timeseries(path, region, title, cwip=False):
"""Compare the OBS, BEST and ALL forecasts of the 1-20 year time series, area-averaged over the input region.

Expected input files follow the pattern
tas_fcst_cube_BEST_Glob_1-20.nc / tas_members_fcst_cube_BEST_Glob_1-20.nc.
"""
selection_region = title.split(" ")[-2]
method = title.split(" ")[-3]
subregion = title.split(" ")[0]
fcst_files = glob.glob(os.path.join(path, f"tas_fcst_cube_*_{selection_region}_1-20.nc")) # fcst, members º1-20 BEST&ALL&OBS
members_files = glob.glob(os.path.join(path, f"tas_members_fcst_cube_*_{selection_region}_1-20.nc"))
obs_file = os.path.join(path, f"tas_obs_cube_BEST_{selection_region}_1-20.nc")
fcst_cubes = {f.split("_")[-3]: reg_postpro(f, region) for f in fcst_files}
# fcst_cubes.update(cwip_fcst_cubes)
members_cubes = {f.split("_")[-3]: reg_postpro(f, region) for f in members_files}
# members_cubes.update(cwip_members_cubes)
obs_cube = reg_postpro(obs_file, region)
colors={"BEST": "red", "ALL": "blue", "weighted": "yellow", "ERA5": "black", "OBS": "red", "DP": "red"}
fig, ax = plt.subplots(dpi=300)
plt.plot(series[indices],obs_cube.data, color="black",label="OBS",zorder=101)
ymin, ymax = 0, 0
for key, fcst in fcst_cubes.items():
if key == "weighted" and not cwip:
continue
plt.plot(series[indices],fcst.data, color=colors[key], label=key, zorder=101)
for key, mem in members_cubes.items():
if key == "BEST":
plt.scatter(np.repeat(series[indices], len(mem.coord("member").points), axis=0),mem.data.flatten(),marker='.',s=4,alpha=0.5, zorder=100, color="red")
else:
for i in np.arange(mem.data.shape[1]):
plt.plot(series[indices],mem[:,i].data, color="cornflowerblue", alpha=0.1)
ymin = np.min(mem.data)
ymax = np.max(mem.data)
if rpss[subregion]["1961-2000"][method][selection_region][1] == 1:
weight = "bold"
else:
weight = "normal"
ax.text(1980, 0.85, f"1961-2000 RPSS: {rpss[subregion]['1961-2000'][method][selection_region][0]:.2f}", verticalalignment="center", horizontalalignment="center", weight=weight)
if rpss[subregion]["1970-2000"][method][selection_region][1] == 1:
weight = "bold"
else:
weight = "normal"
ax.text(1980, 1, f"1970-2000 RPSS: {rpss[subregion]['1970-2000'][method][selection_region][0]:.2f}", verticalalignment="center", horizontalalignment="center", weight=weight)
for ens in [method, "ALL", "ERA5"]:
for side, period in enumerate(["1961-2000", "1970-2000"]):
level = rpss[subregion][period][ens]
if isinstance(level, dict):
level = level[selection_region]
plt.hlines(level[-1],series[indices][side*-1]-1.5,series[indices][side*-1]+1.5, color=colors[ens], alpha=0.7)
plt.hlines(level[-2],series[indices][side*-1]-1.5,series[indices][side*-1]+1.5, color=colors[ens], alpha=0.7)
plt.hlines(0,series[indices][0],series[indices][-1], color="grey", alpha=0.4)
plt.fill_between([1961, 1970], [ymin, ymin], [ymax, ymax], color="grey", alpha=0.2)
plt.ylim(ymin, ymax)
plt.xlim(series[indices][0],series[indices][-1])
plt.ylabel("(K)")
plt.legend()
if len(region)==4:
save = f"{region[0]}_{region[1]}_{region[2]}_{region[3]}"
else:
save = f"{region[0]}_{region[1]}"
plt.title(title)
plt.savefig(os.path.join("/esarchive/scratch/jcos/constraints_paper_figures_data_scritps/plots",f"{method}_fcst_timeseries_{selection_region}_{save}.png"),bbox_inches='tight')
plt.savefig(os.path.join("/esarchive/scratch/jcos/constraints_paper_figures_data_scritps/plots",f"{method}_fcst_timeseries_{selection_region}_{save}.pdf"), format="pdf", bbox_inches='tight')
# def plot_2methods_regional_timeseries(path, region, title):
# """compares the OBS, BEST, ALL forecasts for the 1-20 timeseries in an input area avgd region"""
# "tas_fcst_cube_BEST_Glob_1-20.nc"
# "tas_members_fcst_cube_BEST_Glob_1-20.nc"
# selection_region = title.split(" ")[-2]
# method = title.split(" ")[-3]
# obs_file = os.path.join(path, f"tas_obs_cube_BEST_{selection_region}_1-20.nc")
# obs_cube = reg_postpro(obs_file, region)
# colors={"BEST": "red", "ALL": "blue", "weighted": "yellow"}
# fig, ax = plt.subplots()
# plt.plot(series[indices],obs_cube.data, color="black",label="OBS",zorder=101)
# min, max = [0, 0]
# for key, fcst in fcst_cubes.items():
# if key == "weighted":
# continue
# plt.plot(series[indices],fcst.data, color=colors[key], label=key, zorder=101)
# for path in paths:
# fcst_files = glob.glob(os.path.join(path, f"tas_fcst_cube_*_{selection_region}_1-20.nc")) # fcst, members º1-20 BEST&ALL&OBS
# members_files = glob.glob(os.path.join(path, f"tas_members_fcst_cube_*_{selection_region}_1-20.nc"))
# fcst_cubes = {f.split("_")[-3]: reg_postpro(f, region) for f in fcst_files}
# members_cubes = {f.split("_")[-3]: reg_postpro(f, region) for f in members_files}
# for key, mem in members_cubes.items():
# if key == "BEST":
# plt.scatter(np.repeat(series[indices], len(mem.coord("member").points), axis=0),mem.data.flatten(),marker='.',s=4,alpha=0.5, zorder=100, color="red")
# else:
# for i in np.arange(mem.data.shape[1]):
# plt.plot(series[indices],mem[:,i].data, color="purple", alpha=0.2)
# min=np.min(mem.data)
# max=np.max(mem.data)
# plt.fill_between([1961,1970],[min,min], [max,max], color="grey", alpha=0.2)
# plt.ylim(min,max)
# plt.xlim(series[indices][0],series[indices][-1])
# plt.legend()
# if len(region)==4:
# save = f"{region[0]}_{region[1]}_{region[2]}_{region[3]}"
# else:
# save = f"{region[0]}_{region[1]}"
# plt.title(title)
# plt.savefig(os.path.join(path.replace("/work/", "/plots/"),f"{method}_fcst_timeseries_{selection_region}_{save}.png"),bbox_inches='tight')
def reg_postpro(f, region):
"""Load the cube if a path is given and post-process it: a 4-element region [lon_min, lon_max, lat_min, lat_max] returns the area mean, a 2-element region [lat, lon] returns the nearest grid point."""
if len(region) == 4:
if isinstance(f,str):
return area_statistics(extract_region(iris.load_cube(f), region[0], region[1], region[2], region[3]), "mean")
else:
return area_statistics(extract_region(f, region[0], region[1], region[2], region[3]), "mean")
# return area_statistics(extract_region(iris.load_cube(f)[9:], region[0], region[1], region[2], region[3]), "mean")
elif len(region) == 2:
if isinstance(f,str):
return extract_point(iris.load_cube(f), region[0], region[1], "nearest")
else:
return extract_point(f, region[0], region[1], "nearest")
# return extract_point(iris.load_cube(f)[9:], region[0], region[1], "nearest")
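# Hedged examples of the two branches above (the file name follows the pattern used
# in plot_regional_timeseries; the point coordinates are illustrative):
#   reg_postpro("tas_fcst_cube_BEST_Glob_1-20.nc", [15, 40, 30, 45])  # Eastern Med box mean
#   reg_postpro(cube, [40.0, 15.0])                                   # nearest point to lat 40, lon 15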
def main():
path="/esarchive/scratch/jcos/output/recipe_light_observational_constrains_20220621_095313/work/tas_skill/collect/jja_PC/1-9y"
plot_regional_timeseries(path, [15,40,30,45], "Eastern Mediterranean OBS Glob 1-20") # Med
plot_regional_timeseries(path, [-10,15,30,45], "Western Mediterranean OBS Glob 1-20") # Med
plot_regional_timeseries(path, [15,40,30,45], "Eastern Mediterranean OBS NAtl 1-20") # Med
plot_regional_timeseries(path, [-10,15,30,45], "Western Mediterranean OBS NAtl 1-20") # Med
path="/esarchive/scratch/jcos/output/recipe_decadal_constrains_20220718_053727/work/tas_skill/collect/jja_PC/1-9y/"
plot_regional_timeseries(path, [15,40,30,45], "Eastern Mediterranean DP Glob 1-20") # Med
plot_regional_timeseries(path, [-10,15,30,45], "Western Mediterranean DP Glob 1-20") # Med
plot_regional_timeseries(path, [15,40,30,45], "Eastern Mediterranean DP NAtl 1-20") # Med
plot_regional_timeseries(path, [-10,15,30,45], "Western Mediterranean DP NAtl 1-20") # Med
if __name__ == "__main__":
main()