Commit 3c6eb551 authored by Javier Vegas-Regidor's avatar Javier Vegas-Regidor
Browse files

Richer metadata for days over and output now proportional

parent 3baa4b4c
......@@ -6,7 +6,7 @@ SCRATCH_DIR = /scratch/Earth/$USER
# Root path for the cmorized data to use
DATA_DIR = /esnas:/esarchive
# Specify if your data is from an experiment (exp), observation (obs) or reconstructions (recon)
DATA_TYPE = recon
DATA_TYPE = exp
# CMORization type to use. Important also for THREDDS as it affects variable name conventions.
# Options: SPECS (default), PRIMAVERA, CMIP6
DATA_CONVENTION = SPECS
......@@ -16,7 +16,7 @@ CON_FILES = /esnas/autosubmit/con_files/
# Diagnostics to run, space separated. You must provide for each one the name and the parameters (comma separated) or
# an alias defined in the ALIAS section (see more below). If you are using the diagnostics just to CMORize, leave it
# empty
DIAGS = climpercent,atmos,sfcWind,2000,2000,1-2 daysover,atmos,sfcWind,2000,2000,1-2
DIAGS = climpercent,atmos,sfcWind,2000,2000,1 daysover,atmos,sfcWind,2000,2000,1
# DIAGS = OHC
# Frequency of the data you want to use by default. Some diagnostics do not use this value: i.e. monmean always stores
# its results at monthly frequency (obvious) and has a parameter to specify input's frequency.
......@@ -70,7 +70,7 @@ SERVER_URL = https://earth.bsc.es/thredds
[EXPERIMENT]
# Experiments parameters as defined in CMOR standard
INSTITUTE = ecmwf
MODEL = erainterim
MODEL = system4_m1
# Model version: Available versions
MODEL_VERSION = Ec3.2_O1L75
# Atmospheric output timestep in hours
......
......@@ -31,6 +31,9 @@ class ObsReconManager(DataManager):
filepath = self.get_file_path(startdate, domain, variable, frequency, vartype)
return self._get_file_from_storage(filepath)
def create_link(self, domain, filepath, frequency, var, grid, move_old, vartype):
    # Intentional no-op: this manager does not maintain a linked-folder view of
    # the storage, so there is nothing to link. The parameters mirror the
    # DataManager interface and are ignored here.
    pass
# noinspection PyUnusedLocal
def file_exists(self, domain, var, startdate, member, chunk, grid=None, box=None, frequency=None,
vartype=VariableType.MEAN):
......
......@@ -42,14 +42,15 @@ class DaysOverPercentile(Diagnostic):
self.end_year = end_year
self.year_to_compute = year_to_compute
self.forecast_month = forecast_month
self.startdate = '{0}{1:02}01'.format(self.start_year, self.forecast_month)
self.startdate = '{0}{1:02}01'.format(self.year_to_compute, self.forecast_month)
def __eq__(self, other):
    # Two diagnostic jobs are considered equal when they target the same
    # startdate, domain and variable.
    # NOTE(review): assumes `other` exposes these attributes — comparing
    # against an unrelated type would raise AttributeError; confirm callers
    # only compare diagnostics with diagnostics.
    return self.startdate == other.startdate and self.domain == other.domain and self.variable == other.variable
def __str__(self):
    """Return a human-readable description of this diagnostic job.

    Includes the startdate, the domain:variable pair and the climatology
    period (start_year-end_year) used as reference for the percentiles.
    """
    # The diff residue left two concatenated return expressions here; only the
    # climatology-aware message (the added lines) is the intended behavior.
    return 'Days over percentile Startdate: {0} ' \
           'Variable: {1}:{2} Climatology: {3}-{4}'.format(self.startdate, self.domain, self.variable,
                                                           self.start_year, self.end_year)
@classmethod
def generate_jobs(cls, diags, options):
......@@ -143,6 +144,13 @@ class DaysOverPercentile(Diagnostic):
results_over = {perc: iris.cube.CubeList() for perc in ClimatologicalPercentile.Percentiles}
results_below = {perc: iris.cube.CubeList() for perc in ClimatologicalPercentile.Percentiles}
var_daysover = 'daysover'
var_days_below = 'daysbelow'
long_name_days_over = 'Proportion of days over a given percentile for {0.start_year}-{0.end_year} ' \
'climatology'.format(self)
long_name_days_below = 'Proportion of days below a given percentile for {0.start_year}-{0.end_year} ' \
'climatology'.format(self)
for leadtime in leadtimes.keys():
leadtime_slice = var.extract(iris.Constraint(leadtime=leadtime))
if len(percentiles.coords('leadtime')) >0:
......@@ -153,41 +161,18 @@ class DaysOverPercentile(Diagnostic):
first_time = time_coord.points[0]
last_time = time_coord.points[-1]
timesteps = leadtime_slice.coord('time').shape[0]
days = time_coord.units.num2date(last_time) - time_coord.units.num2date(first_time)
if days.seconds > 0:
days = days.days + 1
else:
days = days.days
timesteps_per_day = timesteps / days
time_coord = time_coord.copy(first_time + (last_time - first_time) / 2, (first_time, last_time))
for percentile_slice in percentiles_leadtime.slices_over('percentile'):
percentile = percentile_slice.coord('percentile').points[0]
days_over = np.sum(leadtime_slice.data > percentile_slice.data, 0) / float(timesteps_per_day)
result = iris.cube.Cube(days_over.astype(np.float32), var_name='daysover', units=1.0)
if realization_coord is not None:
result.add_aux_coord(realization_coord, 0)
result.add_dim_coord(lat_coord, 1)
result.add_dim_coord(lon_coord, 2)
else:
result.add_dim_coord(lat_coord, 0)
result.add_dim_coord(lon_coord, 1)
result.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
result.add_aux_coord(time_coord)
days_over = np.sum(leadtime_slice.data > percentile_slice.data, 0) / float(timesteps)
result = self.create_results_cube(days_over, lat_coord, lon_coord, percentile, realization_coord,
time_coord, var_daysover, long_name_days_over)
results_over[percentile].append(result)
days_below = np.sum(leadtime_slice.data < percentile_slice.data, 0) / float(timesteps_per_day)
result = iris.cube.Cube(days_below.astype(np.float32), var_name='daysbelow', units=1.0)
if realization_coord is not None:
result.add_aux_coord(realization_coord, 0)
result.add_dim_coord(lat_coord, 1)
result.add_dim_coord(lon_coord, 2)
else:
result.add_dim_coord(lat_coord, 0)
result.add_dim_coord(lon_coord, 1)
result.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
result.add_aux_coord(time_coord)
days_below = np.sum(leadtime_slice.data < percentile_slice.data, 0) / float(timesteps)
result = self.create_results_cube(days_below, lat_coord, lon_coord, percentile, realization_coord,
time_coord, var_days_below, long_name_days_below)
results_below[percentile].append(result)
for perc in ClimatologicalPercentile.Percentiles:
......@@ -204,6 +189,20 @@ class DaysOverPercentile(Diagnostic):
must_exist=False, rename_dimension=True)
self.days_below_file[perc].set_local_file(temp, rename_var='daysbelow')
def create_results_cube(self, days_over, lat_coord, lon_coord, percentile, realization_coord, time_coord,
                        var_name, long_name):
    """Wrap a days-over/below ratio array in an iris Cube with full metadata.

    The data is cast to float32 and tagged with the given variable name,
    long name, the percentile it refers to and the (collapsed) time
    coordinate. Latitude/longitude are attached as dimension coordinates;
    when an ensemble realization coordinate is supplied it occupies the
    leading axis and lat/lon shift one position to the right.
    """
    cube = iris.cube.Cube(days_over.astype(np.float32), var_name=var_name, long_name=long_name, units=1.0)
    has_realization = realization_coord is not None
    if has_realization:
        cube.add_aux_coord(realization_coord, 0)
    # lat/lon axes depend on whether a realization axis precedes them.
    offset = 1 if has_realization else 0
    cube.add_dim_coord(lat_coord, offset)
    cube.add_dim_coord(lon_coord, offset + 1)
    cube.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
    cube.add_aux_coord(time_coord)
    return cube
......@@ -2,13 +2,13 @@
#SBATCH -n 1
#SBATCH -w gustafson
#SBATCH --time 72:00:00
#SBATCH --time 2:00:00
#SBATCH --error=job.%J.err
#SBATCH --output=job.%J.out
PATH_TO_CONF_FILE=~jvegas/earthdiagnostics/diags.conf
PATH_TO_CONF_FILE=~vtorralb/diags-erai.conf
PATH_TO_DIAGNOSTICS=~jvegas/earthdiagnostics
PATH_TO_CONDAENV=~jvegas/anaconda/envs/earthdiags/bin
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment