Commit 3baa4b4c authored by Javier Vegas-Regidor

Days over for erainterim ready

parent 360c8269
@@ -6,7 +6,7 @@ SCRATCH_DIR = /scratch/Earth/$USER
 # Root path for the cmorized data to use
 DATA_DIR = /esnas:/esarchive
 # Specify if your data is from an experiment (exp), observation (obs) or reconstructions (recon)
-DATA_TYPE = exp
+DATA_TYPE = recon
 # CMORization type to use. Important also for THREDDS as it affects variable name conventions.
 # Options: SPECS (default), PRIMAVERA, CMIP6
 DATA_CONVENTION = SPECS
@@ -16,8 +16,7 @@ CON_FILES = /esnas/autosubmit/con_files/
 # Diagnostics to run, space separated. You must provide for each one the name and the parameters (comma separated) or
 # an alias defined in the ALIAS section (see more below). If you are using the diagnostics just to CMORize, leave it
 # empty
-# DIAGS = climpercent,atmos,sfcWind,2000,2000,1
-DIAGS = daysover,atmos,sfcWind,2000,2000,1
+DIAGS = climpercent,atmos,sfcWind,2000,2000,1-2 daysover,atmos,sfcWind,2000,2000,1-2
 # DIAGS = OHC
 # Frequency of the data you want to use by default. Some diagnostics do not use this value: i.e. monmean always stores
 # its results at monthly frequency (obvious) and has a parameter to specify input's frequency.
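The new DIAGS value relies on DiagnosticListIntOption accepting a month range such as 1-2 where a single integer was required before. As a rough sketch of what that expansion amounts to (the real parsing lives in DiagnosticListIntOption; this helper is purely illustrative):

# Hypothetical helper showing how a '1-2' style option value can expand
# into the list [1, 2]; a plain value like '3' stays a one-element list.
def parse_int_range(value):
    if '-' in value:
        start, end = value.split('-')
        return list(range(int(start), int(end) + 1))
    return [int(value)]

assert parse_int_range('1-2') == [1, 2]
assert parse_int_range('3') == [3]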
@@ -71,7 +70,7 @@ SERVER_URL = https://earth.bsc.es/thredds
 [EXPERIMENT]
 # Experiments parameters as defined in CMOR standard
 INSTITUTE = ecmwf
-MODEL = system4_m1
+MODEL = erainterim
 # Model version: Available versions
 MODEL_VERSION = Ec3.2_O1L75
 # Atmospheric output timestep in hours
@@ -87,7 +86,7 @@ OCEAN_TIMESTEP = 6
 # if 2, fc00
 # CHUNK_SIZE is the size of each data file, given in months
 # CHUNKS is the number of chunks. You can specify less chunks than present on the experiment
-EXPID = testing_recon
+EXPID = testing_erainterim
 STARTDATES = 20000101
 MEMBERS = 0
 MEMBER_DIGITS = 1
@@ -4,7 +4,7 @@ from bscearth.utils.date import parse_date, add_months
 from bscearth.utils.log import Log
 from earthdiagnostics.diagnostic import Diagnostic, DiagnosticVariableOption, DiagnosticDomainOption, \
-    DiagnosticIntOption
+    DiagnosticIntOption, DiagnosticListIntOption
 from earthdiagnostics.frequency import Frequencies
 from earthdiagnostics.utils import Utils, TempFile
 from earthdiagnostics.variable_type import VariableType
@@ -80,16 +80,16 @@ class ClimatologicalPercentile(Diagnostic):
             DiagnosticVariableOption(),
             DiagnosticIntOption('start_year'),
             DiagnosticIntOption('end_year'),
-            DiagnosticIntOption('forecast_month'),
+            DiagnosticListIntOption('forecast_month'),
             DiagnosticIntOption('bins', 2000),
         )
         options = cls.process_options(options, options_available)
         job_list = list()
-        job_list.append(ClimatologicalPercentile(diags.data_manager, options['domain'], options['variable'],
-                                                 options['bins'], options['start_year'], options['end_year'],
-                                                 options['forecast_month'],
-                                                 diags.config.experiment))
+        for forecast_month in options['forecast_month']:
+            job_list.append(ClimatologicalPercentile(diags.data_manager, options['domain'], options['variable'],
+                                                     options['bins'], options['start_year'], options['end_year'],
+                                                     forecast_month, diags.config.experiment))
         return job_list

     def requested_startdates(self):
@@ -152,7 +152,6 @@ class ClimatologicalPercentile(Diagnostic):
                 leadtimes[leadtime_month] = partial_date
             return leadtime_month
         iris.coord_categorisation.add_categorised_coord(data_cube, 'leadtime', 'time', assign_leadtime)
-
         return data_cube
@@ -197,12 +196,15 @@ class ClimatologicalPercentile(Diagnostic):
         for leadtime in set(data_cube.coord('leadtime').points):
             Log.debug('Discretizing leadtime {0}', leadtime)
             leadtime_cube = data_cube.extract(iris.Constraint(leadtime=leadtime))
             for realization in range(self.realizations):
                 Log.debug('Discretizing realization {0}', realization)
                 try:
                     realization_cube = leadtime_cube.extract(iris.Constraint(realization=realization+1))
                 except iris.exceptions.CoordinateNotFoundError:
-                    realization_cube = data_cube
+                    realization_cube = leadtime_cube
+
+                if realization_cube is None and realization == 0:
+                    realization_cube = leadtime_cube
                 if leadtime not in distribution:
                     distribution[leadtime] = self._calculate_distribution(realization_cube)
                 else:
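A subtlety the new `realization_cube is None` guard addresses: in iris, extracting with a constraint on a coordinate the cube does not have does not raise CoordinateNotFoundError; extract() simply returns None when nothing matches. A minimal sketch of that behaviour:

import iris
import iris.cube
import numpy as np

# A cube with no 'realization' coordinate at all, as for reconstructions.
cube = iris.cube.Cube(np.zeros((2, 2)), var_name='sfcWind')
# The constraint matches nothing, so extract() returns None rather than raising.
print(cube.extract(iris.Constraint(realization=1)))  # -> None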
@@ -5,6 +5,7 @@ from earthdiagnostics.statistics.climatologicalpercentile import ClimatologicalPercentile
 from earthdiagnostics.diagnostic import *
 from earthdiagnostics.frequency import Frequencies
 import iris
+import iris.exceptions
 import iris.coord_categorisation
 from iris.time import PartialDateTime
 import iris.analysis
@@ -33,24 +34,22 @@ class DaysOverPercentile(Diagnostic):
     alias = 'daysover'
     "Diagnostic alias for the configuration file"

-    def __init__(self, data_manager, startdate, member, chunk, domain, variable, start_year, end_year, forecast_month):
+    def __init__(self, data_manager, domain, variable, start_year, end_year, year_to_compute, forecast_month):
         Diagnostic.__init__(self, data_manager)
-        self.startdate = startdate
-        self.member = member
-        self.chunk = chunk
         self.variable = variable
         self.domain = domain
         self.start_year = start_year
         self.end_year = end_year
+        self.year_to_compute = year_to_compute
         self.forecast_month = forecast_month
+        self.startdate = '{0}{1:02}01'.format(self.start_year, self.forecast_month)

     def __eq__(self, other):
-        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
-            self.domain == other.domain and self.variable == other.variable
+        return self.startdate == other.startdate and self.domain == other.domain and self.variable == other.variable

     def __str__(self):
-        return 'Days over percentile Startdate: {0} Member: {1} Chunk: {2} ' \
-               'Variable: {3}:{4}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable)
+        return 'Days over percentile Startdate: {0} ' \
+               'Variable: {1}:{2}'.format(self.startdate, self.domain, self.variable)

     @classmethod
     def generate_jobs(cls, diags, options):
@@ -67,14 +66,17 @@ class DaysOverPercentile(Diagnostic):
             DiagnosticOption('variable'),
             DiagnosticIntOption('start_year'),
             DiagnosticIntOption('end_year'),
-            DiagnosticIntOption('forecast_month'),)
+            DiagnosticListIntOption('forecast_month'),)
         options = cls.process_options(options, options_available)
         job_list = list()
-        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
-            job_list.append(DaysOverPercentile(diags.data_manager, startdate, member, chunk,
-                                               options['domain'], options['variable'],
-                                               options['start_year'], options['end_year'], options['forecast_month'],))
+        year = options['start_year']
+        while year <= options['end_year']:
+            for forecast_month in options['forecast_month']:
+                job_list.append(DaysOverPercentile(diags.data_manager, options['domain'], options['variable'],
+                                                   options['start_year'], options['end_year'],
+                                                   year, forecast_month))
+            year += 1
         return job_list

     def request_data(self):
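In effect, jobs now fan out over every (year, forecast_month) pair between start_year and end_year instead of over the experiment's (startdate, member, chunk) list. A compact equivalent of the loop above, with invented values:

from itertools import product

# One DaysOverPercentile job per (year, forecast_month) combination.
start_year, end_year, forecast_months = 2000, 2000, [1, 2]
for year, month in product(range(start_year, end_year + 1), forecast_months):
    print(year, month)  # 2000 1, then 2000 2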
@@ -82,7 +84,7 @@ class DaysOverPercentile(Diagnostic):
         self.percentiles_file = self.request_chunk(self.domain, var_name, None, None, None,
                                                    frequency=Frequencies.climatology)

-        self.variable_file = self.request_chunk(self.domain, self.variable, self.startdate, self.member, self.chunk)
+        self.variable_file = self.request_chunk(self.domain, self.variable, self.startdate, None, None)

     def declare_data_generated(self):
         var_over = self.variable + '_daysover_q{0}'
@@ -90,11 +92,11 @@ class DaysOverPercentile(Diagnostic):
         self.days_over_file = {}
         self.days_below_file = {}
         for perc in ClimatologicalPercentile.Percentiles:
-            self.days_over_file[perc] = self.declare_chunk(self.domain, var_over.format(int(perc * 100)), None, None,
+            self.days_over_file[perc] = self.declare_chunk(self.domain, var_over.format(int(perc * 100)), self.startdate, None,
                                                            None, frequency=Frequencies.monthly,
                                                            vartype=VariableType.STATISTIC)
-            self.days_below_file[perc] = self.declare_chunk(self.domain, var_below.format(int(perc * 100)), None, None,
+            self.days_below_file[perc] = self.declare_chunk(self.domain, var_below.format(int(perc * 100)), self.startdate, None,
                                                             None, frequency=Frequencies.monthly,
                                                             vartype=VariableType.STATISTIC)
@@ -132,7 +134,10 @@ class DaysOverPercentile(Diagnostic):
         iris.coord_categorisation.add_categorised_coord(var, 'leadtime', 'time', assign_leadtime)
         iris.coord_categorisation.add_year(var, 'time')
         iris.coord_categorisation.add_day_of_year(var, 'time')
-        realization_coord = var.coord('realization')
+        try:
+            realization_coord = var.coord('realization')
+        except iris.exceptions.CoordinateNotFoundError:
+            realization_coord = None
         lat_coord = var.coord('latitude')
         lon_coord = var.coord('longitude')
         results_over = {perc: iris.cube.CubeList() for perc in ClimatologicalPercentile.Percentiles}
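Unlike extract(), Cube.coord() does raise when the coordinate is absent, which is what the new try/except relies on to detect deterministic input. A minimal sketch:

import iris.cube
import iris.exceptions
import numpy as np

cube = iris.cube.Cube(np.zeros((2, 2)), var_name='sfcWind')
try:
    cube.coord('realization')
except iris.exceptions.CoordinateNotFoundError:
    # No ensemble dimension (e.g. erainterim): treat the data as a
    # single realization further down.
    realization_coord = None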
@@ -140,8 +145,10 @@ class DaysOverPercentile(Diagnostic):
         for leadtime in leadtimes.keys():
             leadtime_slice = var.extract(iris.Constraint(leadtime=leadtime))
-            percentiles_leadtime = percentiles.extract(iris.Constraint(leadtime=leadtime))
+            if len(percentiles.coords('leadtime')) > 0:
+                percentiles_leadtime = percentiles.extract(iris.Constraint(leadtime=leadtime))
+            else:
+                percentiles_leadtime = percentiles
             time_coord = iris.coords.AuxCoord.from_coord(leadtime_slice.coord('time'))
             first_time = time_coord.points[0]
             last_time = time_coord.points[-1]
@@ -158,18 +165,27 @@ class DaysOverPercentile(Diagnostic):
                 days_over = np.sum(leadtime_slice.data > percentile_slice.data, 0) / float(timesteps_per_day)
                 result = iris.cube.Cube(days_over.astype(np.float32), var_name='daysover', units=1.0)
-                result.add_dim_coord(lat_coord, 1)
-                result.add_dim_coord(lon_coord, 2)
-                result.add_aux_coord(realization_coord, 0)
+                if realization_coord is not None:
+                    result.add_aux_coord(realization_coord, 0)
+                    result.add_dim_coord(lat_coord, 1)
+                    result.add_dim_coord(lon_coord, 2)
+                else:
+                    result.add_dim_coord(lat_coord, 0)
+                    result.add_dim_coord(lon_coord, 1)
                 result.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
                 result.add_aux_coord(time_coord)
                 results_over[percentile].append(result)

                 days_below = np.sum(leadtime_slice.data < percentile_slice.data, 0) / float(timesteps_per_day)
                 result = iris.cube.Cube(days_below.astype(np.float32), var_name='daysbelow', units=1.0)
-                result.add_dim_coord(lat_coord, 1)
-                result.add_dim_coord(lon_coord, 2)
-                result.add_aux_coord(realization_coord, 0)
+                if realization_coord is not None:
+                    result.add_aux_coord(realization_coord, 0)
+                    result.add_dim_coord(lat_coord, 1)
+                    result.add_dim_coord(lon_coord, 2)
+                else:
+                    result.add_dim_coord(lat_coord, 0)
+                    result.add_dim_coord(lon_coord, 1)
                 result.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
                 result.add_aux_coord(time_coord)
                 results_below[percentile].append(result)
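The duplicated branches exist because dropping the realization coordinate shifts every dimension index down by one: with an ensemble the result is laid out as (realization, latitude, longitude), without one as (latitude, longitude). A toy illustration of the two layouts (coordinate values invented):

import iris.coords
import iris.cube
import numpy as np

lat = iris.coords.DimCoord(np.arange(2.0), standard_name='latitude', units='degrees')
lon = iris.coords.DimCoord(np.arange(3.0), standard_name='longitude', units='degrees')

with_ensemble = iris.cube.Cube(np.zeros((1, 2, 3)))  # (realization, lat, lon)
with_ensemble.add_dim_coord(lat.copy(), 1)
with_ensemble.add_dim_coord(lon.copy(), 2)

no_ensemble = iris.cube.Cube(np.zeros((2, 3)))  # (lat, lon)
no_ensemble.add_dim_coord(lat.copy(), 0)
no_ensemble.add_dim_coord(lon.copy(), 1)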
@@ -178,12 +194,14 @@ class DaysOverPercentile(Diagnostic):
             iris.FUTURE.netcdf_no_unlimited = True
             temp = TempFile.get()
             iris.save(results_over[perc].merge_cube(), temp, zlib=True)
-            Utils.rename_variable(temp, 'dim2', 'ensemble', must_exist=False, rename_dimension=True)
+            Utils.rename_variables(temp, {'dim2': 'ensemble', 'dim1': 'ensemble'},
+                                   must_exist=False, rename_dimension=True)
             self.days_over_file[perc].set_local_file(temp, rename_var='daysover')

             temp = TempFile.get()
             iris.save(results_below[perc].merge_cube(), temp, zlib=True)
-            Utils.rename_variable(temp, 'dim2', 'ensemble', must_exist=False, rename_dimension=True)
+            Utils.rename_variables(temp, {'dim2': 'ensemble', 'dim1': 'ensemble'},
+                                   must_exist=False, rename_dimension=True)
             self.days_below_file[perc].set_local_file(temp, rename_var='daysbelow')
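Switching from rename_variable to rename_variables covers both anonymous dimension names the merged cubes can come out with: 'dim2' when a realization axis is present and 'dim1' when it is not (one fewer leading dimension). At the NetCDF level the operation amounts to something like this netCDF4 sketch (the real logic lives in the project's Utils helper; the file name is invented):

import netCDF4

with netCDF4.Dataset('daysover.nc', 'a') as dataset:  # hypothetical file
    for old_name in ('dim1', 'dim2'):
        if old_name in dataset.dimensions:
            dataset.renameDimension(old_name, 'ensemble')
        if old_name in dataset.variables:
            dataset.renameVariable(old_name, 'ensemble')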
@@ -10,7 +10,7 @@
 PATH_TO_CONF_FILE=~jvegas/earthdiagnostics/diags.conf
 PATH_TO_DIAGNOSTICS=~jvegas/earthdiagnostics
-PATH_TO_CONDAENV=~jvegas/anaconda/venvs/earthdiags/bin
+PATH_TO_CONDAENV=~jvegas/anaconda/envs/earthdiags/bin

 module purge
 module load NCO/4.5.4-foss-2015a