Commit 360c8269 authored by Javier Vegas-Regidor's avatar Javier Vegas-Regidor
Browse files

Days over ready

parent 39001ab9
......@@ -16,7 +16,8 @@ CON_FILES = /esnas/autosubmit/con_files/
# Diagnostics to run, space separated. You must provide for each one the name and the parameters (comma separated) or
# an alias defined in the ALIAS section (see more below). If you are using the diagnostics just to CMORize, leave it
# empty
DIAGS = climpercent,atmos,sfcWind,2000,2000,1 daysover,atmos,sfcWind,2000,2000,1
# DIAGS = climpercent,atmos,sfcWind,2000,2000,1
DIAGS = daysover,atmos,sfcWind,2000,2000,1
# DIAGS = OHC
# Frequency of the data you want to use by default. Some diagnostics do not use this value: i.e. monmean always stores
# its results at monthly frequency (obvious) and has a parameter to specify input's frequency.
......
......@@ -31,6 +31,8 @@ class ClimatologicalPercentile(Diagnostic):
alias = 'climpercent'
"Diagnostic alias for the configuration file"
Percentiles = np.array([0.1, 0.25, 0.33, 0.5, 0.66, 0.75, 0.9])
def __init__(self, data_manager, domain, variable, num_bins, start_year, end_year, forecast_month,
experiment_config):
Diagnostic.__init__(self, data_manager)
......@@ -46,7 +48,6 @@ class ClimatologicalPercentile(Diagnostic):
self.start_year = start_year
self.end_year = end_year
self.forecast_month = forecast_month
self.percentiles = np.array([0.1, 0.25, 0.33, 0.5, 0.66, 0.75, 0.9])
self.cmor_var = data_manager.variable_list.get_variable(variable, silent=True)
if self.cmor_var and self.cmor_var.valid_max and self.cmor_var.valid_min:
self.max_value = float(self.cmor_var.valid_max)
......@@ -157,7 +158,7 @@ class ClimatologicalPercentile(Diagnostic):
def _save_results(self, percentile_values):
temp = TempFile.get()
percentile_coord = iris.coords.DimCoord(self.percentiles, long_name='percentile')
percentile_coord = iris.coords.DimCoord(ClimatologicalPercentile.Percentiles, long_name='percentile')
results = iris.cube.CubeList()
for leadtime in percentile_values.keys():
result = iris.cube.Cube(percentile_values[leadtime], var_name='percent', units=self.units)
......@@ -178,7 +179,7 @@ class ClimatologicalPercentile(Diagnostic):
def calculate(point_distribution):
cs = np.cumsum(point_distribution)
total = cs[-1]
percentile_values = self.percentiles * total
percentile_values = ClimatologicalPercentile.Percentiles * total
index = np.searchsorted(cs, percentile_values)
return [(self._bins[i + 1] + self._bins[i]) / 2 for i in index]
for leadtime, dist in six.iteritems(distribution):
......
# coding=utf-8
from bscearth.utils.date import parse_date, add_months
from earthdiagnostics.statistics.climatologicalpercentile import ClimatologicalPercentile
from earthdiagnostics.diagnostic import *
from earthdiagnostics.frequency import Frequencies
import iris
......@@ -84,13 +85,18 @@ class DaysOverPercentile(Diagnostic):
self.variable_file = self.request_chunk(self.domain, self.variable, self.startdate, self.member, self.chunk)
def declare_data_generated(self):
var_over = self.variable + '_daysover'
var_below = self.variable + '_daysbelow'
self.days_over_file = self.declare_chunk(self.domain, var_over, None, None, None,
frequency=Frequencies.monthly, vartype=VariableType.STATISTIC)
self.days_below_file = self.declare_chunk(self.domain, var_below, None, None, None,
frequency=Frequencies.monthly, vartype=VariableType.STATISTIC)
var_over = self.variable + '_daysover_q{0}'
var_below = self.variable + '_daysbelow_q{0}'
self.days_over_file = {}
self.days_below_file = {}
for perc in ClimatologicalPercentile.Percentiles:
self.days_over_file[perc] = self.declare_chunk(self.domain, var_over.format(int(perc * 100)), None, None,
None, frequency=Frequencies.monthly,
vartype=VariableType.STATISTIC)
self.days_below_file[perc] = self.declare_chunk(self.domain, var_below.format(int(perc * 100)), None, None,
None, frequency=Frequencies.monthly,
vartype=VariableType.STATISTIC)
def compute(self):
"""
......@@ -129,43 +135,56 @@ class DaysOverPercentile(Diagnostic):
realization_coord = var.coord('realization')
lat_coord = var.coord('latitude')
lon_coord = var.coord('longitude')
results_over = iris.cube.CubeList()
results_below = iris.cube.CubeList()
results_over = {perc: iris.cube.CubeList() for perc in ClimatologicalPercentile.Percentiles}
results_below = {perc: iris.cube.CubeList() for perc in ClimatologicalPercentile.Percentiles}
for leadtime in leadtimes.keys():
leadtime_slice = var.extract(iris.Constraint(leadtime=leadtime))
timesteps = leadtime_slice.coord('time').shape[0]
percentiles_leadtime = percentiles.extract(iris.Constraint(leadtime=leadtime))
time_coord = iris.coords.AuxCoord.from_coord(leadtime_slice.coord('time'))
first_time = time_coord.points[0]
last_time = time_coord.points[-1]
timesteps = leadtime_slice.coord('time').shape[0]
days = time_coord.units.num2date(last_time) - time_coord.units.num2date(first_time)
if days.seconds > 0:
days = days.days + 1
else:
days = days.days
timesteps_per_day = timesteps / days
time_coord = time_coord.copy(first_time + (last_time - first_time) / 2, (first_time, last_time))
for percentile_slice in percentiles_leadtime.slices_over('percentile'):
percentile = percentile_slice.coord('percentile').points[0]
days_over = np.sum(leadtime_slice.data > percentile_slice.data, 0) / float(timesteps)
days_over = np.sum(leadtime_slice.data > percentile_slice.data, 0) / float(timesteps_per_day)
result = iris.cube.Cube(days_over.astype(np.float32), var_name='daysover', units=1.0)
result.add_dim_coord(lat_coord, 1)
result.add_dim_coord(lon_coord, 2)
result.add_aux_coord(realization_coord, 0)
result.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
result.add_aux_coord(iris.coords.AuxCoord(np.int8(leadtime), long_name='leadtime'))
results_over.append(result)
result.add_aux_coord(time_coord)
results_over[percentile].append(result)
days_below = np.sum(leadtime_slice.data < percentile_slice.data, 0) / float(timesteps)
days_below = np.sum(leadtime_slice.data < percentile_slice.data, 0) / float(timesteps_per_day)
result = iris.cube.Cube(days_below.astype(np.float32), var_name='daysbelow', units=1.0)
result.add_dim_coord(lat_coord, 1)
result.add_dim_coord(lon_coord, 2)
result.add_aux_coord(realization_coord, 0)
result.add_aux_coord(iris.coords.AuxCoord(percentile, long_name='percentile'))
result.add_aux_coord(iris.coords.AuxCoord(np.int8(leadtime), long_name='leadtime'))
results_below.append(result)
iris.FUTURE.netcdf_no_unlimited = True
temp = TempFile.get()
iris.save(results_over.merge_cube(), temp, zlib=True)
Utils.rename_variable(temp, 'dim2', 'ensemble', must_exist=False, rename_dimension=True)
self.days_over_file.set_local_file(temp, rename_var='daysover')
temp = TempFile.get()
iris.save(results_below.merge_cube(), temp, zlib=True)
Utils.rename_variable(temp, 'dim2', 'ensemble', must_exist=False, rename_dimension=True)
self.days_below_file.set_local_file(temp, rename_var='daysbelow')
result.add_aux_coord(time_coord)
results_below[percentile].append(result)
for perc in ClimatologicalPercentile.Percentiles:
iris.FUTURE.netcdf_no_unlimited = True
temp = TempFile.get()
iris.save(results_over[perc].merge_cube(), temp, zlib=True)
Utils.rename_variable(temp, 'dim2', 'ensemble', must_exist=False, rename_dimension=True)
self.days_over_file[perc].set_local_file(temp, rename_var='daysover')
temp = TempFile.get()
iris.save(results_below[perc].merge_cube(), temp, zlib=True)
Utils.rename_variable(temp, 'dim2', 'ensemble', must_exist=False, rename_dimension=True)
self.days_below_file[perc].set_local_file(temp, rename_var='daysbelow')
......
......@@ -71,3 +71,11 @@ mun,mun,,
mud,mud,,
ppnewn,ppnewn,,
ppnewd,ppnewd,,
alb_ice,sialb,,
qsr3d,rsdo,,
hflx_rnf_cea,hfrunoffds2d,,
hflx_rain_cea,hfrainds,,
hflx_cal_cea,hfibthermds2d,,
rain,prra,,
calving,ficeberg2d,,
......@@ -298,12 +298,5 @@ qtr_ice,qtr,,
var78,tclw,,
var79,tciw,,
rho,rhopoto,,
alb_ice,sialb,,
qsr,rsntds,,
qsr3d,rsdo,,
hflx_rnf_cea,hfrunoffds2d,,
hflx_rain_cea,hfrainds,,
hflx_cal_cea,hfibthermds2d,,
rain,prra,,
runoffs,friver,,
calving,ficeberg2d,,
\ No newline at end of file
runoffs,friver,,
\ No newline at end of file
Aliases,Shortname,Basin,Grid
iiceconc:siconc:soicecov:ileadfra:sic,siconc,,
alb_ice,sialb,,
qsr3d,rsdo,,
hflx_rnf_cea,hfrunoffds2d,,
hflx_rain_cea,hfrainds,,
hflx_cal_cea,hfibthermds2d,,
rain,prra,,
calving,ficeberg2d,,
......@@ -6,18 +6,20 @@
#SBATCH --error=job.%J.err
#SBATCH --output=job.%J.out
set -xv
PATH_TO_CONF_FILE=~/earthdiagnostics/diags.conf
PATH_TO_DIAGNOSTICS=~/earthdiagnostics
PATH_TO_VIRTUALENV=~jvegas/virtualenvs/diags/bin
PATH_TO_CONF_FILE=~jvegas/earthdiagnostics/diags.conf
PATH_TO_DIAGNOSTICS=~jvegas/earthdiagnostics
PATH_TO_CONDAENV=~jvegas/anaconda/venvs/earthdiags/bin
module purge
module load NCO/4.5.4-foss-2015a
module load CDO/1.7.2-foss-2015a
module load CDFTOOLS/3.0a5-foss-2015a
module load CDFTOOLS/3.0a8-foss-2015a
set -xv
source ${PATH_TO_VIRTUALENV}/activate
source ${PATH_TO_CONDAENV}/activate earthdiags
export PYTHONPATH=${PATH_TO_DIAGNOSTICS}:${PYTHONPATH}
cd ${PATH_TO_DIAGNOSTICS}/earthdiagnostics/
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment