Commit 39001ab9 authored by Javier Vegas-Regidor

Merged

parent 198c0bca
......@@ -73,7 +73,7 @@ class CMORManager(DataManager):
"""
Copies a given file from the CMOR repository to the scratch folder and returns the path to the copy in the scratch folder
:param **kwargs:
:param vartype:
:param domain: CMOR domain
:type domain: Domain
:param var: variable name
......
......@@ -25,7 +25,8 @@ class Config(object):
parser.read(path)
# Read diags config
self.data_adaptor = parser.get_choice_option('DIAGNOSTICS', 'DATA_ADAPTOR', ('CMOR', 'THREDDS', 'OBSRECON'), 'CMOR')
self.data_adaptor = parser.get_choice_option('DIAGNOSTICS', 'DATA_ADAPTOR', ('CMOR', 'THREDDS', 'OBSRECON'),
'CMOR')
"Scratch folder path"
self.scratch_dir = parser.get_path_option('DIAGNOSTICS', 'SCRATCH_DIR')
"Scratch folder path"
......
......@@ -2,7 +2,7 @@
from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, \
DiagnosticVariableListOption, DiagnosticIntOption
from earthdiagnostics.modelingrealm import ModelingRealm
from earthdiagnostics.utils import Utils
from earthdiagnostics.utils import Utils, TempFile
from earthdiagnostics.box import Box
......@@ -48,7 +48,7 @@ class SelectLevels(Diagnostic):
return 'Select levels Startdate: {0} Member: {1} Chunk: {2} ' \
'Variable: {3}:{4} Levels: {6}-{7} ' \
'Grid: {5}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable,
self.grid, self.box.min_depth, self.box.max_depth)
self.grid, self.box.min_depth, self.box.max_depth)
def __eq__(self, other):
return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
......
......@@ -5,8 +5,8 @@ import iris.exceptions
from earthdiagnostics.box import Box
from earthdiagnostics.diagnostic import Diagnostic, DiagnosticFloatOption, DiagnosticDomainOption, \
DiagnosticVariableOption, DiagnosticChoiceOption
from earthdiagnostics.utils import Utils, TempFile
DiagnosticVariableOption
from earthdiagnostics.utils import TempFile
from earthdiagnostics.modelingrealm import ModelingRealms
......@@ -84,7 +84,7 @@ class VerticalMeanMetersIris(Diagnostic):
job_list = list()
for startdate, member, chunk in diags.config.experiment.get_chunk_list():
job_list.append(VerticalMeanMetersIris(diags.data_manager, startdate, member, chunk,
options['domain'], options['variable'], box, options['grid_point']))
options['domain'], options['variable'], box, options['grid_point']))
return job_list
def request_data(self):
......@@ -108,9 +108,9 @@ class VerticalMeanMetersIris(Diagnostic):
coord = None
for coord_name in lev_names:
try:
coord = var_cube.coord(coord_name)
coord = var_cube.coord(coord_name)
except iris.exceptions.CoordinateNotFoundError:
pass
pass
if self.box.min_depth is None:
lev_min = coord.points[0]
......@@ -121,8 +121,8 @@ class VerticalMeanMetersIris(Diagnostic):
lev_max = coord.points[-1]
else:
lev_max = self.box.max_depth
var_cube = var_cube.extract(iris.Constraint(coord_values={coord.var_name:
lambda cell: lev_min <= cell <= lev_max}))
var_cube = var_cube.extract(iris.Constraint(coord_values=
{coord.var_name: lambda cell: lev_min <= cell <= lev_max}))
var_cube = var_cube.collapsed(coord, iris.analysis.MEAN)
temp = TempFile.get()
iris.save(var_cube, temp, zlib=True)
......
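For context, the reformatted lines above implement a common iris pattern: restrict a cube to a depth interval with an iris.Constraint, then average over the vertical coordinate. A minimal standalone sketch of that pattern (hypothetical function and variable names, not part of this commit):

import iris
import iris.analysis

def vertical_mean(var_cube, coord_name, lev_min, lev_max):
    # Locate the vertical coordinate by name (e.g. 'lev' or 'depth').
    coord = var_cube.coord(coord_name)
    # Keep only the levels inside [lev_min, lev_max].
    var_cube = var_cube.extract(iris.Constraint(
        coord_values={coord.var_name: lambda cell: lev_min <= cell <= lev_max}))
    # Collapse the remaining levels into their mean.
    return var_cube.collapsed(coord, iris.analysis.MEAN)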
# coding=utf-8
import os
from bscearth.utils.date import parse_date, add_months, chunk_start_date, chunk_end_date
from earthdiagnostics.datafile import NetCDFFile, StorageStatus, LocalStatus
from earthdiagnostics.datamanager import DataManager
from earthdiagnostics.utils import TempFile, Utils
from datetime import datetime
from earthdiagnostics.variable_type import VariableType
from frequency import Frequencies
class ObsReconManager(DataManager):
......@@ -33,11 +26,11 @@ class ObsReconManager(DataManager):
raise Exception('For obs and recon data chunk_size must be always 1')
# noinspection PyUnusedLocal
def request_leadtimes(self, domain, variable, startdate, member, leadtimes, frequency=None, vartype=VariableType.MEAN):
def request_leadtimes(self, domain, variable, startdate, member, leadtimes, frequency=None,
vartype=VariableType.MEAN):
filepath = self.get_file_path(startdate, domain, variable, frequency, vartype)
return self._get_file_from_storage(filepath)
# noinspection PyUnusedLocal
def file_exists(self, domain, var, startdate, member, chunk, grid=None, box=None, frequency=None,
vartype=VariableType.MEAN):
......
......@@ -84,7 +84,7 @@ class Interpolate(Diagnostic):
:return:
"""
options_available = (DiagnosticOption('target_grid'),
DiagnosticVariableListOption(),
DiagnosticVariableListOption('variable'),
DiagnosticDomainOption(default_value=ModelingRealms.ocean),
DiagnosticBoolOption('invert_lat', False),
DiagnosticOption('original_grid', ''))
......
# coding=utf-8
import shutil
from bscearth.utils.log import Log
import os
from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticVariableOption
from earthdiagnostics.utils import Utils, TempFile
......@@ -118,8 +117,6 @@ class Rotation(Diagnostic):
vfile_handler.close()
self.vrotated_file.set_local_file(urotated)
def _merge_levels(self, var, direction):
temp = TempFile.get()
if self.has_levels:
......
......@@ -12,7 +12,6 @@ import numpy as np
from earthdiagnostics.modelingrealm import ModelingRealms
from earthdiagnostics.constants import Basins
from earthdiagnostics.variable import VariableManager
class Siasiesiv(Diagnostic):
......
......@@ -13,7 +13,7 @@ class Publisher(object):
:param who: subscriber to add
:type who: object
:param callback: method to execute when publisher updates
:type callback: callable
:type callback: callable | NoneType
"""
if callback is None:
callback = getattr(who, 'update')
......
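In short, subscribe now explicitly allows callback=None and falls back to the subscriber's own update method. A minimal standalone sketch of that behaviour (hypothetical class layout, not the module's actual code):

class Publisher(object):
    def __init__(self):
        self._subscribers = {}

    def subscribe(self, who, callback=None):
        # With no explicit callback, notify the subscriber through its update() method.
        if callback is None:
            callback = getattr(who, 'update')
        self._subscribers[who] = callback

    def dispatch(self, *args):
        # Call every registered callback with the published arguments.
        for callback in self._subscribers.values():
            callback(*args)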
# coding=utf-8
import six
from bscearth.utils.date import parse_date, add_months
from bscearth.utils.log import Log
from earthdiagnostics.diagnostic import Diagnostic, DiagnosticVariableOption, DiagnosticDomainOption, DiagnosticIntOption
from earthdiagnostics.diagnostic import Diagnostic, DiagnosticVariableOption, DiagnosticDomainOption, \
DiagnosticIntOption
from earthdiagnostics.frequency import Frequencies
from earthdiagnostics.utils import Utils, TempFile
from earthdiagnostics.variable_type import VariableType
......@@ -137,20 +139,18 @@ class ClimatologicalPercentile(Diagnostic):
leadtimes = {1: PartialDateTime(lead_date.year, lead_date.month, lead_date.day)}
def assign_leadtime(coord, x):
try:
leadtime_month = 1
partial_date = leadtimes[leadtime_month]
while coord.units.num2date(x) >= partial_date:
leadtime_month += 1
try:
partial_date = leadtimes[leadtime_month]
except KeyError:
new_date = add_months(date, leadtime_month, self.data_manager.config.experiment.calendar)
partial_date = PartialDateTime(new_date.year, new_date.month, new_date.day)
leadtimes[leadtime_month] = partial_date
return leadtime_month
except Exception:
return -1
leadtime_month = 1
partial_date = leadtimes[leadtime_month]
while coord.units.num2date(x) >= partial_date:
leadtime_month += 1
try:
partial_date = leadtimes[leadtime_month]
except KeyError:
new_date = add_months(date, leadtime_month, self.data_manager.config.experiment.calendar)
partial_date = PartialDateTime(new_date.year, new_date.month, new_date.day)
leadtimes[leadtime_month] = partial_date
return leadtime_month
iris.coord_categorisation.add_categorised_coord(data_cube, 'leadtime', 'time', assign_leadtime)
return data_cube
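For reference, iris.coord_categorisation.add_categorised_coord builds a new auxiliary coordinate by calling the supplied function once per point of the source coordinate ('time' here), so assign_leadtime must map each time value to a leadtime month. A much simpler categoriser showing the same mechanism (illustrative only, not part of this commit):

import iris.coord_categorisation

def add_month_number(cube):
    def to_month(coord, value):
        # The categoriser receives the source coordinate and one of its points
        # and returns the category value for that point.
        return coord.units.num2date(value).month

    iris.coord_categorisation.add_categorised_coord(cube, 'month_number', 'time', to_month)
    return cube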
......@@ -174,13 +174,14 @@ class ClimatologicalPercentile(Diagnostic):
def _calculate_percentiles(self, distribution):
Log.debug('Calculating percentiles')
percentiles = {}
def calculate(point_distribution):
cs = np.cumsum(point_distribution)
total = cs[-1]
percentile_values = self.percentiles * total
index = np.searchsorted(cs, percentile_values)
return [(self._bins[i + 1] + self._bins[i]) / 2 for i in index]
for leadtime, dist in distribution.iteritems():
for leadtime, dist in six.iteritems(distribution):
Log.debug('Calculating leadtime {0}', leadtime)
percentiles[leadtime] = np.apply_along_axis(calculate, 0, dist)
return percentiles
......
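The inner calculate function reads percentiles off a per-point histogram: accumulate the counts, locate each requested fraction of the total with np.searchsorted, and return the centre of the matching bin; six.iteritems keeps the surrounding loop working on both Python 2 and 3. A small standalone numpy sketch of the same calculation (hypothetical bins and counts):

import numpy as np

counts = np.array([2, 5, 10, 5, 2])        # histogram counts for one grid point
bins = np.linspace(0.0, 5.0, num=6)        # the 6 edges of those 5 bins
percentiles = np.array([0.25, 0.5, 0.75])  # requested percentiles as fractions

cs = np.cumsum(counts)
index = np.searchsorted(cs, percentiles * cs[-1])
values = [(bins[i + 1] + bins[i]) / 2 for i in index]  # one bin centre per percentile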
......@@ -121,7 +121,7 @@ class DaysOverPercentile(Diagnostic):
partial_date = PartialDateTime(new_date.year, new_date.month, new_date.day)
leadtimes[leadtime_month] = partial_date
return leadtime_month
except Exception as ex:
except Exception:
pass
iris.coord_categorisation.add_categorised_coord(var, 'leadtime', 'time', assign_leadtime)
iris.coord_categorisation.add_year(var, 'time')
......
......@@ -152,10 +152,10 @@ class WorkManager(object):
@staticmethod
def _run_job(job):
time = datetime.datetime.now()
try:
Log.info('Starting {0}', job)
job.status = DiagnosticStatus.RUNNING
time = datetime.datetime.now()
job.compute()
except Exception as ex:
job.consumed_time = datetime.datetime.now() - time
......
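The likely point of this change is that the start time is now taken before the try block, so consumed_time can still be computed when job.compute() (or anything else inside the try) raises before the old timing line was reached. A minimal standalone sketch of that timing pattern (hypothetical job object, not the real WorkManager):

import datetime

def run_job(job):
    start = datetime.datetime.now()  # taken outside the try, so it always exists
    try:
        job.status = 'RUNNING'
        job.compute()
        job.status = 'COMPLETED'
    except Exception:
        job.status = 'FAILED'
    finally:
        # Safe in both branches because start was set before the try.
        job.consumed_time = datetime.datetime.now() - start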
......@@ -26,7 +26,7 @@ setup(
keywords=['climate', 'weather', 'diagnostic'],
setup_requires=['pyproj'],
install_requires=['numpy', 'netCDF4', 'bscearth.utils', 'cdo', 'nco>=0.0.3', 'iris>=1.12.0', 'coverage',
'pygrib', 'openpyxl', 'mock', 'futures', 'cf_units'],
'pygrib', 'openpyxl', 'mock', 'futures', 'cf_units', 'cfunits'],
packages=find_packages(),
include_package_data=True,
scripts=['bin/earthdiags']
......