# coding=utf-8
"""Compute the indices for oceanic basins"""
from bscearth.utils.log import Log

import iris
import iris.analysis

from earthdiagnostics.diagnostic import Diagnostic
from earthdiagnostics.modelingrealm import ModelingRealms
from earthdiagnostics.utils import Utils, TempFile


class Indices(Diagnostic):
    """
    Compute SST-based climate indices (AMV, IPO and IOD) for oceanic basins

    :created: March 2012
    :last modified: June 2016

    :param data_manager: data management object
    :type data_manager: DataManager
    :param startdate: startdate
    :type startdate: str
    :param member: member number
    :type member: int
    :param chunk: chunk's number
    :type chunk: int
    """

    alias = 'indices'
    "Diagnostic alias for the configuration file"

    def __init__(self, data_manager, startdate, member, chunk):
        Diagnostic.__init__(self, data_manager)
        self.startdate = startdate
        self.member = member
        self.chunk = chunk

        self.results = {}
        self.region_metadata = {}
        self.generated = {}
        self.variable_file = None

    def __str__(self):
        return 'Indices Startdate: {0.startdate} Member: {0.member} ' \
            'Chunk: {0.chunk}'.format(self)

    def __hash__(self):
        return hash(str(self))

    def __eq__(self, other):
        if self._different_type(other):
            return False
        return (
            self.startdate == other.startdate and
            self.member == other.member and self.chunk == other.chunk
        )

    @classmethod
    def generate_jobs(cls, diags, options):
        """
        Create a job for each chunk to compute the diagnostic

        :param diags: Diagnostics manager class
        :type diags: Diags
        :param options: options for the diagnostic (unused)
        :type options: list[str]
        :return: list with one job per chunk
        :rtype: list[Indices]
        """

        job_list = list()
        for sdate, mem, chunk in diags.config.experiment.get_chunk_list():
            job_list.append(
                Indices(diags.data_manager, sdate, mem, chunk))
        return job_list

    def request_data(self):
        """Request data required by the diagnostic"""
        self.variable_file = self.request_chunk(
            ModelingRealms.ocean, 'tosmean',
            self.startdate, self.member, self.chunk)

    def declare_data_generated(self):
        """Declare data to be generated by the diagnostic"""
        self._declare_var('amv')
        self._declare_var('ipo')
        self._declare_var('iod')

    def _declare_var(self, var_name):
        self.generated[var_name] = self.declare_chunk(
            ModelingRealms.ocean, var_name,
            self.startdate, self.member, self.chunk)

    def compute(self):
        """Run the diagnostic"""
        tosmean = iris.load_cube(self.variable_file.local_file)
        data_regions = tosmean.coord('region').points
        amv_regions = ['AMV_North_Atlantic', 'AMV_trend']
        ipo_regions = ['Pacific_TPI1', 'Pacific_TPI2', 'Pacific_TPI3']
        iod_regions = ['Indian_dipole_east', 'Indian_dipole_west']

        check_amv = set(amv_regions).issubset(set(data_regions))
        if check_amv:
            data = {}
            for region in amv_regions:
                data[region] = tosmean.extract(iris.Constraint(region=region))
            self.compute_amv(data)
        else:
            Log.info('Input data does not contain the basins required to '
                     'compute the AMV index. Skipping AMV computations.')

        check_ipo = set(ipo_regions).issubset(set(data_regions))
        if check_ipo:
            data = {}
            for region in ipo_regions:
                data[region] = tosmean.extract(iris.Constraint(region=region))
            self.compute_ipo(data)
        else:
            Log.info('Input data does not contain the basins required to '
                     'compute the IPO index. Skipping IPO computations.')

        check_iod = set(iod_regions).issubset(set(data_regions))
        if check_iod:
            data = {}
            for region in iod_regions:
                data[region] = tosmean.extract(iris.Constraint(region=region))
            self.compute_iod(data)
        else:
            Log.info('Input data does not contain the basins required to '
                     'compute the IOD index. Skipping IOD computations.')

        self.save()

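    # AMV: mean SST over the North Atlantic box minus the mean over the
    # quasi-global 'AMV_trend' box (60S-60N), which removes the global-mean
    # signal before assessing Atlantic multidecadal variability
    # (in the spirit of Trenberth & Shea, 2006)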
    def compute_amv(self, data):
        """Compute the Atlantic Multidecadal Variability (AMV) index"""
        self.results['amv'] = (data['AMV_North_Atlantic'].data -
                               data['AMV_trend'].data)
        self.region_metadata['amv'] = (
            'AMV_North_Atlantic Box (lat: [0, 60], lon: [-80, 0]), '
            'AMV_trend Box (lat: [-60, 60], lon: [-180, 180])')

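    # IPO: tripole index (TPI) form, region 2 minus half the sum of regions 1
    # and 3, using the three Pacific boxes of Henley et al. (2015)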
    def compute_ipo(self, data):
        """Compute the Interdecadal Pacific Oscillation (IPO) index"""
        self.results['ipo'] = data['Pacific_TPI2'].data - 0.5 * (
            data['Pacific_TPI1'].data + data['Pacific_TPI3'].data
        )
        self.region_metadata['ipo'] = (
            'Pacific_TPI1 Box ( (lat: [25, 45], lon: [140, 180]), '
            '(lat: [25, 45], lon: [-180, -145]) ) '
            'Pacific_TPI2 Box ( (lat: [-10, 10], lon: [170, 180]), '
            '(lat: [-10, 10], lon: [-180, -90]) ) '
            'Pacific_TPI3 Box ( (lat: [-50, -15], lon: [150, 180]), '
            '(lat: [-50, -15], lon: [-180, -160]) )'
        )

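    # IOD: dipole mode index (DMI), western minus eastern equatorial Indian
    # Ocean box-mean SST (cf. Saji et al., 1999)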
    def compute_iod(self, data):
        """Compute the Indian Ocean Dipole (IOD) index"""
        self.results['iod'] = (data['Indian_dipole_west'].data -
                               data['Indian_dipole_east'].data)
        self.region_metadata['iod'] = (
            'Indian_dipole_west Box (lat: [-10, 10], lon: [50, 70]) '
            'Indian_dipole_east Box (lat: [-10, 0], lon: [90, 110])'
        )

    def save(self):
        """Store each computed index as a time series and register the file"""
        for var, res in self.results.items():
            temp = TempFile.get()
            handler_source = Utils.open_cdf(self.variable_file.local_file)
            handler_temp = Utils.open_cdf(temp, 'w')

            # Copy the time coordinate from the input file so the index is
            # written on the same time axis as the source data
            Utils.copy_variable(
                handler_source, handler_temp, 'time', True, True)
            handler_source.close()

            var_res = handler_temp.createVariable(var, float, ('time',))
            var_res[...] = res[...]
            var_res.units = 'degC'
            var_res.comment = '{var} index computed at {region}'.format(
                var=var,
                region=self.region_metadata[var]
            )
            handler_temp.close()
            self.generated[var].set_local_file(temp, diagnostic=self)