"""Compute the sea ice volume diagnostic (sivoln / sivols) from sivol."""
import six
import iris
import iris.analysis
import iris.coords
import iris.util
from bscearth.utils.log import Log
from earthdiagnostics.constants import Basins
from earthdiagnostics.diagnostic import (
Diagnostic,
DiagnosticBasinListOption,
)
from earthdiagnostics.modelingrealm import ModelingRealms
from earthdiagnostics.utils import Utils, TempFile
class Sivolume(Diagnostic):
"""
Compute the sea ice volume from sivol in both hemispheres or a region.
Parameters
----------
data_manager: DataManager
startdate: str
member: int
chunk: int
variable: str
basin: list of Basin
mask: numpy.array
"""
def __init__(
self,
data_manager,
startdate,
member,
chunk,
masks,
var_manager,
data_convention,
):
Diagnostic.__init__(self, data_manager)
self.startdate = startdate
self.member = member
self.chunk = chunk
self.masks = masks
self.generated = {}
self.var_manager = var_manager
self.data_convention = data_convention
self.sivol_varname = self.var_manager.get_variable("sivol").short_name
return (
"Sivolume Startdate: {0.startdate} Member: {0.member} "
"Chunk: {0.chunk} Basins: {1} ".format(
self, ",".join(str(basin) for basin in self.masks.keys())
)
)
@classmethod
def generate_jobs(cls, diags, options):
"""
Create a job for each chunk to compute the diagnostic
:param diags: Diagnostics manager class
:type diags: Diags
:param options: basin
:type options: list[str]
:return:
"""
DiagnosticBasinListOption("basins", Basins().Global.name),
options = cls.process_options(options, options_available)
e1t = iris.load_cube("mesh_hgr.nc", "e1t")
e2t = iris.load_cube("mesh_hgr.nc", "e2t")
area = e1t * e2t
for basin in basins:
masks[basin] = Utils.get_mask(basin) * area.data
job_list = list()
for (
startdate,
member,
chunk,
) in diags.config.experiment.get_chunk_list():
diags.data_manager,
startdate,
member,
chunk,
masks,
diags.config.var_manager,
diags.config.data_convention,
)
return job_list
def request_data(self):
"""Request data required by the diagnostic"""
self.sivol = self.request_chunk(
ModelingRealms.seaIce,
self.sivol_varname,
self.startdate,
self.member,
self.chunk,
)
def declare_data_generated(self):
"""Declare data to be generated by the diagnostic"""
self._declare_var("sivols")
self._declare_var("sivoln")
self.generated[var_name] = self.declare_chunk(
ModelingRealms.seaIce,
var_name,
self.startdate,
self.member,
self.chunk,
)
coordinates = " ".join(
(
"time",
"leadtime",
"time_centered",
self.data_convention.lon_name,
self.data_convention.lat_name,
)
)
handler = Utils.open_cdf(self.sivol.local_file)
handler.variables[self.sivol_varname].coordinates = coordinates
handler.close()
self.sivoln[basin] = self.sum(sivol, mask, north=True)
self.sivols[basin] = self.sum(sivol, mask, north=False)
del sivol
self.save("sivoln", self.sivoln)
self.save("sivols", self.sivols)
def sum(self, data, mask, north=True):
if north:
condition = data.coord("latitude").points < 0
weights = (
iris.util.broadcast_to_shape(
condition, data.shape, data.coord_dims("latitude")
)
* mask
)
return data.collapsed(
("latitude", "longitude"), iris.analysis.SUM, weights=weights
)
def save(self, var, results):
cubes = iris.cube.CubeList()
for basin, result in six.iteritems(results):
result.var_name = var
result.units = "m^3"
result.add_aux_coord(
iris.coords.AuxCoord(basin.name, var_name="region")
)
cubes.append(result)
self._save_file(cubes.merge_cube(), var)
def _save_file(self, data, var):
generated_file = self.generated[var]
temp = TempFile.get()
data.remove_coord('latitude')
data.remove_coord('longitude')
data.remove_coord(data.coord(var_name='i'))
data.remove_coord(data.coord(var_name='j'))
iris.save(data, temp, zlib=True)
generated_file.set_local_file(temp)