# coding=utf-8
"""Compute the MOC for oceanic basins."""
import iris
import netCDF4
import numpy as np

import diagonals.moc as moc
from diagonals.mesh_helpers.nemo import Nemo

from bscearth.utils.log import Log

from earthdiagnostics.constants import Basins
from earthdiagnostics.diagnostic import Diagnostic, DiagnosticBasinListOption
from earthdiagnostics.modelingrealm import ModelingRealms
from earthdiagnostics.utils import Utils, TempFile
class Moc(Diagnostic):
    """
    Compute the MOC for oceanic basins

    :original author: Virginie Guemas <virginie.guemas@bsc.es>
    :contributor: Javier Vegas-Regidor<javier.vegas@bsc.es>

    :created: March 2012
    :last modified: June 2016

    :param data_manager: data management object
    :type data_manager: DataManager
    :param startdate: startdate
    :type startdate: str
    :param member: member number
    :type member: int
    :param chunk: chunk's number
    :type chunk: int
    """

    alias = "moc"
    "Diagnostic alias for the configuration file"

    # Name of the variable generated by this diagnostic
    # (meridional overturning volume streamfunction)
    vsftmyz = "vsftmyz"

    def __init__(self, data_manager, startdate, member, chunk, basins):
        Diagnostic.__init__(self, data_manager)
        self.data_convention = data_manager.convention
        self.startdate = startdate
        self.member = member
        self.chunk = chunk
        self.basins = basins

    def __str__(self):
        """Return a human-readable description of the job."""
        basins = []
        basins.extend(map(str, self.basins))
        return (
            "MOC Startdate: {0.startdate} Member: {0.member} "
            "Chunk: {0.chunk} Basins: {1}".format(self, basins)
        )

    def __hash__(self):
        # Jobs that compare equal must hash equal; the string form
        # contains every field used by __eq__
        return hash(str(self))

    def __eq__(self, other):
        """Two MOC jobs are equal if they cover the same chunk."""
        if self._different_type(other):
            return False
        return (
            self.startdate == other.startdate
            and self.member == other.member
            and self.chunk == other.chunk
        )

    @classmethod
    def generate_jobs(cls, diags, options):
        """
        Create a job for each chunk to compute the diagnostic

        :param diags: Diagnostics manager class
        :type diags: Diags
        :param options: None
        :type options: list[str]
        """
        options_available = (DiagnosticBasinListOption("basins", "glob"),)
        options = cls.process_options(options, options_available)

        basins = options["basins"]
        if not basins:
            # Nothing to compute: warn and produce no jobs
            Log.error("Basins not recognized")
            return ()

        job_list = []
        for (
            startdate,
            member,
            chunk,
        ) in diags.config.experiment.get_chunk_list():
            job_list.append(
                Moc(diags.data_manager, startdate, member, chunk, basins)
            )
        return job_list

    def request_data(self):
        """Request data required by the diagnostic."""
        self.variable_file = self.request_chunk(
            ModelingRealms.ocean, "vo", self.startdate, self.member, self.chunk
        )

    def declare_data_generated(self):
        """Declare data to be generated by the diagnostic."""
        self.results = self.declare_chunk(
            ModelingRealms.ocean,
            Moc.vsftmyz,
            self.startdate,
            self.member,
            self.chunk,
        )

    def compute(self):
        """Run the diagnostic."""
        vo_cube = iris.load_cube(self.variable_file.local_file)
        # Fill masked (land) points with zero so they do not contribute
        vo = np.ma.filled(vo_cube.data, 0.0).astype(np.float32)
        mesh = Nemo("mesh_hgr.nc", "mask_regions.nc")
        e1v = mesh.get_i_length(cell_point="V")
        e3v = mesh.get_k_length(cell_point="V")

        # Build one land-sea mask per requested basin
        masks = {}
        self.basins.sort()
        for basin in self.basins:
            if basin is Basins().Global:
                global_mask = mesh.get_landsea_mask(cell_point="V")
                # Close the zonal boundaries of the global mask
                global_mask[..., 0] = 0.0
                global_mask[..., -1] = 0.0
                masks[basin] = global_mask
            else:
                masks[basin] = Utils.get_mask(basin)

        moc_results = moc.compute(masks, e1v, e3v, vo)
        # Release the large input arrays before writing the output
        del vo, e1v, e3v
        self._save_result(moc_results, mesh)

    def _save_result(self, result, mesh):
        """Write the computed MOC to a temporary netCDF file.

        :param result: mapping from basin to the computed MOC array
        :param mesh: NEMO mesh helper, used to build the latitude axis
        """
        temp = TempFile.get()
        handler_source = Utils.open_cdf(self.variable_file.local_file)
        handler_temp = Utils.open_cdf(temp, "w")
        gphiv = np.squeeze(mesh.get_grid_latitude(cell_point="V"))
        # Column holding the northernmost latitude: used as the 1-D
        # latitude axis of the output
        max_gphiv = np.unravel_index(np.argmax(gphiv), gphiv.shape)[1]

        Utils.copy_variable(handler_source, handler_temp, "time", True, True)
        Utils.copy_variable(handler_source, handler_temp, "lev", True, True)
        handler_temp.createDimension("i", 1)
        handler_temp.createDimension("j", gphiv.shape[0])
        handler_temp.createDimension("region", len(result))
        handler_temp.createDimension("region_length", 50)

        var_region = handler_temp.createVariable(
            "region", "S1", ("region", "region_length")
        )

        lat = handler_temp.createVariable(
            self.data_convention.lat_name, float, ("j", "i"))
        lat[...] = gphiv[:, max_gphiv]
        lat.units = "degrees_north"
        lat.standard_name = "latitude"

        lon = handler_temp.createVariable(
            self.data_convention.lon_name, float, ("j", "i"))
        # The MOC is zonally integrated, so longitude is degenerate
        lon[...] = 0
        lon.units = "degrees_east"
        lon.standard_name = "longitude"

        var = handler_temp.createVariable(
            "vsftmyz", float, ("time", "lev", "i", "j", "region")
        )
        var.units = "Sverdrup"
        var.coordinates = "lev time latitude longitude"
        var.long_name = "Ocean meridional overturning volume streamfunction"
        var.missing_value = 1e20
        var.fill_value = 1e20

        for i, basin in enumerate(result):
            # Store the basin name as a fixed-length char array
            var_region[i, ...] = netCDF4.stringtoarr(str(basin), 50)
            var[..., i] = result[basin]

        # Close both handlers so the data is flushed and no descriptor leaks
        handler_source.close()
        handler_temp.close()
        self.results.set_local_file(temp, diagnostic=self)