import shutil import threading import netCDF4 import os from autosubmit.config.log import Log from earthdiagnostics import cdo, cdftools from earthdiagnostics.basins import Basins from earthdiagnostics.diagnostic import Diagnostic from earthdiagnostics.utils import Utils, TempFile import numpy as np class Siasiesiv(Diagnostic): """ Compute the sea ice extent , area and volume in both hemispheres or a specified region. :original author: Virginie Guemas :contributor: Neven Fuckar :contributor: Ruben Cruz :contributor: Javier Vegas-Regidor :created: April 2012 :last modified: June 2016 """ def __init__(self, data_manager, basin, startdate, member, chunk, lock): Diagnostic.__init__(self, data_manager) self.basin = basin if basin == Basins.Global: self._region = None else: self._region = basin.shortname self.startdate = startdate self.member = member self.chunk = chunk self.required_vars = ['sit', 'sic'] self.generated_vars = ['siextents', 'sivols', 'siareas', 'siextentn', 'sivoln', 'siarean'] self.lock = lock def __str__(self): return 'Siasiesiv Startdate: {0} Member: {1} Chunk: {2} Basin: {3}'.format(self.startdate, self.member, self.chunk, self.basin.fullname) @classmethod def generate_jobs(cls, diags, options): """ Creates a job for each chunk to compute the diagnostic :param diags: Diagnostics manager class :type diags: Diags :param options: basin :type options: List[str] :return: """ if len(options) != 2: raise Exception('You must specify the basin for the siasiesiv diagnostic (and nothing else)') basin = Basins.parse(options[1]) lock = threading.Lock() job_list = list() for startdate in diags.startdates: for member in diags.members: for chunk in range(1, diags.chunks + 1): job_list.append(Siasiesiv(diags.data_manager, basin, startdate, member, chunk, lock)) return job_list def compute(self): nco = Utils.nco sit_file = self.data_manager.get_file('seaIce', 'sit', self.startdate, self.member, self.chunk) sic_file = self.data_manager.get_file('seaIce', 'sic', self.startdate, self.member, self.chunk) nco.ncks(input=sic_file, output=sit_file, options='-A -v sic') self.lock.acquire() temp = TempFile.get() if self.basin != Basins.Global: shutil.move('mask.nc', 'original_mask.nc') shutil.move('mask_regions.nc', 'mask.nc') Utils.rename_variable('mask.nc', self.basin.fullname, 'tmask') error = None try: cdftools.run('cdficediags', input=sit_file, output=temp) except Exception as ex: error = ex.message finally: if self.basin != Basins.Global: Utils.rename_variable('mask.nc', 'tmask', self.basin.fullname) shutil.move('mask.nc', 'mask_regions.nc') shutil.move('original_mask.nc', 'mask.nc') if error: raise Exception(error) self.lock.release() self.data_manager.send_file(self._extract_variable_and_rename(temp, sit_file, 'SVolume', 'sivols', "10^3 km3"), 'seaIce', 'sivols', self.startdate, self.member, self.chunk, region=self.basin.fullname) self.data_manager.send_file(self._extract_variable_and_rename(temp, sit_file, 'SArea', 'siareas', "10^6 km2"), 'seaIce', 'siareas', self.startdate, self.member, self.chunk, region=self.basin.fullname) self.data_manager.send_file(self._extract_variable_and_rename(temp, sit_file, 'SExnsidc', 'siextents', "10^6 km2"), 'seaIce', 'siextents', self.startdate, self.member, self.chunk, region=self.basin.fullname) self.data_manager.send_file(self._extract_variable_and_rename(temp, sit_file, 'NVolume', 'sivoln', "10^3 km3"), 'seaIce', 'sivoln', self.startdate, self.member, self.chunk, region=self.basin.fullname) self.data_manager.send_file(self._extract_variable_and_rename(temp, sit_file, 'NArea', 'siarean', "10^6 km2"), 'seaIce', 'siarean', self.startdate, self.member, self.chunk, region=self.basin.fullname) self.data_manager.send_file(self._extract_variable_and_rename(temp, sit_file, 'NExnsidc', 'siextentn', "10^6 km2"), 'seaIce', 'siextentn', self.startdate, self.member, self.chunk, region=self.basin.fullname) os.remove(temp) def _extract_variable_and_rename(self, input_file, reference_file, variable, cmor_name, output_units): temp = TempFile.get() input_handler = Utils.openCdf(input_file) reference_handler = Utils.openCdf(reference_file) os.remove(temp) handler = netCDF4.Dataset(temp, 'w') # Create dimensions handler.createDimension('time') handler.createDimension('bnds', 2) # Copy time variable Utils.copy_variable(reference_handler, handler, 'time') Utils.copy_variable(reference_handler, handler, 'time_bnds') Utils.copy_variable(reference_handler, handler, 'leadtime') reference_handler.close() # time_var = input_handler.variables['time'] # new_time = handler.createVariable('time', time_var.datatype, time_var.dimensions) # new_time.setncatts({k: time_var.getncattr(k) for k in time_var.ncattrs()}) # new_time[:] = time_var[:] # # original_bnds = input_handler.variables['time_bnds'] # new_bnds = handler.createVariable('time_bnds', original_bnds.datatype, original_bnds.dimensions) # new_bnds.setncatts({k: original_bnds.getncattr(k) for k in original_bnds.ncattrs()}) # new_bnds[:] = original_bnds[:] original_variable = input_handler.variables[variable] values = original_variable[:, 0, 0] new_var = handler.createVariable(cmor_name, original_variable.datatype, 'time', fill_value=0.0) new_var.setncatts({k: original_variable.getncattr(k) for k in original_variable.ncattrs()}) factor = self._get_conversion_factor(original_variable.units, output_units) values *= factor new_var[:] = values new_var.units = output_units new_var.short_name = cmor_name new_var.valid_min = 0.0 new_var.valid_max = np.max(values) handler.close() return temp def _get_conversion_factor(self, input_units, output_units): units = input_units.split() if len(units) == 1: scale_unit = 1 unit = units[0] else: if '^' in units[0]: values = units[0].split('^') scale_unit = pow(int(values[0]), int(values[1])) else: scale_unit = float(units[0]) unit = units[1] units = output_units.split() if len(units) == 1: scale_new_unit = 1 new_unit = units[0] else: if '^' in units[0]: values = units[0].split('^') scale_new_unit = pow(int(values[0]), int(values[1])) else: scale_new_unit = float(units[0]) new_unit = units[1] factor = self._get_factor(new_unit, unit) invert = False if factor is None: factor = self._get_factor(unit, new_unit) invert = True if factor is None: raise Exception("Conversion from {0} to {1} not supported".format(units, output_units)) if invert: factor = scale_unit / float(scale_new_unit * factor) else: factor = (factor * scale_unit) / float(scale_new_unit) return factor @staticmethod def _get_factor(new_unit, unit): # Add only the conversions with a factor greater than 1 if unit == new_unit: return 1 if unit == 'km3': if new_unit == 'm3': return pow(1000, 3) elif unit == 'km2': if new_unit == 'm2': return pow(1000, 2) return None