diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..9f5872299c573dba2a058e6dfb0de52c7c41f5b7
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Earth Sciences
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/VERSION b/VERSION
index 15a279981720791464e46ab21ae96b3c1c65c3b6..18091983f59ddde8105e566545a0d9e4a12a4f1c 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-3.3.0
+3.4.0
diff --git a/doc/source/conf.py b/doc/source/conf.py
index a735658641cfc6095f5966e08d16289d14e7f916..1501231f416a1f239ec0751614c114c9442fb4bc 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -56,16 +56,16 @@ master_doc = "index"
 
 # General information about the project.
 project = u"Earth Diagnostics"
-copyright = u"2019, BSC-CNS Earth Sciences Department"
+copyright = u"2020, BSC-CNS Earth Sciences Department"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.source ~/vi
 #
 # The short X.Y version.
-version = "3.2"
+version = "3.4"
 # The full version, including alpha/beta/rc tags.
-release = "3.2.0"
+release = "3.4.0"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -195,11 +195,11 @@ htmlhelp_basename = "EarthDiagnosticsd"
 
 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
+    # 'papersize': 'letterpaper',
     # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
+    # 'pointsize': '10pt',
     # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+    # 'preamble': '',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
diff --git a/doc/source/diagnostic_list.rst b/doc/source/diagnostic_list.rst
index 1679fef9c01b338b5a4ce8604edded373418524f..de450a4f55f7b23e2695e6e1b91409d2452ade91 100644
--- a/doc/source/diagnostic_list.rst
+++ b/doc/source/diagnostic_list.rst
@@ -55,12 +55,12 @@ Calculates the daily mean for a given variable. See :class:`~earthdiagnostics.ge
 Options:
 ********
 
-1. Variable:
-    Variable name
-
-2. Domain:
+1. Domain:
     Variable domain
 
+2. Variable:
+    Variable name
+
 3. Original frequency:
     Original frequency to use
 
@@ -102,58 +102,19 @@ Calculates the monthly mean for a given variable. See :class:`~earthdiagnostics.
 Options:
 ********
 
-1. Variable:
-    Variable name
-
-2. Domain:
+1. Domain:
     Variable domain
 
-3. Original frequency = daily:
-    Original frequency to use
-
-4.
Grid = '': - Variable grid. Only required in case that you want to use - interpolated data. - -relink -~~~~~~ - -Regenerates the links created in the monthly_mean, daily_mean, etc folders -for a given varible. - -See :class:`~earthdiagnostics.general.relink.Relink` - -Options: -******** - -1. Variable: +2. Variable: Variable name -2. Domain: - Variable domain - -3. Move old = - True: If True, any data founded in the target directory will be moved - to another folder (called FOLDER_NAME_old) instead of deleted. +3. Original frequency = daily: + Original frequency to use 4. Grid = '': Variable grid. Only required in case that you want to use interpolated data. - -relinkall -~~~~~~~~~ - -Regenerates the links created in the monthly_mean, daily_mean, etc folders -for all variables - -See :class:`~earthdiagnostics.general.relinkall.RelinkAll` - -Options: -******** - -This diagnostic has no options - rewrite: ~~~~~~~~ @@ -229,8 +190,8 @@ Options: 1. Domain: Variable domain -2. Variable: - Variable name +2. Variables: + List of variables to simplify, separated by ':' 5. Grid = '': Variable grid. Only required in case that you want to use interpolated data. @@ -247,12 +208,12 @@ Calculates the daily mean for a given variable. See :class:`~earthdiagnostics.ge Options: ******** -1. Variable: - Variable name - -2. Domain: +1. Domain: Variable domain +2. Variable: + Variable name + 3. Original frequency: Original frequency to use diff --git a/earthdiagnostics/cmorizer.py b/earthdiagnostics/cmorizer.py index cc254ead6a10a592d691db88ec1210d9d509f281..6cb10a3dd1d8c9aa50a7c14938a51a0354fb2be0 100644 --- a/earthdiagnostics/cmorizer.py +++ b/earthdiagnostics/cmorizer.py @@ -318,7 +318,25 @@ class Cmorizer(object): options="-O", input=filename, output=temp ) shutil.move(temp, filename) - Utils.cdo().mergetime(input=files, output=merged) + cmorize_vars = set() + var_manager = self.config.var_manager + for filename in files: + handler = Utils.open_cdf(filename) + for variable in handler.variables.keys(): + + _, var_cmor = var_manager.get_variable_and_alias( + variable, silent=True, + ) + if self.cmor.cmorize(var_cmor): + cmorize_vars.add(variable) + handler.close() + if not cmorize_vars: + continue + var_str = ",".join([str(var) for var in cmorize_vars]) + Utils.cdo().mergetime( + input=[f"-selvar,{var_str} {filepath}" for filepath in files], + output=merged + ) for filename in files: self._remove(filename) tar_startdate = ( diff --git a/earthdiagnostics/config.py b/earthdiagnostics/config.py index d6480355d84e43a7e06b5aac479391f2026bdd69..7340daddb28c84f093cf149a883f914f746117df 100644 --- a/earthdiagnostics/config.py +++ b/earthdiagnostics/config.py @@ -604,7 +604,6 @@ class ExperimentConfig(object): def _parse_startdates(self, parser): startdates = parser.get_list_option("EXPERIMENT", "STARTDATES") - import exrex self.startdates = [] for startdate_pattern in startdates: @@ -614,9 +613,7 @@ class ExperimentConfig(object): if startdate_pattern[0] == "{" and startdate_pattern[-1] == "}": self._read_startdates(startdate_pattern[1:-1]) else: - for startdate in exrex.generate(startdate_pattern): - startdate = startdate.strip() - self.startdates.append(startdate) + self.startdates.append(startdate_pattern) def _parse_members(self): members = [] diff --git a/earthdiagnostics/data_convention.py b/earthdiagnostics/data_convention.py index fc1f76bfca3cf4e6f0f95a80b04b09653cd79a67..0ac704f0afaca48b479290c1707628e2f4090751 100644 --- a/earthdiagnostics/data_convention.py +++ b/earthdiagnostics/data_convention.py @@ 
-193,6 +193,7 @@ class DataConvention(object):
         return os.path.join(
             self.config.data_dir,
             self.config.experiment.expid,
+            "original_files",
             "cmorfiles",
             self.config.cmor.activity,
             self.config.experiment.institute,
@@ -421,10 +422,11 @@ class DataConvention(object):
             var_path = self.get_file_path(
                 startdate, member, domain, var, cmor_var, chunk, frequency=freq
             )
-            if os.path.isfile(var_path):
-                current_count += 1
-                if current_count >= self.config.cmor.min_cmorized_vars:
-                    break
+            for path in (var_path, var_path.replace('/original_files/', '/')):
+                if os.path.isfile(path):
+                    current_count += 1
+                    if current_count >= self.config.cmor.min_cmorized_vars:
+                        break
         return current_count
 
     def is_cmorized(self, startdate, member, chunk, domain):
@@ -643,6 +645,7 @@ class SPECSConvention(Cmor2Convention):
         return os.path.join(
             self.config.data_dir,
             self.config.experiment.expid,
+            "original_files",
             "cmorfiles",
             self.config.experiment.institute,
             self.config.experiment.model,
@@ -682,6 +685,7 @@ class PrefaceConvention(Cmor2Convention):
         return os.path.join(
             self.config.data_dir,
             self.config.experiment.expid,
+            "original_files",
             "cmorfiles",
             self.config.experiment.institute,
             self.experiment_name(startdate),
@@ -849,6 +853,7 @@ class Cmor3Convention(DataConvention):
     def get_cmor_folder_path(
         self, startdate, member, domain, var, frequency, grid, cmor_var
     ):
+
         if not self.config.cmor.version:
             raise ValueError(
                 "CMOR version is mandatory for PRIMAVERA and CMIP6"
@@ -878,6 +883,12 @@ class Cmor3Convention(DataConvention):
             grid,
             self.config.cmor.version,
         )
+        if self.config.cmor.version == "latest":
+            versions = os.listdir(os.path.dirname(folder_path))
+            versions.sort(reverse=True)
+            self.config.cmor.version = versions[0]
+            folder_path = folder_path.replace('/latest/', f'/{versions[0]}/')
+
         return folder_path
 
     def _link_startdate(self, path, member_str):
@@ -982,7 +993,9 @@ class Cmor3Convention(DataConvention):
         """
         startdate_path = self.get_startdate_path(startdate)
         if not os.path.isdir(startdate_path):
-            return False
+            startdate_path = startdate_path.replace('/original_files/', '/')
+            if not os.path.isdir(startdate_path):
+                return False
         count = 0
         member_path = os.path.join(startdate_path, self.get_member_str(member))
         if not os.path.isdir(member_path):
diff --git a/earthdiagnostics/datafile.py b/earthdiagnostics/datafile.py
index f43b83b0b754b9755728c93ff49a081dab3e6be1..1390a97345b9ab444f6f92ee39113290a581ffe5 100644
--- a/earthdiagnostics/datafile.py
+++ b/earthdiagnostics/datafile.py
@@ -16,6 +16,8 @@ from earthdiagnostics.utils import Utils, TempFile
 from earthdiagnostics.publisher import Publisher
 from earthdiagnostics.variable import VariableType
 
+import netCDF4
+
 
 class LocalStatus(object):
     """Local file status enumeration"""
@@ -184,6 +186,17 @@ class DataFile(Publisher):
             return True
         return self._modifiers[0] is diagnostic
 
+    @property
+    def remote_diags_file(self):
+        remote_diags_file = self.remote_file.replace("/cmorfiles/", "/diags/")
+        remote_diags_file = remote_diags_file.replace("/original_files/", "/")
+        return remote_diags_file
+
+    @property
+    def remote_cmor_file(self):
+        remote_cmor_file = self.remote_file.replace("/original_files/", "/")
+        return remote_cmor_file
+
     @property
     def local_status(self):
         """Get local storage status"""
@@ -261,6 +274,7 @@ class DataFile(Publisher):
         self._rename_coordinate_variables()
         self._correct_metadata()
         self._prepare_region()
+        self.add_basin_history()
         self.add_diagnostic_history()
         Utils.convert2netcdf4(self.local_file)
 
@@ -269,9 +283,8 @@
self.storage_status = StorageStatus.UPLOADING remote_file = self.remote_file try: - if "/cmorfiles/" in remote_file: - remote_file = remote_file.replace("/cmorfiles/", "/diags/") - Utils.copy_file(self.local_file, remote_file, save_hash=True) + Utils.copy_file( + self.local_file, self.remote_diags_file, save_hash=True) except (OSError, Exception) as ex: Log.error("File {0} can not be uploaded: {1}", remote_file, ex) self.storage_status = StorageStatus.FAILED @@ -434,16 +447,18 @@ class DataFile(Publisher): cube.coord("region") except iris.exceptions.CoordinateNotFoundError: return - try: - old_cube = iris.load_cube(self.remote_file) - except Exception: + old_cube = None + for path in (self.remote_diags_file, self.remote_file, + self.remote_cmor_file): try: - old_cube = iris.load_cube( - self.remote_file.replace('/cmorfiles/', '/diags/') - ) + old_cube = iris.load_cube(path) except Exception: - # Bad data, overwrite - return + pass + else: + break + if old_cube is None: + return + new_data = {} for region_slice in cube.slices_over("region"): Log.debug(region_slice.coord("region").points[0]) @@ -476,6 +491,14 @@ class DataFile(Publisher): Utils.rename_variables( temp, renames, must_exist=False, rename_dimension=True) Utils.move_file(temp, self.local_file) + handler2 = Utils.open_cdf(self.local_file) + region_var = handler2.variables['region'] + for i, cube in enumerate(cube_list): + encode = 'utf-8' + name = region_var[i, ...].tobytes().strip().decode(encode) + length = handler2.dimensions['region_length'].size + region_var[i, ...] = netCDF4.stringtoarr(name, length) + handler2.close() self._correct_metadata() def _rename_coordinate_variables(self): @@ -495,7 +518,6 @@ class DataFile(Publisher): if not self.diagnostic: return from earthdiagnostics.earthdiags import EarthDiags - history_line = ( f"Diagnostic {self.diagnostic} calculated with EarthDiagnostics " f"version {EarthDiags.version}" @@ -511,6 +533,19 @@ class DataFile(Publisher): ) self._add_history_line(history_line) + def add_basin_history(self): + """Add basin history line to local file""" + if not os.path.isfile('basins.nc'): + return + basins = iris.load_cube('basins.nc') + history_line = ( + "Using Basins masks file " + f"version {basins.attributes['version']} with " + f"grid {basins.attributes['grid']}. " + "Original file can be found in /esarchive/autosubmit/conf_files." 
+ ) + self._add_history_line(history_line) + def _add_history_line(self, history_line): utc_datetime = "UTC " + datetime.utcnow().isoformat() history_line = "{0}: {1};".format(utc_datetime, history_line) @@ -646,8 +681,9 @@ class NetCDFFile(DataFile): def download(self): """Get data from remote storage to the local one""" for path in ( - self.remote_file.replace("/cmorfiles/", "/diags/"), + self.remote_diags_file, self.remote_file, + self.remote_cmor_file, ): if os.path.isfile(path): try: @@ -695,7 +731,8 @@ class NetCDFFile(DataFile): def check_is_in_storage(self, update_status=True): for path in ( self.remote_file, - self.remote_file.replace("/cmorfiles/", "/diags/"), + self.remote_cmor_file, + self.remote_diags_file ): if os.path.isfile(path): if update_status: diff --git a/earthdiagnostics/general/relink.py b/earthdiagnostics/general/relink.py deleted file mode 100644 index bb277ce3ea23cda947feb14ec43bc4e442578c8c..0000000000000000000000000000000000000000 --- a/earthdiagnostics/general/relink.py +++ /dev/null @@ -1,136 +0,0 @@ -# coding=utf-8 -"""Create links for a variable""" -from earthdiagnostics.diagnostic import ( - Diagnostic, - DiagnosticOption, - DiagnosticDomainOption, - DiagnosticBoolOption, - DiagnosticVariableOption, -) - - -class Relink(Diagnostic): - """ - Recreates the links for the variable specified - - :original author: Javier Vegas-Regidor - - :created: September 2016 - - :param data_manager: data management object - :type data_manager: DataManager - :param startdate: startdate - :type startdate: str - :param member: member number - :type member: int - :param chunk: chunk's number - :type chunk: int - :param variable: variable's name - :type variable: str - :param domain: variable's domain - :type domain: ModelingRealm - :param move_old: if true, looks for files following the old convention - and moves to avoid collisions - :type move_old: bool - """ - - alias = "relink" - "Diagnostic alias for the configuration file" - - def __init__( - self, - data_manager, - startdate, - member, - chunk, - domain, - variable, - move_old, - grid, - ): - Diagnostic.__init__(self, data_manager) - self.startdate = startdate - self.member = member - self.chunk = chunk - self.variable = variable - self.domain = domain - self.move_old = move_old - self.grid = grid - self.var_manager = data_manager.config.var_manager - - def __str__(self): - return ( - "Relink output Startdate: {0.startdate} Member: {0.member} " - "Chunk: {0.chunk} Move old: {0.move_old} " - "Variable: {0.domain}:{0.variable} Grid: {0.grid}".format(self) - ) - - def __hash__(self): - return hash(str(self)) - - def __eq__(self, other): - if self._different_type(other): - return False - return ( - self.startdate == other.startdate - and self.member == other.member - and self.chunk == other.chunk - and self.domain == other.domain - and self.variable == other.variable - and self.move_old == other.move_old - and self.grid == other.grid - ) - - @classmethod - def generate_jobs(cls, diags, options): - """ - Create a job for each chunk to compute the diagnostic - - :param diags: Diagnostics manager class - :type diags: Diags - :param options: variable, domain, move_old=False - :type options: list[str] - :return: - """ - options_available = ( - DiagnosticDomainOption(), - DiagnosticVariableOption(diags.data_manager.config.var_manager), - DiagnosticBoolOption("move_old", True), - DiagnosticOption("grid", ""), - ) - options = cls.process_options(options, options_available) - job_list = list() - chunk_list = 
diags.config.experiment.get_chunk_list() - for startdate, member, chunk in chunk_list: - job_list.append( - Relink( - diags.data_manager, - startdate, - member, - chunk, - options["domain"], - options["variable"], - options["move_old"], - options["grid"], - ) - ) - return job_list - - def request_data(self): - """Request data required by the diagnostic""" - - def declare_data_generated(self): - """Declare data to be generated by the diagnostic""" - - def compute(self): - """Run the diagnostic""" - self.data_manager.link_file( - self.domain, - self.variable, - self.var_manager.get_variable(self.variable), - self.startdate, - self.member, - self.chunk, - move_old=self.move_old, - grid=self.grid, - ) diff --git a/earthdiagnostics/general/relinkall.py b/earthdiagnostics/general/relinkall.py deleted file mode 100644 index 107fa290068fa68c5adcee0348539b98637b9031..0000000000000000000000000000000000000000 --- a/earthdiagnostics/general/relinkall.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -"""Create links for all variables in a startdate""" -from earthdiagnostics.diagnostic import Diagnostic - - -class RelinkAll(Diagnostic): - """ - Recreates the links for the variable specified - - :original author: Javier Vegas-Regidor - - :created: September 2016 - - :param data_manager: data management object - :type data_manager: DataManager - :param startdate: startdate - :type startdate: str - """ - - alias = "relinkall" - "Diagnostic alias for the configuration file" - - def __init__(self, data_manager, startdate): - Diagnostic.__init__(self, data_manager) - self.startdate = startdate - - def __str__(self): - return "Relink all output Startdate: {0}".format(self.startdate) - - def __hash__(self): - return hash(str(self)) - - def __eq__(self, other): - if self._different_type(other): - return False - return self.startdate == other.startdate - - @classmethod - def generate_jobs(cls, diags, options): - """ - Create a job for each chunk to compute the diagnostic - - :param diags: Diagnostics manager class - :type diags: Diags - :param options: variable, domain, move_old=False - :type options: list[str] - :return: - """ - if len(options) > 1: - raise Exception("The Relink All diagnostic has no options") - job_list = list() - for startdate in diags.config.experiment.startdates: - job_list.append(RelinkAll(diags.data_manager, startdate)) - return job_list - - def request_data(self): - """Request data required by the diagnostic""" - - def declare_data_generated(self): - """Declare data to be generated by the diagnostic""" - - def compute(self): - """Run the diagnostic""" - self.data_manager.convention.create_links(self.startdate) diff --git a/earthdiagnostics/general/select_levels.py b/earthdiagnostics/general/select_levels.py deleted file mode 100644 index a5c99a4fcda2830e0e0e047d5f3e79da126e8b32..0000000000000000000000000000000000000000 --- a/earthdiagnostics/general/select_levels.py +++ /dev/null @@ -1,188 +0,0 @@ -# coding=utf-8 -"""Extract levels from variable""" -from earthdiagnostics.box import Box -from earthdiagnostics.diagnostic import ( - Diagnostic, - DiagnosticOption, - DiagnosticDomainOption, - DiagnosticVariableListOption, - DiagnosticIntOption, -) -from earthdiagnostics.utils import Utils, TempFile - - -class SelectLevels(Diagnostic): - """ - Extract levels from file - - Parameters - ---------- - data_manager: DataManager - startdate: str - member: int - chunk: init - domain: ModellingRealm - variable: str - grid: str or None - first_level: int - last_level: int - """ - - alias = "selev" - 
"Diagnostic alias for the configuration file" - - def __init__( - self, - data_manager, - startdate, - member, - chunk, - domain, - variable, - grid, - first_level, - last_level, - ): - Diagnostic.__init__(self, data_manager) - self.startdate = startdate - self.member = member - self.chunk = chunk - self.variable = variable - self.domain = domain - self.grid = grid - self.box = Box(False) - self.box.min_depth = first_level - self.box.max_depth = last_level - - self.variable_file = None - self.result = None - - def __str__(self): - return ( - "Select levels Startdate: {0.startdate} Member: {0.member} " - "Chunk: {0.chunk} Variable: {0.domain}:{0.variable} " - "Levels: {0.box.min_depth}-{0.box.max_depth} " - "Grid: {0.grid}".format(self) - ) - - def __eq__(self, other): - if self._different_type(other): - return False - return ( - self.startdate == other.startdate - and self.member == other.member - and self.chunk == other.chunk - and self.domain == other.domain - and self.variable == other.variable - and self.box == other.box - and self.grid == self.grid - ) - - @classmethod - def generate_jobs(cls, diags, options): - """ - Create a job for each chunk to compute the diagnostic - - :param diags: Diagnostics manager class - :type diags: Diags - :param options: domain,variables,grid - :type options: list[str] - :return: - """ - options_available = ( - DiagnosticDomainOption(), - DiagnosticVariableListOption( - diags.data_manager.config.var_manager, "variables" - ), - DiagnosticIntOption("first_level"), - DiagnosticIntOption("last_level"), - DiagnosticOption("grid", ""), - ) - options = cls.process_options(options, options_available) - job_list = list() - variables = options["variables"] - for var in variables: - chunk_list = diags.config.experiment.get_chunk_list() - for startdate, member, chunk in chunk_list: - job_list.append( - SelectLevels( - diags.data_manager, - startdate, - member, - chunk, - options["domain"], - var, - options["grid"], - options["first_level"], - options["last_level"], - ) - ) - return job_list - - def request_data(self): - """Request data required by the diagnostic""" - self.variable_file = self.request_chunk( - self.domain, - self.variable, - self.startdate, - self.member, - self.chunk, - grid=self.grid, - to_modify=True, - ) - - def declare_data_generated(self): - """Request data required by the diagnostic""" - self.result = self.declare_chunk( - self.domain, - self.variable, - self.startdate, - self.member, - self.chunk, - grid=self.grid, - ) - - def compute(self): - """Run the diagnostic""" - temp = TempFile.get() - handler = Utils.open_cdf(self.variable_file.local_file) - var_name = "" - for var in ("lev", "plev"): - if var in handler.variables: - var_name = var - continue - handler.close() - - Utils.nco().ncks( - input=self.variable_file.local_file, - output=temp, - options="-O -d {1},{0.min_depth},{0.max_depth}".format( - self.box, var_name - ), - ) - self.result.set_local_file(temp) - - @staticmethod - def _create_var(var_name, var_values, source, destiny): - old_var = source.variables[var_name] - new_var = destiny.createVariable( - var_name, old_var.dtype, dimensions=(var_name,) - ) - new_var[:] = var_values - Utils.copy_attributes(new_var, old_var) - - vertices_name = "{0}_vertices".format(var_name) - - if vertices_name in source.variables: - var_vertices = source.variables[vertices_name] - if var_name == "lon": - vertices_values = var_vertices[0:1, ...] 
-            else:
-                vertices_values = var_vertices[:, 0:1, :]
-            new_lat_vertices = destiny.createVariable(
-                vertices_name,
-                var_vertices.dtype,
-                dimensions=(var_name, "vertices"),
-            )
-            new_lat_vertices[:] = vertices_values
-            Utils.copy_attributes(new_lat_vertices, var_vertices)
diff --git a/earthdiagnostics/ocean/heatcontentlayer.py b/earthdiagnostics/ocean/heatcontentlayer.py
index 178bac42dcad54ec6d4b15e7661cbd0708b19c0e..03317acfa2df7bbc966a227aa7d30a031db79542 100644
--- a/earthdiagnostics/ocean/heatcontentlayer.py
+++ b/earthdiagnostics/ocean/heatcontentlayer.py
@@ -262,7 +262,7 @@ class HeatContentLayer(Diagnostic):
         var = handler_results.createVariable(
             "heatc", float, ("time", "j", "i"), fill_value=1.0e20
         )
-        var.units = "J m-2" ""
+        var.units = "J m-2"
         var.coordinates = " ".join((lat_name, lon_name))
         handler_results.sync()
         # temporary fix, needs to loop over layers
@@ -281,6 +281,7 @@ class HeatContentLayer(Diagnostic):
             "heatcsum", float, ("time", "region",),
         )
         handler_results_1d.sync()
+        var_ohc_1d.units = "J"
         for i, basin in enumerate(self.basins):
             var_region[i, ...] = netCDF4.stringtoarr(basin.name, 50)
             var_ohc_1d[..., i] = heatc_sl_1d[i]
diff --git a/earthdiagnostics/ocean/indices.py b/earthdiagnostics/ocean/indices.py
new file mode 100644
index 0000000000000000000000000000000000000000..a091b040da43e10bec70de2d68730bf24727538c
--- /dev/null
+++ b/earthdiagnostics/ocean/indices.py
@@ -0,0 +1,180 @@
+# coding=utf-8
+"""Compute the indices for oceanic basins"""
+from bscearth.utils.log import Log
+
+import iris
+import iris.analysis
+
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.modelingrealm import ModelingRealms
+from earthdiagnostics.utils import Utils, TempFile
+
+
+class Indices(Diagnostic):
+    """
+    Compute the indices for oceanic basins
+
+    :created: March 2012
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    """
+
+    alias = 'indices'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+
+        self.results = {}
+        self.region_metadata = {}
+        self.generated = {}
+
+    def __str__(self):
+        return 'Indices Startdate: {0.startdate} Member: {0.member} ' \
+            'Chunk: {0.chunk}'.format(self)
+
+    def __hash__(self):
+        return hash(str(self))
+
+    def __eq__(self, other):
+        if self._different_type(other):
+            return False
+        return (
+            self.startdate == other.startdate and
+            self.member == other.member and self.chunk == other.chunk
+        )
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Create a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: None
+        :type options: list[str]
+        :return:
+        """
+
+        job_list = list()
+        for sdate, mem, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(
+                Indices(diags.data_manager, sdate, mem, chunk))
+        return job_list
+
+    def request_data(self):
+        """Request data required by the diagnostic"""
+        self.variable_file = self.request_chunk(
+            ModelingRealms.ocean, 'tosmean',
+            self.startdate, self.member, self.chunk)
+
+    def declare_data_generated(self):
+        """Declare data to be generated by the diagnostic"""
+        self._declare_var('amv')
+
self._declare_var('ipo') + self._declare_var('iod') + + def _declare_var(self, var_name): + self.generated[var_name] = self.declare_chunk( + ModelingRealms.ocean, var_name, + self.startdate, self.member, self.chunk) + + def compute(self): + """Run the diagnostic""" + tosmean = iris.load_cube(self.variable_file.local_file) + data_regions = tosmean.coord('region').points + amv_regions = ['AMV_North_Atlantic', 'AMV_trend'] + ipo_regions = ['Pacific_TPI1', 'Pacific_TPI2', 'Pacific_TPI3'] + iod_regions = ['Indian_dipole_east', 'Indian_dipole_west'] + + check_amv = set(amv_regions).issubset(set(data_regions)) + if check_amv: + data = {} + for region in amv_regions: + data[region] = tosmean.extract(iris.Constraint(region=region)) + self.compute_amv(data) + else: + Log.info('Input data does not contain the basins required to ' + 'compute the AMV index. Skipping AMV computations.') + + check_ipo = set(ipo_regions).issubset(set(data_regions)) + if check_ipo: + data = {} + for region in ipo_regions: + data[region] = tosmean.extract(iris.Constraint(region=region)) + self.compute_ipo(data) + else: + Log.info('Input data does not contain the basins required to ' + 'compute the IPO index. Skipping IPO computations.') + + check_iod = set(iod_regions).issubset(set(data_regions)) + if check_iod: + data = {} + for region in iod_regions: + data[region] = tosmean.extract(iris.Constraint(region=region)) + self.compute_iod(data) + else: + Log.info('Input data does not contain the basins required to ' + 'compute the IOD index. Skipping IOD computations.') + + self.save() + + def compute_amv(self, data): + self.results['amv'] = (data['AMV_North_Atlantic'].data - + data['AMV_trend'].data) + self.region_metadata['amv'] = ( + 'AMV_North_Atlantic Box (lat: [0, 60], lon:[-80, 0]), ' + 'AMV_trend Box (lat: [-60, 60], lon: [-180, 180])') + + def compute_ipo(self, data): + self.results['ipo'] = data['Pacific_TPI2'].data - 0.5*( + data['Pacific_TPI1'].data + data['Pacific_TPI3'].data + ) + self.region_metadata['ipo'] = ( + 'Pacific_TPI1 Box ( (lat: [25, 45], lon:[140, 180]), ' + '(lat: [25, 45], lon:[-180, -145]) ) ' + 'Pacific_TPI2 Box ( (lat: [-10, 10], lon:[170, 180]), ' + '(lat: [-10, 10], lon:[-180, -90]) ) ' + 'Pacific_TPI3 Box ( (lat: [-50, -15], lon:[150, 180]), ' + '(lat: [-50, -15], lon:[-180, -160]) )' + ) + + def compute_iod(self, data): + self.results['iod'] = (data['Indian_dipole_west'].data - + data['Indian_dipole_east'].data) + self.region_metadata['iod'] = ( + 'Indian_dipole_west Box (lat: [-10, 10], lon:[50,70]) ' + 'Indian_dipole_east Box (lat: [-10, 0], lon:[90, 110])' + ) + + def save(self): + for var in self.results.keys(): + res = self.results[var] + temp = TempFile.get() + handler_source = Utils.open_cdf(self.variable_file.local_file) + handler_temp = Utils.open_cdf(temp, 'w') + + Utils.copy_variable( + handler_source, handler_temp, 'time', True, True) + + var_res = handler_temp.createVariable( + '{0}'.format(var), float, ('time',)) + var_res[...] = res[...] 
+ var_res.units = 'degC' + var_res.comment = '{var} index computed at {region}'.format( + var=var, + region=self.region_metadata[var] + ) + handler_temp.close() + self.generated[var].set_local_file(temp, diagnostic=self) diff --git a/earthdiagnostics/ocean/moc.py b/earthdiagnostics/ocean/moc.py index b6f845064a3d25970a6efbeb8a2302e81d99d899..f5931fc8b622bb7a0d9c40007cd8ecfe18ef91f3 100644 --- a/earthdiagnostics/ocean/moc.py +++ b/earthdiagnostics/ocean/moc.py @@ -91,6 +91,7 @@ class Moc(Diagnostic): if not basins: Log.error("Basins not recognized") return () + basins.sort() job_list = list() for ( @@ -128,9 +129,9 @@ class Moc(Diagnostic): e3v = mesh.get_k_length(cell_point="V") masks = {} - self.basins.sort() + self.basins for basin in self.basins: - if basin == "Global": + if basin == Basins().Global: global_mask = mesh.get_landsea_mask(cell_point="V") global_mask[..., 0] = 0.0 global_mask[..., -1] = 0.0 diff --git a/earthdiagnostics/work_manager.py b/earthdiagnostics/work_manager.py index 048388f740d18a03544c9e13cba191a4a4c0b861..3666e2c791b58b5f3f5251718b23a79da0910458 100644 --- a/earthdiagnostics/work_manager.py +++ b/earthdiagnostics/work_manager.py @@ -362,8 +362,6 @@ class WorkManager(object): ) from earthdiagnostics.general.module import Module from earthdiagnostics.general.rewrite import Rewrite - from earthdiagnostics.general.relink import Relink - from earthdiagnostics.general.relinkall import RelinkAll from earthdiagnostics.general.scale import Scale from earthdiagnostics.general.verticalmeanmetersiris import ( VerticalMeanMetersIris, @@ -376,8 +374,6 @@ class WorkManager(object): Diagnostic.register(MonthlyMean) Diagnostic.register(YearlyMean) Diagnostic.register(Rewrite) - Diagnostic.register(Relink) - Diagnostic.register(RelinkAll) Diagnostic.register(Scale) Diagnostic.register(Attribute) Diagnostic.register(Module) @@ -392,6 +388,7 @@ class WorkManager(object): from .ocean.verticalmean import VerticalMean from .ocean.verticalmeanmeters import VerticalMeanMeters from .ocean.verticalgradient import VerticalGradient + from .ocean.indices import Indices from .ocean.interpolate import Interpolate from .ocean.interpolatecdo import InterpolateCDO from .ocean.moc import Moc @@ -415,6 +412,7 @@ class WorkManager(object): Diagnostic.register(Siasiesiv) Diagnostic.register(VerticalMean) Diagnostic.register(VerticalMeanMeters) + Diagnostic.register(Indices) Diagnostic.register(Interpolate) Diagnostic.register(InterpolateCDO) Diagnostic.register(Moc) diff --git a/setup.py b/setup.py index b5a06b99e86e20834fa29310b71028b0e79bb595..f4571a319913c9d946200bd3bb8344714a42f65e 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,6 @@ REQUIREMENTS = { "cfgrib", "dask[array]", "diagonals", - "exrex", "netCDF4", "nco>=0.0.3", "numba", diff --git a/src/mixdiags b/src/mixdiags new file mode 160000 index 0000000000000000000000000000000000000000..199979700e38d3918a82bd2052855d46375e48ab --- /dev/null +++ b/src/mixdiags @@ -0,0 +1 @@ +Subproject commit 199979700e38d3918a82bd2052855d46375e48ab diff --git a/test/integration/test_cmorizer.py b/test/integration/test_cmorizer.py index 14ee5db2b184f6566c831845a678eb86924ec22c..29fab22bb4457edb912b0b75fc569ca04f6d2fa6 100644 --- a/test/integration/test_cmorizer.py +++ b/test/integration/test_cmorizer.py @@ -21,7 +21,7 @@ import calendar class TestCmorizer(TestCase): """Tests for Cmorizer class""" - def _get_variable_and_alias(self, variable): + def _get_variable_and_alias(self, variable, silent=False): mock_alias = Mock() mock_alias.basin = None 
mock_alias.grid = None @@ -349,22 +349,26 @@ class TestCmorizer(TestCase): log.Log.WARNING: warnings, } ): - if value: - self.assertTrue( - [ - record - for record in cmd.records - if record.levelno == level - ] - ) - else: - self.assertFalse( - [ - record - for record in cmd.records - if record.levelno == level - ] - ) + try: + if value: + self.assertTrue( + [ + record + for record in cmd.records + if record.levelno == level + ] + ) + else: + self.assertFalse( + [ + record + for record in cmd.records + if record.levelno == level + ] + ) + except AssertionError: + print(cmd.records) + raise else: cmorizer = Cmorizer(self.data_manager, "19900101", 0) if ocean: diff --git a/test/unit/data_convention/test_data_convention.py b/test/unit/data_convention/test_data_convention.py index 26abd6c488b8284969a4834cde786ff96f16dfc5..35aeeda8d08323c0cd917b0db9869980e410b3d0 100644 --- a/test/unit/data_convention/test_data_convention.py +++ b/test/unit/data_convention/test_data_convention.py @@ -43,7 +43,8 @@ class TestDataConvention(TestCase): self.convention.get_startdate_path("19900101"), os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/expname", + "expid/original_files/cmorfiles/activity/institute/model/" + "expname", ), ) diff --git a/test/unit/data_convention/test_meteofrance.py b/test/unit/data_convention/test_meteofrance.py index c0833f72630a8b37d2b31fcb9ad114642cab0f7d..d47cbb7e1df8e2be3cd345782e7db061c8c075d5 100644 --- a/test/unit/data_convention/test_meteofrance.py +++ b/test/unit/data_convention/test_meteofrance.py @@ -40,7 +40,8 @@ class TestMeteoFranceConvention(TestCase): self.convention.get_startdate_path("19900101"), os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name", ), ) diff --git a/test/unit/data_convention/test_preface.py b/test/unit/data_convention/test_preface.py index 0b81eb002b834725c3ce6c4b2dff231a530b5e56..1f549da9bbbdf7b864abe2f549c4ac2f994369b6 100644 --- a/test/unit/data_convention/test_preface.py +++ b/test/unit/data_convention/test_preface.py @@ -41,7 +41,8 @@ class TestPrefaceConvention(TestCase): self.convention.get_startdate_path("19900101"), os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S19900101", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S19900101", ), ) @@ -69,8 +70,8 @@ class TestPrefaceConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S19900101/mon/" - "ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S19900101/mon/ocean/var/r2i1p1/version", ), ) @@ -86,8 +87,8 @@ class TestPrefaceConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S19900101/mon/" - "atmos/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S19900101/mon/atmos/var/r2i1p1/version", ), ) @@ -103,8 +104,8 @@ class TestPrefaceConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S19900101/mon/" - "ocean/var/grid/r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S19900101/mon/ocean/var/grid/r2i1p1/version", ), ) @@ -126,8 +127,8 @@ class TestPrefaceConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S19900101/mon/" - "ocean/var/r2i1p1/version", + 
"expid/original_files/cmorfiles/institute/experiment_name/" + "S19900101/mon/ocean/var/r2i1p1/version", ), ) @@ -150,8 +151,8 @@ class TestPrefaceConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S19900101/mon/" - "ocean/var/r2i1p1", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S19900101/mon/ocean/var/r2i1p1", ), ) @@ -319,8 +320,8 @@ class TestPrefaceConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20000101/mon/" - "ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20000101/mon/ocean/var/r2i1p1/version", ) ) self.assertTrue( @@ -339,8 +340,8 @@ class TestPrefaceConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20000101/mon/" - "ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20000101/mon/ocean/var/r2i1p1/version", ) ) self.assertFalse( @@ -357,8 +358,8 @@ class TestPrefaceConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20000101/mon" - "/ocean/var/", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20000101/mon/ocean/var/", ) ) self.assertFalse( @@ -367,7 +368,7 @@ class TestPrefaceConvention(TestCase): @mock.patch("os.path.isfile") def test_is_cmorized_not_enough_vars(self, mock_is_file): - mock_is_file.return_value = True + mock_is_file.return_value = False cmor_var = Mock() omon = Mock() omon.name = "Omon" @@ -377,8 +378,8 @@ class TestPrefaceConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20000101/mon/" - "ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20000101/mon/ocean/var/r2i1p1/version", ) ) self.assertFalse( @@ -395,8 +396,8 @@ class TestPrefaceConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20000101/mon/" - "r2i1p1/version", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20000101/mon/r2i1p1/version", ) ) self.assertFalse( @@ -413,7 +414,8 @@ class TestPrefaceConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20000101", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20000101", ) ) self.assertFalse( @@ -426,8 +428,8 @@ class TestPrefaceConvention(TestCase): def test_create_links(self, mock_create_link): member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20010101/mon/ocean/" - "var/r2i1p1", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20010101/mon/ocean/var/r2i1p1", ) os.makedirs(member_path) tempfile.mkstemp(dir=member_path) @@ -440,8 +442,8 @@ class TestPrefaceConvention(TestCase): def test_create_links_member_not_found(self, mock_create_link): member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20010101/mon/ocean/" - "var/r1i1p1", + "expid/original_files/cmorfiles/institute/experiment_name/" + "S20010101/mon/ocean/var/r1i1p1", ) os.makedirs(member_path) tempfile.mkstemp(dir=member_path) @@ -454,8 +456,8 @@ class TestPrefaceConvention(TestCase): def test_create_links_with_grid(self, mock_create_link): member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/experiment_name/S20010101/mon/" - "ocean/var/r2i1p1/grid", + 
"expid/original_files/cmorfiles/institute/experiment_name/" + "S20010101/mon/ocean/var/r2i1p1/grid", ) os.makedirs(member_path) tempfile.mkstemp(dir=member_path) diff --git a/test/unit/data_convention/test_primavera.py b/test/unit/data_convention/test_primavera.py index 01afd690313489f2e45f969e14071e275fcacd3e..91df38592a65d1db22557072b3d2c7dbec0b3bce 100644 --- a/test/unit/data_convention/test_primavera.py +++ b/test/unit/data_convention/test_primavera.py @@ -50,7 +50,8 @@ class TestPrimaveraConvention(TestCase): self.convention.get_startdate_path("19900101"), os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name", ), ) @@ -80,8 +81,8 @@ class TestPrimaveraConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/ocean_grid/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/ocean_grid/version", ), ) @@ -100,8 +101,8 @@ class TestPrimaveraConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/ocean_grid/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/ocean_grid/version", ), ) @@ -118,8 +119,8 @@ class TestPrimaveraConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/atmos_grid/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/atmos_grid/version", ), ) @@ -136,8 +137,8 @@ class TestPrimaveraConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/grid/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/grid/version", ), ) @@ -160,8 +161,8 @@ class TestPrimaveraConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/ocean_grid/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/ocean_grid/version", ), ) @@ -481,8 +482,8 @@ class TestPrimaveraConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var", ) ) self.assertTrue( @@ -502,8 +503,8 @@ class TestPrimaveraConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var", ) ) self.assertFalse( @@ -521,7 +522,8 @@ class TestPrimaveraConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/", ) ) self.assertFalse( @@ -539,8 +541,8 @@ class TestPrimaveraConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1", ) ) self.assertFalse( @@ 
-560,8 +562,8 @@ class TestPrimaveraConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101/" - "mon/ocean/var", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101/mon/ocean/var", ) ) self.assertFalse( @@ -575,8 +577,8 @@ class TestPrimaveraConvention(TestCase): """Test create links""" member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/gn", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/gn", ) os.makedirs(member_path) self.config.var_manager.tables = {"Omon": Mock()} @@ -591,8 +593,8 @@ class TestPrimaveraConvention(TestCase): """Test create links with version""" member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/gn/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/gn/version", ) os.makedirs(member_path) self.config.var_manager.tables = {"Omon": Mock()} @@ -609,8 +611,8 @@ class TestPrimaveraConvention(TestCase): """Test create links with version full startdate""" member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r2i1p1f1/Omon/var/gn/version", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r2i1p1f1/Omon/var/gn/version", ) os.makedirs(member_path) self.config.var_manager.tables = {"Omon": Mock()} @@ -625,8 +627,8 @@ class TestPrimaveraConvention(TestCase): """Test create links when the member can not be found""" member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/activity/institute/model/experiment_name/" - "r1i1p1f1/Omon/var/gn", + "expid/original_files/cmorfiles/activity/institute/model/" + "experiment_name/r1i1p1f1/Omon/var/gn", ) os.makedirs(member_path) self.config.var_manager.tables = {"Omon": Mock()} diff --git a/test/unit/data_convention/test_specs.py b/test/unit/data_convention/test_specs.py index 699a2e2bf255dece1f7884ce27d0d3bf67fcce19..a0da62247c8d60793f79156783321856512e7e2c 100644 --- a/test/unit/data_convention/test_specs.py +++ b/test/unit/data_convention/test_specs.py @@ -41,7 +41,8 @@ class TestSpecsConvention(TestCase): self.convention.get_startdate_path("19900101"), os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S19900101", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S19900101", ), ) @@ -69,8 +70,8 @@ class TestSpecsConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S19900101/" - "mon/ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S19900101/mon/ocean/var/r2i1p1/version", ), ) @@ -86,8 +87,8 @@ class TestSpecsConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S19900101/" - "mon/atmos/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S19900101/mon/atmos/var/r2i1p1/version", ), ) @@ -103,8 +104,8 @@ class TestSpecsConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S19900101/" - "mon/ocean/var/grid/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S19900101/mon/ocean/var/grid/r2i1p1/version", ), ) @@ -126,8 +127,8 @@ class TestSpecsConvention(TestCase): file_path, 
os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S19900101/" - "mon/ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S19900101/mon/ocean/var/r2i1p1/version", ), ) @@ -150,8 +151,8 @@ class TestSpecsConvention(TestCase): file_path, os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S19900101/" - "mon/ocean/var/r2i1p1", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S19900101/mon/ocean/var/r2i1p1", ), ) @@ -418,8 +419,8 @@ class TestSpecsConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101/" - "mon/ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101/mon/ocean/var/r2i1p1/version", ) ) self.assertTrue( @@ -438,8 +439,8 @@ class TestSpecsConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101/" - "mon/ocean/var/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101/mon/ocean/var/r2i1p1/version", ) ) self.assertFalse( @@ -457,7 +458,9 @@ class TestSpecsConvention(TestCase): self.config.cmor.min_cmorized_vars = 1 os.makedirs( os.path.join( - self.tmp_dir, "expid/cmorfiles/institute/model/experiment_name" + self.tmp_dir, + "expid/original_files/cmorfiles/institute/model/" + "experiment_name" ) ) self.assertFalse( @@ -474,8 +477,8 @@ class TestSpecsConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101/" - "mon/ocean/var/", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101/mon/ocean/var/", ) ) self.assertFalse( @@ -484,7 +487,7 @@ class TestSpecsConvention(TestCase): @mock.patch("os.path.isfile") def test_is_cmorized_not_enough_vars(self, mock_is_file): - mock_is_file.return_value = True + mock_is_file.return_value = False cmor_var = Mock() omon = Mock() omon.name = "Omon" @@ -494,9 +497,8 @@ class TestSpecsConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101/" - "mon/ocean/var/" - "r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101/mon/ocean/var/r2i1p1/version", ) ) self.assertFalse( @@ -513,8 +515,8 @@ class TestSpecsConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101/" - "mon/r2i1p1/version", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101/mon/r2i1p1/version", ) ) self.assertFalse( @@ -531,7 +533,8 @@ class TestSpecsConvention(TestCase): os.makedirs( os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20000101", + "expid/original_files/cmorfiles/institute/model/" + "experiment_name/S20000101", ) ) self.assertFalse( @@ -542,8 +545,8 @@ class TestSpecsConvention(TestCase): def test_create_links(self, mock_create_link): member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20010101/mon/" - "ocean/var/r2i1p1", + "expid/original_files/cmorfiles/institute/model/experiment_name/" + "S20010101/mon/ocean/var/r2i1p1", ) os.makedirs(member_path) tempfile.mkstemp(dir=member_path) @@ -554,8 +557,8 @@ class TestSpecsConvention(TestCase): def test_create_links_member_not_found(self, mock_create_link): member_path = os.path.join( self.tmp_dir, - 
"expid/cmorfiles/institute/model/experiment_name/S20010101/mon/" - "ocean/var/r1i1p1", + "expid/original_files/cmorfiles/institute/model/experiment_name/" + "S20010101/mon/ocean/var/r1i1p1", ) os.makedirs(member_path) tempfile.mkstemp(dir=member_path) @@ -566,9 +569,8 @@ class TestSpecsConvention(TestCase): def test_create_links_with_grid(self, mock_create_link): member_path = os.path.join( self.tmp_dir, - "expid/cmorfiles/institute/model/experiment_name/S20010101/" - "mon/ocean/var/" - "r2i1p1/grid", + "expid/original_files/cmorfiles/institute/model/experiment_name/" + "S20010101/mon/ocean/var/r2i1p1/grid", ) os.makedirs(member_path) tempfile.mkstemp(dir=member_path) diff --git a/test/unit/general/test_relink.py b/test/unit/general/test_relink.py deleted file mode 100644 index f3c87dcab7a74163e1a1e33c843bdbaa47993b66..0000000000000000000000000000000000000000 --- a/test/unit/general/test_relink.py +++ /dev/null @@ -1,147 +0,0 @@ -# coding=utf-8 -from unittest import TestCase - -from earthdiagnostics.diagnostic import DiagnosticVariableOption -from earthdiagnostics.box import Box -from earthdiagnostics.general.relink import Relink -from mock import Mock, patch - -from earthdiagnostics.modelingrealm import ModelingRealms - - -class TestRelink(TestCase): - def setUp(self): - self.data_manager = Mock() - - self.diags = Mock() - self.diags.model_version = "model_version" - self.diags.config.experiment.get_chunk_list.return_value = ( - ("20010101", 0, 0), - ("20010101", 0, 1), - ) - - self.box = Box() - self.box.min_depth = 0 - self.box.max_depth = 100 - - def fake_parse(self, value): - return value - - @patch.object(DiagnosticVariableOption, "parse", fake_parse) - def test_generate_jobs(self): - - jobs = Relink.generate_jobs(self.diags, ["diagnostic", "atmos", "var"]) - self.assertEqual(len(jobs), 2) - self.assertEqual( - jobs[0], - Relink( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.atmos, - "var", - True, - "", - ), - ) - self.assertEqual( - jobs[1], - Relink( - self.data_manager, - "20010101", - 0, - 1, - ModelingRealms.atmos, - "var", - True, - "", - ), - ) - - jobs = Relink.generate_jobs( - self.diags, ["diagnostic", "seaIce", "var", "False"] - ) - self.assertEqual(len(jobs), 2) - self.assertEqual( - jobs[0], - Relink( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.seaIce, - "var", - False, - "", - ), - ) - self.assertEqual( - jobs[1], - Relink( - self.data_manager, - "20010101", - 0, - 1, - ModelingRealms.seaIce, - "var", - False, - "", - ), - ) - - jobs = Relink.generate_jobs( - self.diags, ["diagnostic", "ocean", "var", "True", "grid"] - ) - self.assertEqual(len(jobs), 2) - self.assertEqual( - jobs[0], - Relink( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.ocean, - "var", - True, - "grid", - ), - ) - self.assertEqual( - jobs[1], - Relink( - self.data_manager, - "20010101", - 0, - 1, - ModelingRealms.ocean, - "var", - True, - "grid", - ), - ) - - with self.assertRaises(Exception): - Relink.generate_jobs(self.diags, ["diagnostic"]) - - with self.assertRaises(Exception): - Relink.generate_jobs( - self.diags, ["diagnostic", "0", "0", "0", "0", "0", "0", "0"] - ) - - def test_str(self): - mixed = Relink( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.ocean, - "var", - True, - "grid", - ) - self.assertEqual( - str(mixed), - "Relink output Startdate: 20010101 Member: 0 Chunk: 0 " - "Move old: True Variable: ocean:var Grid: grid", - ) diff --git a/test/unit/general/test_relinkall.py b/test/unit/general/test_relinkall.py deleted 
file mode 100644 index b49595e0ab00a0f20643133bbed0f5c113f29a14..0000000000000000000000000000000000000000 --- a/test/unit/general/test_relinkall.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -from unittest import TestCase - -from earthdiagnostics.diagnostic import DiagnosticVariableOption -from earthdiagnostics.box import Box -from earthdiagnostics.general.relinkall import RelinkAll -from mock import Mock, patch - - -class TestRelinkAll(TestCase): - def setUp(self): - self.data_manager = Mock() - - self.diags = Mock() - self.diags.config.experiment.get_chunk_list.return_value = ( - ("20010101", 0, 0), - ("20010101", 0, 1), - ) - self.diags.config.experiment.startdates = [ - "20010101", - ] - - self.box = Box() - self.box.min_depth = 0 - self.box.max_depth = 100 - - def fake_parse(self, value): - return value - - @patch.object(DiagnosticVariableOption, "parse", fake_parse) - def test_generate_jobs(self): - jobs = RelinkAll.generate_jobs(self.diags, ["diagnostic"]) - self.assertEqual(len(jobs), 1) - self.assertEqual(jobs[0], RelinkAll(self.data_manager, "20010101")) - - with self.assertRaises(Exception): - RelinkAll.generate_jobs(self.diags, ["diagnostic", "0"]) - - def test_str(self): - mixed = RelinkAll(self.data_manager, "20010101") - self.assertEqual(str(mixed), "Relink all output Startdate: 20010101") diff --git a/test/unit/general/test_select_levels.py b/test/unit/general/test_select_levels.py deleted file mode 100644 index f05b0a2f70abbf11846958d128604c3248661e88..0000000000000000000000000000000000000000 --- a/test/unit/general/test_select_levels.py +++ /dev/null @@ -1,198 +0,0 @@ -# coding=utf-8 -from unittest import TestCase -import os -from tempfile import mktemp - -from earthdiagnostics.diagnostic import ( - DiagnosticVariableListOption, - DiagnosticOptionError, -) -from earthdiagnostics.box import Box -from earthdiagnostics.general.select_levels import SelectLevels -from earthdiagnostics.frequency import Frequencies -from mock import Mock, patch - -from earthdiagnostics.modelingrealm import ModelingRealms - - -class TestSelectLevels(TestCase): - def setUp(self): - self.data_manager = Mock() - - self.diags = Mock() - self.diags.config.experiment.get_chunk_list.return_value = ( - ("20010101", 0, 0), - ("20010101", 0, 1), - ) - self.diags.config.experiment.startdates = [ - "20010101", - ] - self.diags.config.frequency = Frequencies.monthly - - self.box = Box() - self.box.min_depth = 0 - self.box.max_depth = 100 - self.var_file = mktemp(".nc") - - def tearDown(self): - if os.path.exists(self.var_file): - os.remove(self.var_file) - - def fake_parse(self, value): - return value.split("-") - - @patch.object(DiagnosticVariableListOption, "parse", fake_parse) - def test_generate_jobs(self): - jobs = SelectLevels.generate_jobs( - self.diags, ["diagnostic", "atmos", "var", "0", "20"] - ) - self.assertEqual(len(jobs), 2) - self.assertEqual( - jobs[0], - SelectLevels( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.atmos, - "var", - "", - 0, - 20, - ), - ) - self.assertEqual( - jobs[1], - SelectLevels( - self.data_manager, - "20010101", - 0, - 1, - ModelingRealms.atmos, - "var", - "", - 0, - 20, - ), - ) - - jobs = SelectLevels.generate_jobs( - self.diags, ["diagnostic", "atmos", "var1-var2", "0", "20"] - ) - self.assertEqual(len(jobs), 4) - self.assertEqual( - jobs[0], - SelectLevels( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.atmos, - "var1", - "", - 0, - 20, - ), - ) - self.assertEqual( - jobs[1], - SelectLevels( - self.data_manager, - "20010101", - 0, 
- 1, - ModelingRealms.atmos, - "var1", - "", - 0, - 20, - ), - ) - self.assertEqual( - jobs[2], - SelectLevels( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.atmos, - "var2", - "", - 0, - 20, - ), - ) - self.assertEqual( - jobs[3], - SelectLevels( - self.data_manager, - "20010101", - 0, - 1, - ModelingRealms.atmos, - "var2", - "", - 0, - 20, - ), - ) - - jobs = SelectLevels.generate_jobs( - self.diags, ["diagnostic", "atmos", "var", "0", "20", "grid"] - ) - self.assertEqual(len(jobs), 2) - self.assertEqual( - jobs[0], - SelectLevels( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.atmos, - "var", - "grid", - 0, - 20, - ), - ) - self.assertEqual( - jobs[1], - SelectLevels( - self.data_manager, - "20010101", - 0, - 1, - ModelingRealms.atmos, - "var", - "grid", - 0, - 20, - ), - ) - - with self.assertRaises(DiagnosticOptionError): - SelectLevels.generate_jobs(self.diags, ["diagnostic"]) - - with self.assertRaises(DiagnosticOptionError): - SelectLevels.generate_jobs( - self.diags, - ["diagnostic", "atmos", "var", "0", "20", "grid", "extra"], - ) - - def test_str(self): - select = SelectLevels( - self.data_manager, - "20010101", - 0, - 0, - ModelingRealms.atmos, - "var", - "grid", - 0, - 20, - ) - self.assertEqual( - str(select), - "Select levels Startdate: 20010101 Member: 0 Chunk: 0 " - "Variable: atmos:var Levels: 0-20 Grid: grid", - ) diff --git a/test/unit/test_config.py b/test/unit/test_config.py index 73102e2201e3316459df34a6d98f017966a97d04..5e4a1ae658d68feca96adfc1591f9a380c883701 100644 --- a/test/unit/test_config.py +++ b/test/unit/test_config.py @@ -492,34 +492,6 @@ class TestExperimentConfig(TestCase): config.parse_ini(self.mock_parser) self.assertEqual(config.startdates, ["20001101", "20011101"]) - self.mock_parser.add_value("EXPERIMENT", "STARTDATES", "200(0|1)1101") - config = ExperimentConfig() - config.parse_ini(self.mock_parser) - self.assertEqual(config.startdates, ["20001101", "20011101"]) - - self.mock_parser.add_value( - "EXPERIMENT", "STARTDATES", "200[0-2](02|05|08|11)01" - ) - config = ExperimentConfig() - config.parse_ini(self.mock_parser) - self.assertEqual( - config.startdates, - [ - u"20000201", - u"20000501", - u"20000801", - u"20001101", - u"20010201", - u"20010501", - u"20010801", - u"20011101", - u"20020201", - u"20020501", - u"20020801", - u"20021101", - ], - ) - def test_auto_startdates(self): """ Test parsing startdates using the automatic generation