# coding=utf-8
"""Common utilities for multiple topics that are not big enough to have their own module"""
import datetime
import os
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
from contextlib import contextmanager

import cf_units
import iris
import iris.exceptions
import netCDF4
import numpy as np
import six
import xxhash
from bscearth.utils.log import Log
from cdo import Cdo
from nco import Nco

from earthdiagnostics.constants import Basins


@contextmanager
def suppress_stdout():
    """Redirects the standard output to devnull"""
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            # Always restore stdout, even if the wrapped body raised
            sys.stdout = old_stdout


class Utils(object):
    """Container class for miscellaneous utility methods"""

    nco = Nco()
    """An instance of Nco class ready to be used"""
    cdo = Cdo()
    """An instance of Cdo class ready to be used"""

    @staticmethod
    def get_mask(basin):
        """
        Return the mask for the given basin

        Parameters
        ----------
        basin: Basin

        Returns
        -------
        numpy.array

        Raises
        ------
        Exception: If mask_regions.nc is not available
        """
        basin = Basins().parse(basin)
        if basin != Basins().Global:
            try:
                mask_handler = Utils.openCdf('mask_regions.nc')
                mask = mask_handler.variables[basin.name][:, 0, :]
                mask_handler.close()
            except IOError:
                # BUGFIX: the message used to name 'mask.regions.nc' while the
                # file actually opened is 'mask_regions.nc'
                raise Exception('File mask_regions.nc is required for basin {0}'.format(basin))
        else:
            mask_handler = Utils.openCdf('mask.nc')
            mask = mask_handler.variables['tmask'][0, 0, :]
            mask_handler.close()
        return mask

    @staticmethod
    def setminmax(filename, variable_list):
        """
        Set the valid_max and valid_min values to the current max and min values on the file

        Parameters
        ----------
        filename: str
        variable_list: str or iterable of str
        """
        # BUGFIX: a second, redundant reST docstring duplicating this one was removed
        # noinspection PyTypeChecker
        if isinstance(variable_list, six.string_types):
            variable_list = variable_list.split()
        Log.info('Getting max and min values for {0}', ' '.join(variable_list))
        handler = Utils.openCdf(filename)
        for variable in variable_list:
            var = handler.variables[variable]
            values = [np.max(var), np.min(var)]
            # ncatted attribute spec is att_name,var_name,mode,type,value
            Utils.nco.ncatted(input=filename, output=filename,
                              options=('-h -a valid_max,{0},m,f,{1}'.format(variable, values[0]),))
            Utils.nco.ncatted(input=filename, output=filename,
                              options=('-h -a valid_min,{0},m,f,{1}'.format(variable, values[1]),))
        handler.close()

    @staticmethod
    def rename_variable(filepath, old_name, new_name, must_exist=True, rename_dimension=False):
        """
        Rename variable from a NetCDF file

        This function is just a wrapper around Utils.rename_variables

        Parameters
        ----------
        filepath: str
        old_name: str
        new_name: str
        must_exist: bool, optional
        rename_dimension: bool, optional

        See Also
        --------
        Utils.rename_variables
        """
        Utils.rename_variables(filepath, {old_name: new_name}, must_exist, rename_dimension)

    @staticmethod
    def rename_variables(filepath, dic_names, must_exist=True, rename_dimension=False):
        """
        Rename multiple variables from a NetCDF file

        Parameters
        ----------
        filepath: str
        dic_names: dict of str: str
            Gives the renaming to do in the form old_name: new_name
        must_exist: bool, optional
        rename_dimension: bool, optional

        Raises
        -------
        ValueError
            If any original name is the same as the new
        Exception
            If any requested variable does not exist and must_exist is True
        """
        for old, new in six.iteritems(dic_names):
            if old == new:
                # BUGFIX: the placeholder was never filled in (missing .format)
                raise ValueError('{0} original name is the same as the new'.format(old))
        handler = Utils.openCdf(filepath)
        original_names = set(handler.variables.keys()).union(handler.dimensions.keys())
        if not any((True for x in dic_names.keys() if x in original_names)):
            handler.close()
            if must_exist:
                raise Exception("Variables {0} does not exist in file {1}".format(','.join(dic_names.keys()),
                                                                                  filepath))
            return
        handler.close()

        # Work on a copy so the original survives a failed rename attempt
        temp = TempFile.get()
        shutil.copyfile(filepath, temp)

        handler = Utils.openCdf(temp)
        error = False
        try:
            Utils._rename_vars_directly(dic_names, filepath, handler, must_exist, rename_dimension)
        except RuntimeError as ex:
            Log.debug('Renaming error: {0}', ex)
            error = True
        handler.close()

        if not error and not Utils.check_netcdf_file(temp):
            error = True

        if error:
            # Fall back to rebuilding the file variable by variable
            Log.debug('First attemp to rename failed. Using secondary rename method for netCDF')
            Utils._rename_vars_by_creating_new_file(dic_names, filepath, temp)

        Log.debug('Rename done')
        Utils.move_file(temp, filepath)

    @staticmethod
    def check_netcdf_file(filepath):
        """
        Check if a NetCDF file is well stored

        This functions is used to check if a NetCDF file is corrupted. It prefers to raise a false
        postive than to have false negatives.

        Parameters
        ----------
        filepath

        Returns
        -------
        bool
        """
        with suppress_stdout():
            try:
                handler = Utils.openCdf(filepath)
                if 'time' in handler.variables:
                    if handler.variables['time'].dimensions != ('time', ):
                        handler.close()
                        return False
                handler.close()
                iris.FUTURE.netcdf_promote = True
                cubes = iris.load(filepath)
                if len(cubes) == 0:
                    return False
            except Exception as ex:
                # Single handler replaces three byte-identical clauses: it covers
                # iris.exceptions.IrisError and the RuntimeError raised on HDF errors
                Log.debug('netCDF checks failed: {0}', ex)
                return False
            return True

    @staticmethod
    def get_file_variables(filename):
        """
        Get all the variables in a file

        Parameters
        ----------
        filename

        Returns
        -------
        iterable of str
        """
        handler = Utils.openCdf(filename)
        variables = handler.variables.keys()
        handler.close()
        return variables

    @staticmethod
    def _rename_vars_by_creating_new_file(dic_names, filepath, temp):
        # Rebuild the file from scratch, copying attributes, dimensions and
        # variables while applying the renames in dic_names
        original_handler = Utils.openCdf(filepath)
        new_handler = Utils.openCdf(temp, 'w')
        for attribute in original_handler.ncattrs():
            original = getattr(original_handler, attribute)
            setattr(new_handler, attribute, Utils.convert_to_ascii_if_possible(original))
        for dimension in original_handler.dimensions.keys():
            Utils.copy_dimension(original_handler, new_handler, dimension, new_names=dic_names)
        for variable in original_handler.variables.keys():
            Utils.copy_variable(original_handler, new_handler, variable, new_names=dic_names)
        original_handler.close()
        new_handler.close()

    # noinspection PyPep8Naming
    @staticmethod
    def convert_to_ascii_if_possible(string, encoding='ascii'):
        u"""
        Convert an Unicode string to ASCII if all characters can be translated.

        If a string can not be translated it is unchanged. It also automatically
        replaces Bretonnière with Bretonniere

        Parameters
        ----------
        string: unicode
        encoding: str, optional

        Returns
        -------
        str
        """
        # noinspection PyTypeChecker
        if isinstance(string, six.string_types):
            try:
                return string.encode(encoding)
            except UnicodeEncodeError:
                if u'Bretonnière' in string:
                    # BUGFIX: the replacement must be the plain ASCII form; replacing
                    # the name with itself caused infinite recursion here
                    string = string.replace(u'Bretonnière', 'Bretonniere')
                    return Utils.convert_to_ascii_if_possible(string, encoding)
        return string

    @staticmethod
    def _rename_vars_directly(dic_names, filepath, handler, must_exist, rename_dimension):
        # In-place rename on an open handler; raises RuntimeError on netCDF failures
        for old_name, new_name in dic_names.items():
            if rename_dimension:
                if old_name in handler.dimensions:
                    handler.renameDimension(old_name, new_name)
                elif must_exist:
                    raise Exception("Dimension {0} does not exist in file {1}".format(old_name, filepath))

            if old_name in handler.variables:
                if new_name not in handler.variables:
                    handler.renameVariable(old_name, new_name)
                # Keep the CF 'coordinates' attributes of every variable in sync
                for var in handler.variables:
                    if hasattr(var, 'coordinates') and " {0} ".format(old_name) in var.coordinates:
                        new_coordinates = var.coordinates.replace(" {0} ".format(old_name),
                                                                  " {0} ".format(new_name))
                        var.coordinates = Utils.convert_to_ascii_if_possible(new_coordinates)
            elif must_exist:
                raise Exception("Variable {0} does not exist in file {1}".format(old_name, filepath))
        handler.sync()

    @staticmethod
    def copy_file(source, destiny, save_hash=False, use_stored_hash=True, retrials=3):
        """
        Copy a file and compute a hash to check if the copy is equal to the source

        Parameters
        ----------
        source: str
        destiny: str
        save_hash: bool, optional
            If True, stores a copy of the hash
        use_stored_hash: bool, optional
            If True, try to use the stored value of the source hash instead of computing it
        retrials: int, optional
            Minimum value is 1

        See Also
        --------
        move_file
        """
        dirname_path = os.path.dirname(destiny)
        if dirname_path and not os.path.exists(dirname_path):
            try:
                os.makedirs(dirname_path)
                Utils.give_group_write_permissions(dirname_path)
            except OSError:
                # This can be due to a race condition. If directory already exists,
                # we don have to do nothing
                if not os.path.exists(dirname_path):
                    raise
        hash_destiny = None
        Log.debug('Hashing original file... {0}', datetime.datetime.now())
        hash_original = Utils.get_file_hash(source, use_stored=use_stored_hash)

        if retrials < 1:
            retrials = 1

        # Retry until the copy's hash matches the source's
        while hash_original != hash_destiny:
            if retrials == 0:
                raise Exception('Can not copy {0} to {1}'.format(source, destiny))
            Log.debug('Copying... {0}', datetime.datetime.now())
            shutil.copyfile(source, destiny)
            Log.debug('Hashing copy ... {0}', datetime.datetime.now())
            hash_destiny = Utils.get_file_hash(destiny, save=save_hash)
            retrials -= 1
        Log.debug('Finished {0}', datetime.datetime.now())

    @staticmethod
    def move_file(source, destiny, save_hash=False, retrials=3):
        """
        Move a file and compute a hash to check if the copy is equal to the source

        It is just a call to Utils.copy_file followed by the removal of the source

        Parameters
        ----------
        source: str
        destiny: str
        save_hash: bool, optional
            If True, stores a copy of the hash
        retrials: int, optional
            Minimum value is 1

        See Also
        --------
        copy_file
        """
        # BUGFIX: retrials was passed positionally and landed in copy_file's
        # use_stored_hash parameter; pass it by keyword instead
        Utils.copy_file(source, destiny, save_hash, retrials=retrials)
        os.remove(source)

    @staticmethod
    def remove_file(path):
        """
        Delete a file only if it previously exists

        Parameters
        ----------
        path: str
        """
        if os.path.isfile(path):
            os.remove(path)

    @staticmethod
    def copy_tree(source, destiny):
        """
        Copies a full tree to a new location

        Parameters
        ----------
        source: str
        destiny: str

        See Also
        --------
        move_tree
        """
        if not os.path.exists(destiny):
            os.makedirs(destiny)
            shutil.copystat(source, destiny)
        lst = os.listdir(source)
        for item in lst:
            item_source = os.path.join(source, item)
            item_destiny = os.path.join(destiny, item)
            if os.path.isdir(item_source):
                Utils.copy_tree(item_source, item_destiny)
            else:
                shutil.copy2(item_source, item_destiny)

    @staticmethod
    def move_tree(source, destiny):
        """
        Move a tree to a new location

        Parameters
        ----------
        source: str
        destiny: str

        See Also
        -------
        copy_tree
        """
        Utils.copy_tree(source, destiny)
        shutil.rmtree(source)

    @staticmethod
    def get_file_hash(filepath, use_stored=False, save=False):
        """
        Get the xxHash hash for a given file

        Parameters
        ----------
        filepath: str
        use_stored: bool, optional
            If True, tries to use the stored hash before computing it
        save: bool, optional
            If True, saves the hash to a file
        """
        if use_stored:
            hash_file = Utils._get_hash_filename(filepath)
            if os.path.isfile(hash_file):
                # BUGFIX: use a context manager so the handle is not leaked
                with open(hash_file, 'r') as handler:
                    return handler.readline()

        blocksize = 104857600
        hasher = xxhash.xxh64()
        with open(filepath, 'rb') as afile:
            buf = afile.read(blocksize)
            while len(buf) > 0:
                hasher.update(buf)
                buf = afile.read(blocksize)
        hash_value = hasher.hexdigest()
        if save:
            with open(Utils._get_hash_filename(filepath), 'w') as hash_file:
                hash_file.write(hash_value)
        return hash_value

    @staticmethod
    def _get_hash_filename(filepath):
        # Hash is stored next to the file, as a hidden '.<name>.xxhash64.hash'
        folder = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        hash_file = os.path.join(folder, '.{0}.xxhash64.hash'.format(filename))
        return hash_file

    @staticmethod
    def execute_shell_command(command, log_level=Log.DEBUG):
        """
        Execute shell command

        Writes the output to the log with the specified level

        Parameters
        ----------
        command: str or iterable of str
        log_level: int, optional

        Returns
        -------
        iterable of str
            Standard output of the command

        Raises
        ------
        Utils.ExecutionError
            If the command return value is non zero
        """
        # noinspection PyTypeChecker
        if isinstance(command, six.string_types):
            command = command.split()
        process = subprocess.Popen(command, stdout=subprocess.PIPE)
        output = list()
        comunicate = process.communicate()
        for line in comunicate:
            if not line:
                continue
            if log_level != Log.NO_LOG:
                Log.log.log(log_level, line)
            output.append(line)
        if process.returncode != 0:
            raise Utils.ExecutionError('Error executing {0}\n Return code: {1}'.format(' '.join(command),
                                                                                       str(process.returncode)))
        return output

    _cpu_count = None

    @staticmethod
    def available_cpu_count():
        """Number of available virtual or physical CPUs on this system"""
        if Utils._cpu_count is None:
            try:
                m = re.search(r'(?m)^Cpus_allowed:\s*(.*)$',
                              open('/proc/self/status').read())
                if m:
                    res = bin(int(m.group(1).replace(',', ''), 16)).count('1')
                    if res > 0:
                        Utils._cpu_count = res
            except IOError:
                pass
            if Utils._cpu_count is None:
                # BUGFIX: fall back to multiprocessing also when /proc/self/status
                # was readable but yielded no usable CPU mask (the original then
                # returned None)
                try:
                    import multiprocessing
                    Utils._cpu_count = multiprocessing.cpu_count()
                except (ImportError, NotImplementedError):
                    Utils._cpu_count = -1
        return Utils._cpu_count

    @staticmethod
    def convert2netcdf4(filetoconvert):
        """
        Convert a file to NetCDF4

        Conversion only performed if required. Deflation level set to 4
        and shuffle activated.

        Parameters
        ----------
        filetoconvert: str
        """
        if Utils._is_compressed_netcdf4(filetoconvert):
            return
        Log.debug('Reformatting to netCDF-4')
        temp = TempFile.get()
        Utils.execute_shell_command(["nccopy", "-4", "-d4", "-s", filetoconvert, temp])
        shutil.move(temp, filetoconvert)

    @classmethod
    def _is_compressed_netcdf4(cls, filetoconvert):
        # A file is considered compressed when it is NETCDF4 and every variable
        # is stored with deflate level 4 and shuffle enabled
        is_compressed = True
        handler = Utils.openCdf(filetoconvert)
        if not handler.file_format == 'NETCDF4':
            is_compressed = False
            handler.close()
        else:
            # BUGFIX: read the variable names BEFORE closing the handler; the
            # original iterated handler.variables after close(), which raises
            variables = list(handler.variables)
            handler.close()
            ncdump_result = Utils.execute_shell_command('ncdump -hs {0}'.format(filetoconvert), Log.NO_LOG)
            ncdump_result = ncdump_result[0].replace('\t', '').split('\n')
            for var in variables:
                if not '{0}:_DeflateLevel = 4 ;'.format(var) in ncdump_result:
                    is_compressed = False
                    break
                if not '{0}:_Shuffle = "true" ;'.format(var) in ncdump_result:
                    is_compressed = False
                    break
        return is_compressed

    # noinspection PyPep8Naming
    @staticmethod
    def openCdf(filepath, mode='a'):
        """
        Open a NetCDF file

        Parameters
        ----------
        filepath: str
        mode: str, optional

        Returns
        -------
        netCDF4.Dataset
        """
        return netCDF4.Dataset(filepath, mode)

    @staticmethod
    def get_datetime_from_netcdf(handler, time_variable='time'):
        """
        Get time from NetCDF files

        Parameters
        ----------
        handler: netCDF4.Dataset
        time_variable: str, optional

        Returns
        -------
        numpy.array of Datetime
        """
        var_time = handler.variables[time_variable]
        nctime = var_time[:]  # get values
        units = var_time.units

        try:
            cal_temps = var_time.calendar
        except AttributeError:
            # CF default calendar when the attribute is missing
            cal_temps = u"standard"
        return netCDF4.num2date(nctime, units=units, calendar=cal_temps)

    @staticmethod
    def copy_variable(source, destiny, variable, must_exist=True, add_dimensions=False, new_names=None):
        """
        Copies the given variable from source to destiny

        :param add_dimensions: if it's true, dimensions required by the variable will be automatically added to the
                               file. It will also add the dimension variable
        :type add_dimensions: bool
        :param source: origin file
        :type source: netCDF4.Dataset
        :param destiny: destiny file
        :type destiny: netCDF4.Dataset
        :param variable: variable to copy
        :type variable: str
        :param must_exist: if false, does not raise an error if variable does not exist
        :type must_exist: bool
        :param new_names: dictionary containing variables to rename and new name as key-value pairs
        :type new_names: dict
        :return:
        """
        if not must_exist and variable not in source.variables.keys():
            return

        if not new_names:
            new_names = dict()
        if variable in new_names:
            new_name = new_names[variable]
        else:
            new_name = variable
        if new_name in destiny.variables.keys():
            return

        translated_dimensions = Utils._translate(source.variables[variable].dimensions, new_names)
        if not set(translated_dimensions).issubset(destiny.dimensions):
            if not add_dimensions:
                raise Exception('Variable {0} can not be added because dimensions does not match: '
                                '{1} {2}'.format(variable, translated_dimensions, destiny.dimensions))
            for dimension in source.variables[variable].dimensions:
                Utils.copy_dimension(source, destiny, dimension, must_exist, new_names)
        if new_name in destiny.variables.keys():
            # Just in case the variable we are copying match a dimension name
            return
        original_var = source.variables[variable]
        new_var = destiny.createVariable(new_name, original_var.datatype, translated_dimensions)
        Utils.copy_attributes(new_var, original_var)
        if hasattr(new_var, 'coordinates'):
            # Apply the renames to the CF 'coordinates' attribute too
            coords = [new_names[coord] if coord in new_names else coord for coord in new_var.coordinates.split(' ')]
            new_var.coordinates = Utils.convert_to_ascii_if_possible(' '.join(coords))

        new_var[:] = original_var[:]

    @staticmethod
    def copy_attributes(new_var, original_var, omitted_attributtes=None):
        """Copy all attributes from original_var to new_var, except the omitted ones"""
        if omitted_attributtes is None:
            omitted_attributtes = []
        new_var.setncatts({k: Utils.convert_to_ascii_if_possible(original_var.getncattr(k))
                           for k in original_var.ncattrs() if k not in omitted_attributtes})
@staticmethod def copy_dimension(source, destiny, dimension, must_exist=True, new_names=None): """ Copies the given dimension from source to destiny, including dimension variables if present :param new_names: dictionary containing variables to rename and new name as key-value pairs :type new_names: dict :param source: origin file :type source: netCDF4.Dataset :param destiny: destiny file :type destiny: netCDF4.Dataset :param dimension: variable to copy :type dimension: str :param must_exist: if false, does not raise an error uf variable does not exist :type must_exist: bool :return: """ if not must_exist and dimension not in source.dimensions.keys(): return if not new_names: new_names = dict() if dimension in new_names: new_name = new_names[dimension] else: new_name = dimension if new_name in destiny.dimensions.keys(): return if not new_name: new_name = dimension destiny.createDimension(new_name, source.dimensions[dimension].size) if dimension in source.variables: Utils.copy_variable(source, destiny, dimension, new_names=new_names) @staticmethod def concat_variables(source, destiny, remove_source=False): """ Add variables from a nc file to another :param source: path to source file :type source: str :param destiny: path to destiny file :type destiny: str :param remove_source: if True, removes source file :type remove_source: bool """ if os.path.exists(destiny): handler_total = Utils.openCdf(destiny) handler_variable = Utils.openCdf(source) concatenated = dict() for var in handler_variable.variables: if var not in handler_total.variables: Utils.copy_variable(handler_variable, handler_total, var, add_dimensions=True) else: variable = handler_variable.variables[var] if 'time' not in variable.dimensions: continue concatenated[var] = np.concatenate((handler_total.variables[var][:], variable[:]), axis=variable.dimensions.index('time')) for var, array in six.iteritems(concatenated): handler_total.variables[var][:] = array handler_total.close() handler_variable.close() if 
remove_source: os.remove(source) else: if remove_source: Utils.move_file(source, destiny) else: shutil.copy(source, destiny) Utils.convert2netcdf4(destiny) class ExecutionError(Exception): """ Exception to raise when a command execution fails """ pass @classmethod def _translate(cls, dimensions, new_names): translated = list() for dim in dimensions: if dim in new_names: translated.append(new_names[dim]) else: translated.append(dim) return translated @staticmethod def create_folder_tree(path): """ Createas a fodle path will and parent directories if needed. :param path: folder's path :type path: str """ if not os.path.exists(path): # noinspection PyBroadException try: os.makedirs(path) except Exception: # Here we can have a race condition. Let's check again for existence and rethrow if still not exists if not os.path.isdir(path): raise @staticmethod def give_group_write_permissions(path): st = os.stat(path) if st.st_mode & stat.S_IWGRP: return os.chmod(path, st.st_mode | stat.S_IWGRP) @staticmethod def convert_units(var_handler, new_units, calendar=None, old_calendar=None): if new_units == var_handler.units: return if hasattr(var_handler, 'calendar'): old_calendar = var_handler.calendar new_unit = cf_units.Unit(new_units, calendar=calendar) old_unit = cf_units.Unit(var_handler.units, calendar=old_calendar) var_handler[:] = old_unit.convert(var_handler[:], new_unit, inplace=True) if 'valid_min' in var_handler.ncattrs(): var_handler.valid_min = old_unit.convert(float(var_handler.valid_min), new_unit, inplace=True) if 'valid_max' in var_handler.ncattrs(): var_handler.valid_max = old_unit.convert(float(var_handler.valid_max), new_unit, inplace=True) var_handler.units = new_units @staticmethod def untar(files, destiny_path): """ Untar files to a given destiny :param files: files to unzip :type files: list[Any] | Tuple[Any] :param destiny_path: path to destination folder :type destiny_path: str """ for filepath in files: Log.debug('Unpacking {0}', filepath) tar = 
tarfile.open(filepath) for file_compressed in tar.getmembers(): if file_compressed.isdir(): if os.path.isdir(os.path.join(destiny_path, file_compressed.name)): continue else: if os.path.exists(os.path.join(destiny_path, file_compressed.name)): os.remove(os.path.join(destiny_path, file_compressed.name)) tar.extract(file_compressed, destiny_path) tar.close() @staticmethod def unzip(files, force=False): """ Unzip a list of files :param files: files to unzip :type files: list | str :param force: if True, it will overwrite unzipped files :type force: bool """ # noinspection PyTypeChecker if isinstance(files, six.string_types): files = [files] for filepath in files: Log.debug('Unzipping {0}', filepath) if force: option = ' -f' else: option = '' try: Utils.execute_shell_command('gunzip{1} {0}'.format(filepath, option)) except Exception as ex: raise Utils.UnzipException('Can not unzip {0}: {1}'.format(filepath, ex)) class UnzipException(Exception): """ Excpetion raised when unzip fails """ pass class TempFile(object): """ Class to manage temporal files """ autoclean = True """ If True, new temporary files are added to the list for future cleaning """ files = list() """ List of files to clean automatically """ scratch_folder = '' """ Scratch folder to create temporary files on it """ prefix = 'temp' """ Prefix for temporary filenames """ @staticmethod def get(filename=None, clean=None, suffix='.nc'): """ Gets a new temporal filename, storing it for automated cleaning :param suffix: :param filename: if it is not none, the function will use this filename instead of a random one :type filename: str :param clean: if true, stores filename for cleaning :type clean: bool :return: path to the temporal file :rtype: str """ if clean is None: clean = TempFile.autoclean if filename: path = os.path.join(TempFile.scratch_folder, filename) else: fd, path = tempfile.mkstemp(dir=TempFile.scratch_folder, prefix=TempFile.prefix, suffix=suffix) path = str(path) os.close(fd) if clean: 
TempFile.files.append(path) return path @staticmethod def clean(): """ Removes all temporary files created with Tempfile until now """ for temp_file in TempFile.files: if os.path.exists(temp_file): os.remove(temp_file) TempFile.files = list()