# coding=utf-8
"""Miscellaneous utility classes and helpers for NetCDF file handling."""
import datetime
import os
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
from contextlib import contextmanager

import iris
import iris.exceptions
import netCDF4
import numpy as np
import six
import xxhash
from bscearth.utils.log import Log
from cdo import Cdo
from cfunits import Units
from nco import Nco

from earthdiagnostics.constants import Basins


@contextmanager
def suppress_stdout():
    """Context manager that silences stdout for the duration of the block."""
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            # Always restore stdout, even if the body raised
            sys.stdout = old_stdout


class File(object):
    # Placeholder class; no behavior defined in this module
    pass


class Utils(object):
    """
    Container class for miscellaneous utility methods
    """

    nco = Nco()
    """An instance of Nco class ready to be used"""
    cdo = Cdo()
    """An instance of Cdo class ready to be used"""

    @staticmethod
    def get_mask(basin):
        """
        Returns a numpy array containing the mask for the given basin

        :param basin: basin to retrieve
        :type basin: Basin
        :return: mask
        :rtype: numpy.array
        """
        basin = Basins().parse(basin)
        if basin != Basins().Global:
            try:
                mask_handler = Utils.openCdf('mask_regions.nc')
                mask = mask_handler.variables[basin.name][:, 0, :]
                mask_handler.close()
            except IOError:
                # Bug fix: the message referred to 'mask.regions.nc', but the
                # file actually opened above is 'mask_regions.nc'
                raise Exception('File mask_regions.nc is required for basin {0}'.format(basin))
        else:
            mask_handler = Utils.openCdf('mask.nc')
            mask = mask_handler.variables['tmask'][0, 0, :]
            mask_handler.close()
        return mask

    @staticmethod
    def setminmax(filename, variable_list):
        """
        Sets the valid_max and valid_min values to the current max and min values on the file

        :param filename: path to file
        :type filename: str
        :param variable_list: list of variables in which valid_min and valid_max will be set
        :type variable_list: str | list
        """
        # noinspection PyTypeChecker
        if isinstance(variable_list, six.string_types):
            variable_list = variable_list.split()

        Log.info('Getting max and min values for {0}', ' '.join(variable_list))

        handler = Utils.openCdf(filename)
        for variable in variable_list:
            var = handler.variables[variable]
            values = [np.max(var), np.min(var)]
            # -h keeps ncatted from appending to the history attribute
            Utils.nco.ncatted(input=filename, output=filename,
                              options=('-h -a valid_max,{0},m,f,{1}'.format(variable, values[0]),))
            Utils.nco.ncatted(input=filename, output=filename,
                              options=('-h -a valid_min,{0},m,f,{1}'.format(variable, values[1]),))
        handler.close()

    @staticmethod
    def rename_variable(filepath, old_name, new_name, must_exist=True, rename_dimension=False):
        """
        Rename multiple variables from a NetCDF file

        :param filepath: path to file
        :type filepath: str
        :param old_name: variable's name to change
        :type old_name: str
        :param new_name: new name
        :type new_name: str
        :param must_exist: if True, the function will raise an exception if the variable name does not exist
        :type must_exist: bool
        :param rename_dimension: if True, also rename dimensions with the same name
        :type rename_dimension: bool
        """
        Utils.rename_variables(filepath, {old_name: new_name}, must_exist, rename_dimension)

    @staticmethod
    def rename_variables(filepath, dic_names, must_exist=True, rename_dimension=False):
        """
        Rename multiple variables from a NetCDF file

        :param filepath: path to file
        :type filepath: str
        :param dic_names: dictionary containing old names as keys and new names as values
        :type dic_names: dict
        :param must_exist: if True, the function will raise an exception if the variable name does not exist
        :type must_exist: bool
        :param rename_dimension: if True, also rename dimensions with the same name
        :type rename_dimension: bool
        """
        for old, new in six.iteritems(dic_names):
            if old == new:
                # Bug fix: the '{0}' placeholder was never filled (the
                # .format(old) call was missing from the original)
                raise ValueError('{0} original name is the same as the new'.format(old))
        handler = Utils.openCdf(filepath)

        original_names = set(handler.variables.keys()).union(handler.dimensions.keys())
        if not any((True for x in dic_names.keys() if x in original_names)):
            handler.close()
            if must_exist:
                raise Exception("Variables {0} does not exist in file {1}".format(','.join(dic_names.keys()),
                                                                                  filepath))
            return
        handler.close()

        # Work on a copy so a failed rename never corrupts the original file
        temp = TempFile.get()
        shutil.copyfile(filepath, temp)

        handler = Utils.openCdf(temp)
        error = False
        try:
            Utils._rename_vars_directly(dic_names, filepath, handler, must_exist, rename_dimension)
        except RuntimeError:
            error = True
        handler.close()

        if not Utils.check_netcdf_file(temp):
            error = True

        if error:
            # Direct rename corrupted the file or failed: rebuild it variable
            # by variable instead
            Log.debug('First attemp to rename failed. Using secondary rename method for netCDF')
            Utils._rename_vars_by_creating_new_file(dic_names, filepath, temp)
            Log.debug('Rename done')

        Utils.move_file(temp, filepath)

    @staticmethod
    def check_netcdf_file(filepath):
        """Return True if iris can load the file without raising an IrisError."""
        with suppress_stdout():
            try:
                iris.FUTURE.netcdf_promote = True
                iris.load(filepath)
            except iris.exceptions.IrisError:
                return False
        return True

    @staticmethod
    def get_file_variables(filename):
        """Return the list of variable names contained in a NetCDF file."""
        handler = Utils.openCdf(filename)
        variables = handler.variables.keys()
        handler.close()
        return variables

    @staticmethod
    def _rename_vars_by_creating_new_file(dic_names, filepath, temp):
        # Fallback rename strategy: create a brand new file and copy global
        # attributes, dimensions and variables across, applying the new names
        original_handler = Utils.openCdf(filepath)
        new_handler = Utils.openCdf(temp, 'w')
        for attribute in original_handler.ncattrs():
            original = getattr(original_handler, attribute)
            setattr(new_handler, attribute, Utils.convert_to_ASCII_if_possible(original))
        for dimension in original_handler.dimensions.keys():
            Utils.copy_dimension(original_handler, new_handler, dimension, new_names=dic_names)
        for variable in original_handler.variables.keys():
            Utils.copy_variable(original_handler, new_handler, variable, new_names=dic_names)
        original_handler.close()
        new_handler.close()

    # noinspection PyPep8Naming
    @staticmethod
    def convert_to_ASCII_if_possible(string, encoding='ascii'):
        """Encode a string to ASCII, working around one known non-ASCII author name."""
        # noinspection PyTypeChecker
        if isinstance(string, six.string_types):
            try:
                return string.encode(encoding)
            except UnicodeEncodeError:
                if u'Bretonnière' in string:
                    # Replace the known offending name and retry the encode
                    string = string.replace(u'Bretonnière', 'Bretonniere')
                    return Utils.convert_to_ASCII_if_possible(string, encoding)
                # Bug fix: the original recursed unconditionally here, causing
                # infinite recursion for any other non-ASCII string. Return the
                # string unchanged instead.
                return string
        return string

    @staticmethod
    def _rename_vars_directly(dic_names, filepath, handler, must_exist, rename_dimension):
        # In-place rename using the netCDF4 API; raises RuntimeError on failure
        for old_name, new_name in dic_names.items():
            if rename_dimension:
                if old_name in handler.dimensions:
                    handler.renameDimension(old_name, new_name)
                elif must_exist:
                    raise Exception("Dimension {0} does not exist in file {1}".format(old_name, filepath))

            if old_name in handler.variables:
                if new_name not in handler.variables:
                    handler.renameVariable(old_name, new_name)
            elif must_exist:
                raise Exception("Variable {0} does not exist in file {1}".format(old_name, filepath))
        handler.sync()

    @staticmethod
    def copy_file(source, destiny, save_hash=False):
        """
        Copies a file from source to destiny, creating dirs if necessary

        :param save_hash: if True, stores hash value in a file
        :type save_hash: bool
        :param source: path to source
        :type source: str
        :param destiny: path to destiny
        :type destiny: str
        """
        dirname_path = os.path.dirname(destiny)
        if dirname_path and not os.path.exists(dirname_path):
            try:
                os.makedirs(dirname_path)
                Utils.give_group_write_permissions(dirname_path)
            except OSError as ex:
                # This can be due to a race condition: if the directory already
                # exists, there is nothing to do
                if not os.path.exists(dirname_path):
                    raise ex
        hash_destiny = None
        Log.debug('Hashing original file... {0}', datetime.datetime.now())
        hash_original = Utils.get_file_hash(source, use_stored=True)

        # Retry the copy until source and destination hashes match
        retrials = 3
        while hash_original != hash_destiny:
            if retrials == 0:
                raise Exception('Can not copy {0} to {1}'.format(source, destiny))
            Log.debug('Copying... {0}', datetime.datetime.now())
            shutil.copyfile(source, destiny)
            Log.debug('Hashing copy ... {0}', datetime.datetime.now())
            hash_destiny = Utils.get_file_hash(destiny, save=save_hash)
            retrials -= 1
        Log.info('Finished {0}', datetime.datetime.now())

    @staticmethod
    def move_file(source, destiny, save_hash=False):
        """
        Moves a file from source to destiny, creating dirs if necessary

        :param source: path to source
        :type source: str
        :param destiny: path to destiny
        :type destiny: str
        :param save_hash: if True, stores hash value in a file
        :type save_hash: bool
        """
        Utils.copy_file(source, destiny, save_hash)
        os.remove(source)

    @staticmethod
    def remove_file(path):
        """
        Removes a file, checking before if its exists

        :param path: path to file
        :type path: str
        """
        if os.path.isfile(path):
            os.remove(path)

    @staticmethod
    def copy_tree(source, destiny):
        """Recursively copy a directory tree, preserving stats."""
        if not os.path.exists(destiny):
            os.makedirs(destiny)
            shutil.copystat(source, destiny)
        lst = os.listdir(source)
        for item in lst:
            item_source = os.path.join(source, item)
            item_destiny = os.path.join(destiny, item)
            if os.path.isdir(item_source):
                Utils.copy_tree(item_source, item_destiny)
            else:
                shutil.copy2(item_source, item_destiny)

    @staticmethod
    def move_tree(source, destiny):
        """Copy a directory tree to destiny and remove the source."""
        Utils.copy_tree(source, destiny)
        shutil.rmtree(source)

    @staticmethod
    def get_file_hash(filepath, use_stored=False, save=False):
        """
        Returns the xxHash hash for the given filepath

        :param filepath: path to the file to compute hash on
        :type filepath:str
        :param use_stored: if True, try to read the hash value from file
        :type use_stored: bool
        :param save: if True, stores hash value in a file
        :type save: bool
        :return: file's xxHash hash
        :rtype: str
        """
        if use_stored:
            hash_file = Utils._get_hash_filename(filepath)
            if os.path.isfile(hash_file):
                # Bug fix: the file handle was previously leaked (opened
                # without being closed)
                with open(hash_file, 'r') as stored:
                    return stored.readline()

        blocksize = 104857600  # 100 MiB read chunks
        hasher = xxhash.xxh64()
        with open(filepath, 'rb') as afile:
            buf = afile.read(blocksize)
            while len(buf) > 0:
                hasher.update(buf)
                buf = afile.read(blocksize)
        hash_value = hasher.hexdigest()
        if save:
            with open(Utils._get_hash_filename(filepath), 'w') as hash_file:
                hash_file.write(hash_value)
        return hash_value

    @staticmethod
    def _get_hash_filename(filepath):
        # The hash sidecar is a hidden file next to the original:
        # /dir/name.nc -> /dir/.name.nc.xxhash64.hash
        folder = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        hash_file = os.path.join(folder, '.{0}.xxhash64.hash'.format(filename))
        return hash_file

    @staticmethod
    def execute_shell_command(command, log_level=Log.DEBUG):
        """
        Executes a shell command

        :param command: command to execute; a string is split on whitespace
        :type command: str | list
        :param log_level: log level to use for command output
        :type log_level: int
        :return: command output
        :rtype: list
        """
        # noinspection PyTypeChecker
        if isinstance(command, six.string_types):
            command = command.split()
        process = subprocess.Popen(command, stdout=subprocess.PIPE)
        output = list()
        comunicate = process.communicate()
        for line in comunicate:
            if not line:
                continue
            if log_level != Log.NO_LOG:
                Log.log.log(log_level, line)
            output.append(line)
        if process.returncode != 0:
            raise Utils.ExecutionError('Error executing {0}\n Return code: {1}'.format(' '.join(command),
                                                                                       process.returncode))
        return output

    _cpu_count = None

    @staticmethod
    def available_cpu_count():
        """Number of available virtual or physical CPUs on this system"""
        if Utils._cpu_count is None:
            try:
                # cgroups / affinity aware: count the bits of Cpus_allowed
                m = re.search(r'(?m)^Cpus_allowed:\s*(.*)$',
                              open('/proc/self/status').read())
                if m:
                    res = bin(int(m.group(1).replace(',', ''), 16)).count('1')
                    if res > 0:
                        Utils._cpu_count = res
            except IOError:
                # /proc not available (non-Linux): fall back to multiprocessing
                try:
                    import multiprocessing
                    Utils._cpu_count = multiprocessing.cpu_count()
                    return Utils._cpu_count
                except (ImportError, NotImplementedError):
                    Utils._cpu_count = -1
        return Utils._cpu_count

    @staticmethod
    def convert2netcdf4(filetoconvert):
        """
        Checks if a file is in netCDF4 format and converts to netCDF4 if not

        :param filetoconvert: file to convert
        :type filetoconvert: str
        """
        if Utils._is_compressed_netcdf4(filetoconvert):
            return

        Log.debug('Reformatting to netCDF-4')
        temp = TempFile.get()
        # -4: netCDF-4 format, -d4: deflate level 4, -s: shuffle
        Utils.execute_shell_command(["nccopy", "-4", "-d4", "-s", filetoconvert, temp])
        shutil.move(temp, filetoconvert)

    @classmethod
    def _is_compressed_netcdf4(cls, filetoconvert):
        # A file counts as "compressed netCDF4" only if it is NETCDF4 format
        # AND every variable has deflate level 4 with shuffle enabled
        is_compressed = True
        handler = Utils.openCdf(filetoconvert)
        if not handler.file_format == 'NETCDF4':
            is_compressed = False
        else:
            ncdump_result = Utils.execute_shell_command('ncdump -hs {0}'.format(filetoconvert), Log.NO_LOG)
            ncdump_result = ncdump_result[0].replace('\t', '').split('\n')
            for var in handler.variables:
                if not '{0}:_DeflateLevel = 4 ;'.format(var) in ncdump_result:
                    is_compressed = False
                    break
                if not '{0}:_Shuffle = "true" ;'.format(var) in ncdump_result:
                    is_compressed = False
                    break
        handler.close()
        return is_compressed

    # noinspection PyPep8Naming
    @staticmethod
    def openCdf(filepath, mode='a'):
        """
        Opens a netCDF file and returns a handler to it

        :param filepath: path to the file
        :type filepath: str
        :param mode: mode to open the file. By default, a (append)
        :type mode: str
        :return: handler to the file
        :rtype: netCDF4.Dataset
        """
        return netCDF4.Dataset(filepath, mode)

    @staticmethod
    def get_datetime_from_netcdf(handler, time_variable='time'):
        """
        Gets a datetime array from a netCDF file

        :param handler: file to read
        :type handler: netCDF4.Dataset
        :param time_variable: variable to read, by default 'time'
        :type time_variable: str
        :return: Datetime numpy array created from the values stored at the netCDF file
        :rtype: np.array
        """
        var_time = handler.variables[time_variable]
        nctime = var_time[:]  # get values
        units = var_time.units

        try:
            cal_temps = var_time.calendar
        except AttributeError:
            # Calendar attribute is optional in CF; default to standard
            cal_temps = u"standard"
        return netCDF4.num2date(nctime, units=units, calendar=cal_temps)

    @staticmethod
    def copy_variable(source, destiny, variable, must_exist=True, add_dimensions=False, new_names=None):
        """
        Copies the given variable from source to destiny

        :param add_dimensions: if it's true, dimensions required by the variable will be automatically added to the
        file. It will also add the dimension variable
        :type add_dimensions: bool
        :param source: origin file
        :type source: netCDF4.Dataset
        :param destiny: destiny file
        :type destiny: netCDF4.Dataset
        :param variable: variable to copy
        :type variable: str
        :param must_exist: if false, does not raise an error uf variable does not exist
        :type must_exist: bool
        :param new_names: dictionary containing variables to rename and new name as key-value pairs
        :type new_names: dict
        :return:
        """
        if not must_exist and variable not in source.variables.keys():
            return

        if not new_names:
            new_names = dict()
        if variable in new_names:
            new_name = new_names[variable]
        else:
            new_name = variable
        if new_name in destiny.variables.keys():
            return

        translated_dimensions = Utils._translate(source.variables[variable].dimensions, new_names)
        if not set(translated_dimensions).issubset(destiny.dimensions):
            if not add_dimensions:
                raise Exception('Variable {0} can not be added because dimensions does not match: '
                                '{1} {2}'.format(variable, translated_dimensions, destiny.dimensions))
            for dimension in source.variables[variable].dimensions:
                Utils.copy_dimension(source, destiny, dimension, must_exist, new_names)
        if new_name in destiny.variables.keys():
            # Just in case the variable we are copying match a dimension name
            return
        original_var = source.variables[variable]
        new_var = destiny.createVariable(new_name, original_var.datatype, translated_dimensions)
        Utils.copy_attributes(new_var, original_var)
        new_var[:] = original_var[:]

    @staticmethod
    def copy_attributes(new_var, original_var, omitted_attributtes=None):
        """Copy all attributes from original_var to new_var, except those omitted."""
        if omitted_attributtes is None:
            omitted_attributtes = []
        new_var.setncatts({k: Utils.convert_to_ASCII_if_possible(original_var.getncattr(k))
                           for k in original_var.ncattrs() if k not in omitted_attributtes})

    @staticmethod
    def copy_dimension(source, destiny, dimension, must_exist=True, new_names=None):
        """
        Copies the given dimension from source to destiny, including dimension variables if present

        :param new_names: dictionary containing variables to rename and new name as key-value pairs
        :type new_names: dict
        :param source: origin file
        :type source: netCDF4.Dataset
        :param destiny: destiny file
        :type destiny: netCDF4.Dataset
        :param dimension: variable to copy
        :type dimension: str
        :param must_exist: if false, does not raise an error uf variable does not exist
        :type must_exist: bool
        :return:
        """
        if not must_exist and dimension not in source.dimensions.keys():
            return
        if not new_names:
            new_names = dict()
        if dimension in new_names:
            new_name = new_names[dimension]
        else:
            new_name = dimension
        if new_name in destiny.dimensions.keys():
            return
        if not new_name:
            new_name = dimension
        destiny.createDimension(new_name, source.dimensions[dimension].size)
        if dimension in source.variables:
            # Also copy the coordinate variable that shares the dimension name
            Utils.copy_variable(source, destiny, dimension, new_names=new_names)

    @staticmethod
    def concat_variables(source, destiny, remove_source=False):
        """
        Add variables from a nc file to another

        :param source: path to source file
        :type source: str
        :param destiny: path to destiny file
        :type destiny: str
        :param remove_source: if True, removes source file
        :type remove_source: bool
        """
        if os.path.exists(destiny):
            handler_total = Utils.openCdf(destiny)
            handler_variable = Utils.openCdf(source)
            concatenated = dict()
            for var in handler_variable.variables:
                if var not in handler_total.variables:
                    Utils.copy_variable(handler_variable, handler_total, var, add_dimensions=True)
                else:
                    variable = handler_variable.variables[var]
                    if 'time' not in variable.dimensions:
                        # Time-independent variables are assumed identical
                        continue
                    concatenated[var] = np.concatenate((handler_total.variables[var][:], variable[:]),
                                                       axis=variable.dimensions.index('time'))

            for var, array in six.iteritems(concatenated):
                handler_total.variables[var][:] = array
            handler_total.close()
            handler_variable.close()
            if remove_source:
                os.remove(source)
        else:
            if remove_source:
                Utils.move_file(source, destiny)
            else:
                shutil.copy(source, destiny)
            Utils.convert2netcdf4(destiny)

    class ExecutionError(Exception):
        """
        Exception to raise when a command execution fails
        """
        pass

    @classmethod
    def _translate(cls, dimensions, new_names):
        # Map each dimension name through new_names, keeping unmapped ones
        translated = list()
        for dim in dimensions:
            if dim in new_names:
                translated.append(new_names[dim])
            else:
                translated.append(dim)
        return translated

    @staticmethod
    def create_folder_tree(path):
        """
        Creates a folder path and parent directories if needed.

        :param path: folder's path
        :type path: str
        """
        if not os.path.exists(path):
            # noinspection PyBroadException
            try:
                os.makedirs(path)
            except Exception:
                # Here we can have a race condition. Let's check again for
                # existence and rethrow if still not exists
                if not os.path.isdir(path):
                    raise

    @staticmethod
    def give_group_write_permissions(path):
        """Add group write permission to path if it does not already have it."""
        st = os.stat(path)
        if st.st_mode & stat.S_IWGRP:
            return
        os.chmod(path, st.st_mode | stat.S_IWGRP)

    @staticmethod
    def convert_units(var_handler, new_units):
        """Convert the variable data (and valid_min/valid_max) to new_units in place."""
        if new_units == var_handler.units:
            return
        new_unit = Units(new_units)
        old_unit = Units(var_handler.units)
        var_handler[:] = Units.conform(var_handler[:], old_unit, new_unit, inplace=True)
        if 'valid_min' in var_handler.ncattrs():
            var_handler.valid_min = Units.conform(float(var_handler.valid_min), old_unit, new_unit,
                                                  inplace=True)
        if 'valid_max' in var_handler.ncattrs():
            var_handler.valid_max = Units.conform(float(var_handler.valid_max), old_unit, new_unit,
                                                  inplace=True)
        var_handler.units = new_units

    @staticmethod
    def untar(files, destiny_path):
        """
        Untar files to a given destiny

        :param files: files to unzip
        :type files: list[Any] | Tuple[Any]
        :param destiny_path: path to destination folder
        :type destiny_path: str
        """
        for filepath in files:
            Log.debug('Unpacking {0}', filepath)
            tar = tarfile.open(filepath)
            for file_compressed in tar.getmembers():
                if file_compressed.isdir():
                    if os.path.isdir(os.path.join(destiny_path, file_compressed.name)):
                        continue
                else:
                    # Remove any pre-existing file so extraction can not fail
                    # on permissions of the old copy
                    if os.path.exists(os.path.join(destiny_path, file_compressed.name)):
                        os.remove(os.path.join(destiny_path, file_compressed.name))
                tar.extract(file_compressed, destiny_path)
            tar.close()
@staticmethod def unzip(files, force=False): """ Unzip a list of files :param files: files to unzip :type files: list | str :param force: if True, it will overwrite unzipped files :type force: bool """ # noinspection PyTypeChecker if isinstance(files, six.string_types): files = [files] for filepath in files: Log.debug('Unzipping {0}', filepath) if force: option = ' -f' else: option = '' try: Utils.execute_shell_command('gunzip{1} {0}'.format(filepath, option)) except Exception as ex: raise Utils.UnzipException('Can not unzip {0}: {1}'.format(filepath, ex)) class UnzipException(Exception): """ Excpetion raised when unzip fails """ pass class TempFile(object): """ Class to manage temporal files """ autoclean = True """ If True, new temporary files are added to the list for future cleaning """ files = list() """ List of files to clean automatically """ scratch_folder = '' """ Scratch folder to create temporary files on it """ prefix = 'temp' """ Prefix for temporary filenames """ @staticmethod def get(filename=None, clean=None, suffix='.nc'): """ Gets a new temporal filename, storing it for automated cleaning :param suffix: :param filename: if it is not none, the function will use this filename instead of a random one :type filename: str :param clean: if true, stores filename for cleaning :type clean: bool :return: path to the temporal file :rtype: str """ if clean is None: clean = TempFile.autoclean if filename: path = os.path.join(TempFile.scratch_folder, filename) else: fd, path = tempfile.mkstemp(dir=TempFile.scratch_folder, prefix=TempFile.prefix, suffix=suffix) path = str(path) os.close(fd) if clean: TempFile.files.append(path) return path @staticmethod def clean(): """ Removes all temporary files created with Tempfile until now """ for temp_file in TempFile.files: if os.path.exists(temp_file): os.remove(temp_file) TempFile.files = list()