diff --git a/earthdiagnostics/datafile.py b/earthdiagnostics/datafile.py
index a3d0ad22ea89153d8ab96fe6d5d8d80e8de44464..7c903e1e057a4f538b0c060a450ff55ea4eca40b 100644
--- a/earthdiagnostics/datafile.py
+++ b/earthdiagnostics/datafile.py
@@ -648,11 +648,11 @@ class NetCDFFile(DataFile):
         Log.debug('Downloading file {0}...', self.remote_file)
         if not self.local_file:
             self.local_file = TempFile.get()
-        Utils.get_file_hash(self.remote_file, use_stored=True, save=True)
+        # Utils.get_file_hash(self.remote_file, use_stored=True, save=True)
         try:
             Utils.copy_file(self.remote_file, self.local_file, retrials=1)
         except Utils.CopyException:
-            Utils.get_file_hash(self.remote_file, use_stored=False, save=True)
+            # Utils.get_file_hash(self.remote_file, use_stored=False, save=True)
             Utils.copy_file(self.remote_file, self.local_file, retrials=2)
 
         if self.data_convention == 'meteofrance':
diff --git a/earthdiagnostics/ocean/regionmean.py b/earthdiagnostics/ocean/regionmean.py
index c70c950085fcef2dc3072608baebe6c67a5b348a..89a14fbcf5da07a944e90b71c05b04da88a75f9f 100644
--- a/earthdiagnostics/ocean/regionmean.py
+++ b/earthdiagnostics/ocean/regionmean.py
@@ -223,6 +223,7 @@ class RegionMean(Diagnostic):
                 var_name='j'), dims - 2)
         return cube
 
+
     def _load_data(self):
         coords = []
         handler = Utils.open_cdf(self.variable_file.local_file)
diff --git a/earthdiagnostics/ocean/siasiesiv.py b/earthdiagnostics/ocean/siasiesiv.py
index f623f48361126d85c4c5e1894c98ac4f03bbd681..94c65d3c98dcc1039169474af7f6f0c8dc034904 100644
--- a/earthdiagnostics/ocean/siasiesiv.py
+++ b/earthdiagnostics/ocean/siasiesiv.py
@@ -70,6 +70,9 @@ class Siasiesiv(Diagnostic):
                'Basins: {1} Omit volume: {0.omit_volume}'.format(
                    self, ','.join(str(basin) for basin in self.masks.keys()))
 
+    def __hash__(self):
+        return hash(str(self))
+
     @classmethod
     def generate_jobs(cls, diags, options):
         """
@@ -140,8 +143,10 @@ class Siasiesiv(Diagnostic):
             self.sic.local_file, self.sic_varname
         )
         sic = iris.load_cube(self.sic.local_file)
-        if sic.units.origin == '%' and sic.data.max() < 2:
-            sic.units = '1.0'
+        # if sic.units.origin == '%' and sic.data.max() < 2:
+        #     sic.units = '1.0'
+        if sic.units.origin != '%':
+            sic.convert_units('%')
 
         sic_slices = []
         for sic_data in sic.slices_over('time'):
@@ -150,8 +155,17 @@ class Siasiesiv(Diagnostic):
             sic_data.data = np.ma.filled(sic_data.data, 0.0).astype(np.float32)
             sic_slices.append(sic_data)
         mesh = Nemo('mesh_hgr.nc', 'mask_regions.nc')
         areacello = mesh.get_areacello(cell_point='T')
         gphit = mesh.get_grid_latitude(cell_point='T')
-        self.results['siextentn'], self.results['siextents'], self.results['siarean'], self.results['siareas'] = \
-            siasie.compute(gphit, areacello, sic_slices, self.masks)
+
+        if not self.omit_volume:
+            sit = iris.load_cube(self.sit.local_file)
+            sit_slices = []
+            for sit_data in sit.slices_over('time'):
+                sit_data.data = np.ma.filled(sit_data.data, 0.0).astype(np.float32)
+                sit_slices.append(sit_data)
+            self.results['siextentn'], self.results['siextents'], self.results['siarean'], self.results['siareas'], self.results['sivoln'], self.results['sivols'] = siasie.compute(gphit, areacello, sic_slices, self.masks, sit_slices)
+        else:
+            self.results['siextentn'], self.results['siextents'], self.results['siarean'], self.results['siareas'] = siasie.compute(gphit, areacello, sic_slices, self.masks, None)
+
         self.save()
@@ -166,6 +180,7 @@ class Siasiesiv(Diagnostic):
             ' '.join(set(coordinates) | add_coordinates)
         handler.close()
 
+
     def save(self):
         for var in self.results.keys():
             res = self.results[var]
@@ -175,15 +190,18 @@ class Siasiesiv(Diagnostic):
             Utils.copy_variable(handler_source, handler_temp, 'time', True, True)
             handler_temp.createDimension('region', len(self.masks))
             handler_temp.createDimension('region_length', 50)
-            var_region = handler_temp.createVariable(
-                'region', 'S1', ('region', 'region_length')
-            )
-            var_res = handler_temp.createVariable(
-                '{0}'.format(var), float, ('time', 'region',)
-            )
-            var_res.units = 'm^2'
+            var_region = handler_temp.createVariable('region', 'S1',
+                                                     ('region', 'region_length'))
+            var_res = handler_temp.createVariable('{0}'.format(var), float,
+                                                  ('time', 'region',))
+            if var in ('sivoln', 'sivols'):
+                var_res.units = 'm^3'
+            else:
+                var_res.units = 'm^2'
             for i, basin in enumerate(self.masks):
                 var_region[i, ...] = netCDF4.stringtoarr(str(basin), 50)
                 var_res[..., i] = res[i, ...]
             handler_temp.close()
             self.generated[var].set_local_file(temp, diagnostic=self)
+
+
diff --git a/earthdiagnostics/utils.py b/earthdiagnostics/utils.py
index c08c1f08b84e1cf27fbcdce11e6e8dc3bde99274..0920a3657014f9d13d2af41754ff39e6537d524d 100644
--- a/earthdiagnostics/utils.py
+++ b/earthdiagnostics/utils.py
@@ -328,21 +328,21 @@ class Utils(object):
                 # This can be due to a race condition. If directory already exists, we don have to do nothing
                 if not os.path.exists(dirname_path):
                     raise ex
-        hash_destiny = None
-        Log.debug('Hashing original file... {0}', datetime.datetime.now())
-        hash_original = Utils.get_file_hash(source, use_stored=use_stored_hash)
-
-        if retrials < 1:
-            retrials = 1
-
-        while hash_original != hash_destiny:
-            if retrials == 0:
-                raise Utils.CopyException('Can not copy {0} to {1}'.format(source, destiny))
-            Log.debug('Copying... {0}', datetime.datetime.now())
-            shutil.copyfile(source, destiny)
-            Log.debug('Hashing copy ... {0}', datetime.datetime.now())
-            hash_destiny = Utils.get_file_hash(destiny, save=save_hash)
-            retrials -= 1
+        # hash_destiny = None
+        # Log.debug('Hashing original file... {0}', datetime.datetime.now())
+        # hash_original = Utils.get_file_hash(source, use_stored=use_stored_hash)
+
+        # if retrials < 1:
+        #     retrials = 1
+
+        # while hash_original != hash_destiny:
+        #     if retrials == 0:
+        #         raise Utils.CopyException('Can not copy {0} to {1}'.format(source, destiny))
+        Log.debug('Copying... {0}', datetime.datetime.now())
+        shutil.copyfile(source, destiny)
+        # Log.debug('Hashing copy ... {0}', datetime.datetime.now())
+        # hash_destiny = Utils.get_file_hash(destiny, save=save_hash)
+        # retrials -= 1
         Log.debug('Finished {0}', datetime.datetime.now())
 
     @staticmethod
@@ -426,8 +426,8 @@ class Utils(object):
             time.sleep(2)
         shutil.rmtree(source)
 
-    @staticmethod
-    def get_file_hash(filepath, use_stored=False, save=False):
+    # @staticmethod
+    # def get_file_hash(filepath, use_stored=False, save=False):
         """
         Get the xxHash hash for a given file
 
@@ -439,33 +439,33 @@ class Utils(object):
         save: bool, optional
             If True, saves the hash to a file
         """
-        if use_stored:
-            hash_file = Utils._get_hash_filename(filepath)
-            if os.path.isfile(hash_file):
-                hash_value = open(hash_file, 'r').readline()
-                return hash_value
-
-        blocksize = 104857600
-        hasher = xxhash.xxh64()
-        with open(filepath, 'rb') as afile:
-            buf = afile.read(blocksize)
-            while len(buf) > 0:
-                hasher.update(buf)
-                buf = afile.read(blocksize)
-        hash_value = hasher.hexdigest()
-        if save:
-            hash_file = open(Utils._get_hash_filename(filepath), 'w')
-            hash_file.write(hash_value)
-            hash_file.close()
-
-        return hash_value
-
-    @staticmethod
-    def _get_hash_filename(filepath):
-        folder = os.path.dirname(filepath)
-        filename = os.path.basename(filepath)
-        hash_file = os.path.join(folder, '.{0}.xxhash64.hash'.format(filename))
-        return hash_file
+        # if use_stored:
+        #     hash_file = Utils._get_hash_filename(filepath)
+        #     if os.path.isfile(hash_file):
+        #         hash_value = open(hash_file, 'r').readline()
+        #         return hash_value
+
+        # blocksize = 104857600
+        # hasher = xxhash.xxh64()
+        # with open(filepath, 'rb') as afile:
+        #     buf = afile.read(blocksize)
+        #     while len(buf) > 0:
+        #         hasher.update(buf)
+        #         buf = afile.read(blocksize)
+        # hash_value = hasher.hexdigest()
+        # if save:
+        #     hash_file = open(Utils._get_hash_filename(filepath), 'w')
+        #     hash_file.write(hash_value)
+        #     hash_file.close()
+
+        # return hash_value
+
+    # @staticmethod
+    # def _get_hash_filename(filepath):
+    #     folder = os.path.dirname(filepath)
+    #     filename = os.path.basename(filepath)
+    #     hash_file = os.path.join(folder, '.{0}.xxhash64.hash'.format(filename))
+    #     return hash_file
 
     @staticmethod
     def execute_shell_command(command, log_level=Log.DEBUG):
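
A note on the units check in Siasiesiv.compute above: the comparison uses != rather than
'is not' because 'is' tests object identity, not textual equality. A unit string built at
runtime is generally a distinct object from the literal even when the text matches, so an
identity test can silently skip the conversion (CPython 3.8+ also emits a SyntaxWarning
for 'is' against a literal). A minimal, standalone demonstration:

    units = ''.join(['1', '.', '0'])  # same text as the literal, but a new object
    print(units == '1.0')             # True: equality compares the text
    print(units is '1.0')             # typically False: identity compares objects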
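
The dropped heuristic (units.origin == '%' with a maximum below 2 meaning mislabelled
fraction data) is replaced by delegating to iris: Cube.convert_units rescales the data
together with the unit when the source unit is convertible, and raises when it is not.
A minimal sketch of that behaviour, using a made-up dimensionless cube rather than a
real sea-ice concentration field:

    import numpy as np
    import iris.cube

    cube = iris.cube.Cube(np.array([0.0, 0.5, 1.0]), units='1.0')
    cube.convert_units('%')  # udunits defines '%' as the dimensionless factor 0.01
    print(cube.data)         # [  0.  50. 100.] -- data rescaled with the unit

Note that this path trusts the recorded metadata: data stored as a fraction but already
labelled '%' (the case the old check caught) now passes through unscaled.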