From 9db135dde43ba543b272c34421c67b6687ed8fbe Mon Sep 17 00:00:00 2001 From: Johanna Gehlen Date: Tue, 4 Feb 2025 15:39:25 +0100 Subject: [PATCH 1/5] function to convert longitudes from [0, 360] to [-180, 180] --- nes/nc_projections/default_nes.py | 207 ++++++++++++++++++++++-------- 1 file changed, 157 insertions(+), 50 deletions(-) diff --git a/nes/nc_projections/default_nes.py b/nes/nc_projections/default_nes.py index a5533e4..5a14f10 100644 --- a/nes/nc_projections/default_nes.py +++ b/nes/nc_projections/default_nes.py @@ -1,11 +1,12 @@ #!/usr/bin/env python -import sys +import os, sys from gc import collect from warnings import warn +from math import isclose from numpy import (array, ndarray, abs, mean, diff, dstack, append, tile, empty, unique, stack, vstack, full, isnan, flipud, nan, float32, float64, ma, generic, character, issubdtype, arange, newaxis, concatenate, - split, cumsum, zeros, column_stack) + split, cumsum, zeros, column_stack, argsort, take) from pandas import Index, concat from geopandas import GeoDataFrame from datetime import timedelta, datetime @@ -206,7 +207,7 @@ class Nes(object): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : List[datetime] or None List of times to substitute the current ones while creation. @@ -301,7 +302,7 @@ class Nes(object): # Lazy variables self.variables = self._get_lazy_variables() - + # Complete dimensions self._full_time = self.__get_time() self._full_time_bnds = self.__get_time_bnds() @@ -373,7 +374,7 @@ class Nes(object): @staticmethod def new(comm=None, path=None, info=False, dataset=None, parallel_method="Y", - avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ Initialize the Nes class. @@ -402,7 +403,7 @@ class Nes(object): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : List[datetime] or None List of times to substitute the current ones while creation. @@ -422,13 +423,13 @@ class Nes(object): ------- int Max length of the string data - """ - + """ + if "strlen" in self.dataset.dimensions: strlen = self.dataset.dimensions["strlen"].size else: return None - + return strlen def set_strlen(self, strlen=75): @@ -442,7 +443,7 @@ class Nes(object): strlen : int or None Max length of the string """ - + self.strlen = strlen return None @@ -960,7 +961,7 @@ class Nes(object): self.read_axis_limits = self._get_read_axis_limits() self.write_axis_limits = self._get_write_axis_limits() - + return None def set_climatology(self, is_climatology): @@ -1098,11 +1099,11 @@ class Nes(object): ---------- coordinates : array Coordinates in degrees (latitude or longitude). - inc : float + inc : float Increment between centre values. - spatial_nv : int + spatial_nv : int Non-mandatory parameter that informs the number of vertices that the boundaries must have. Default: 2. - inverse : bool + inverse : bool For some grid latitudes. 
Returns @@ -1110,7 +1111,7 @@ class Nes(object): bounds : array An Array with as many elements as vertices for each value of coords. """ - + # Create new arrays moving the centres half increment less and more. coords_left = coordinates - inc / 2 coords_right = coordinates + inc / 2 @@ -1300,7 +1301,7 @@ class Nes(object): aux_nessy.variables[var_name][att_name] = att_value else: aux_nessy.variables[var_name]["data"] = aux_nessy.variables[var_name]["data"][[idx_time]] - + return aux_nessy def sel(self, hours_start=None, time_min=None, hours_end=None, time_max=None, lev_min=None, lev_max=None, @@ -1450,7 +1451,7 @@ class Nes(object): self.lon_min = None return None - + def _get_projection_data(self, create_nes, **kwargs): """ Retrieves projection data based on grid details. @@ -1804,7 +1805,7 @@ class Nes(object): # ================================================================================================================== # Reading # ================================================================================================================== - + def _get_read_axis_limits(self): """ Calculate the 4D reading axis limits depending on if them have to balanced or not. @@ -1836,7 +1837,7 @@ class Nes(object): "y_min": None, "y_max": None, "z_min": None, "z_max": None, "t_min": None, "t_max": None} - + idx = self._get_idx_intervals() if self.parallel_method == "Y": y_len = idx["idx_y_max"] - idx["idx_y_min"] @@ -1855,7 +1856,7 @@ class Nes(object): axis_limits["t_min"] = idx["idx_t_min"] axis_limits["t_max"] = idx["idx_t_max"] - + elif self.parallel_method == "X": x_len = idx["idx_x_max"] - idx["idx_x_min"] if x_len < self.size: @@ -1873,7 +1874,7 @@ class Nes(object): axis_limits["t_min"] = idx["idx_t_min"] axis_limits["t_max"] = idx["idx_t_max"] - + elif self.parallel_method == "T": t_len = idx["idx_t_max"] - idx["idx_t_min"] if t_len < self.size: @@ -2616,10 +2617,10 @@ class Nes(object): else: raise NotImplementedError("Error with {0}. Only can be read netCDF with 4 dimensions or less".format( var_name)) - + # Unmask array data = self._unmask_array(data) - + return data def load(self, var_list=None): @@ -2633,7 +2634,7 @@ class Nes(object): var_list : List, str, None List (or single string) of the variables to be loaded. """ - + if (self.__ini_path is None) and (self.dataset is None): raise RuntimeError("Only data from existing files can be loaded.") @@ -2674,7 +2675,7 @@ class Nes(object): @staticmethod def _unmask_array(data): - """ + """ Missing to nan. This operation is done because sometimes the missing value is lost during the calculation. Parameters @@ -2687,7 +2688,7 @@ class Nes(object): array Unmasked array. """ - + if isinstance(data, ma.MaskedArray): try: data = data.filled(nan) @@ -2706,7 +2707,7 @@ class Nes(object): data_type : str or Type Data type, by default "float32" """ - + for var_name, var_info in self.variables.items(): if isinstance(var_info["data"], ndarray): self.variables[var_name]["data"] = self.variables[var_name]["data"].astype(data_type) @@ -2778,7 +2779,7 @@ class Nes(object): # ================================================================================================================== # Writing # ================================================================================================================== - + def _get_write_axis_limits(self): """ Calculate the 4D writing axis limits depending on if them have to balanced or not. 
@@ -3225,26 +3226,26 @@ class Nes(object): var_dtype = var_dict["data"].dtype if var_dtype is object: raise TypeError("Data dtype is object. Define dtype explicitly as dictionary key 'dtype'") - + if var_dict["data"] is not None: - + # Ensure data is of type numpy array (to create NES) if not isinstance(var_dict["data"], (ndarray, generic)): try: var_dict["data"] = array(var_dict["data"]) except AttributeError: raise AttributeError("Data for variable {0} must be a numpy array.".format(var_name)) - + # Convert list of strings to chars for parallelization if issubdtype(var_dtype, character): var_dict["data_aux"] = self._str2char(var_dict["data"]) var_dims += ("strlen",) var_dtype = "S1" - + if self.info: print("Rank {0:03d}: Writing {1} var ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) - + if not chunking: var = netcdf.createVariable(var_name, var_dtype, var_dims, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) @@ -3280,7 +3281,7 @@ class Nes(object): self.write_axis_limits["z_min"]:self.write_axis_limits["z_max"], self.write_axis_limits["y_min"]:self.write_axis_limits["y_max"], self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"]] = 0 - + elif len(att_value.shape) == 5: if "strlen" in var_dims: var[self.write_axis_limits["t_min"]:self.write_axis_limits["t_max"], @@ -3304,11 +3305,11 @@ class Nes(object): :] = att_value else: raise NotImplementedError("It is not possible to write 3D variables.") - + if self.info: print("Rank {0:03d}: Var {1} data ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) - + elif att_name not in ["chunk_size", "var_dims", "dimensions", "dtype", "data_aux"]: var.setncattr(att_name, att_value) @@ -3702,7 +3703,7 @@ class Nes(object): shapefile : GeoPandasDataFrame Shapefile dataframe. """ - + if self.shapefile is None: if self.lat_bnds is None or self.lon_bnds is None: @@ -3743,14 +3744,14 @@ class Nes(object): fids = self.get_fids() gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=geometry, crs="EPSG:4326") self.shapefile = gdf - + else: gdf = self.shapefile - + return gdf def write_shapefile(self, path): - """ + """ Save spatial GeoDataFrame (shapefile). Parameters @@ -3758,10 +3759,10 @@ class Nes(object): path : str Path to the output file. """ - + if self.shapefile is None: raise ValueError("Shapefile was not created.") - + if self.size == 1: # In serial, avoid gather self.shapefile.to_file(path) @@ -3771,13 +3772,13 @@ class Nes(object): if self.master: data = concat(data) data.to_file(path) - + return None def to_shapefile(self, path, time=None, lev=None, var_list=None, info=True): """ Create shapefile from NES data. - + 1. Create grid shapefile. 2. Add variables to shapefile (as independent function). 3. Write shapefile. @@ -3795,7 +3796,7 @@ class Nes(object): info: bool Flag to allow/suppress warnings when the 'time' or 'lev' parameters are None. Default is True. """ - + # If list is not defined, get all variables if var_list is None: var_list = list(self.variables.keys()) @@ -3835,7 +3836,7 @@ class Nes(object): if time not in self.time: raise ValueError("Time {} is not available. 
Choose from {}".format(time, self.time)) idx_time = self.time.index(time) - + # Create shapefile self.create_shapefile() @@ -3880,7 +3881,7 @@ class Nes(object): for lon_ind in range(0, len(self.lon["data"])): centroids.append(Point(self.lon["data"][lon_ind], self.lat["data"][lat_ind])) - + # Create dataframe containing all points fids = self.get_fids() centroids_gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=centroids, crs="EPSG:4326") @@ -4155,9 +4156,9 @@ class Nes(object): keep_nan : bool Indicates if you want to keep nan values after the interpolation """ - + return horizontal_interpolation.interpolate_horizontal( - self, dst_grid, weight_matrix_path=weight_matrix_path, kind=kind, n_neighbours=n_neighbours, info=info, + self, dst_grid, weight_matrix_path=weight_matrix_path, kind=kind, n_neighbours=n_neighbours, info=info, to_providentia=to_providentia, only_create_wm=only_create_wm, wm=wm, flux=flux, keep_nan=keep_nan) def spatial_join(self, ext_shp, method=None, var_list=None, info=False, apply_bbox=True): @@ -4199,7 +4200,7 @@ class Nes(object): self.cell_measures["cell_area"] = {"data": grid_area} else: grid_area = self.cell_measures["cell_area"]["data"] - + return grid_area @staticmethod @@ -4264,3 +4265,109 @@ class Nes(object): Dictionary with data of grid edge longitudes. """ raise NotImplementedError("create_providentia_exp_grid_edge_coordinates function is not implemented by default") + + def _detect_longitude_format(self): + """ + Determines whether longitude values are in the [0, 360] or [-180, 180] range. + + Returns + --------- + bool: True if longitudes are in [0, 360], False otherwise. + """ + longitudes = self.lon["data"] + longitudes = array(longitudes) + if all((longitudes >= 0) & (longitudes <= 360)): + return True + elif all((longitudes >= -180) & (longitudes <= 180)): + return False + else: + return False + + def _check_if_data_is_loaded(self): + """ + Verifies that data is loaded for all variables. + + Raises + ------- + ValueError: If any variable's data is missing. + """ + # check if data is loaded + for variable in self.variables.keys(): + if self.variables[variable]["data"] is None: + raise ValueError(f"All variables data must be loaded before using this function. Data for {variable} is not loaded.") + + def convert_lon_360_to_180(self, path): + """ + Converts longitudes from the [0, 360] range to the [-180, 180] range. + + Parameters + ------------ + path (str): The file path where the converted data will be saved. + + Raises + -------- + ValueError: If the method is run in parallel processing mode. + ValueError: If longitudes are already in [-180, 180] format or an unrecognized format. + ValueError: If data is not fully loaded before conversion. + """ + # make this only available in serial. 
+        if self.comm.size > 1:
+            raise ValueError("This method is currently only available in serial.")
+
+        # check if the longitude flip is needed
+        if not self._detect_longitude_format():
+            raise ValueError("Longitudes are already in [-180, 180] format or another unrecognised format.")
+
+        # check if data is loaded
+        self._check_if_data_is_loaded()
+
+        # make a copy
+        dst = deepcopy(self)
+
+        # copy global attributes and dimensions
+        dst.global_attrs = self.global_attrs
+        dst.lat = self.lat
+        dst.lon = self.lon
+        dst.time = self.time
+        dst.lat_bnds = self.lat_bnds
+        dst.lon_bnds = self.lon_bnds
+        dst.lev = self.lev
+
+        # adjust longitude values and sort them into ascending order
+        lon = self.lon
+        lon_data = lon["data"]
+        lon_data = lon_data % 360
+        lon_data[lon_data > 180] -= 360
+        sorted_indices = argsort(lon_data)
+        dst.lon["data"] = lon_data[sorted_indices]
+        dst.set_full_longitudes(dst.lon)
+
+        # adjust longitude bounds
+        lon_bnds_data = dst.lon_bnds["data"]
+        lon_bnds_data = lon_bnds_data % 360
+        lon_bnds_data[lon_bnds_data > 180] -= 360
+        lon_bnds_sorted = lon_bnds_data[sorted_indices]
+
+        if (lon_bnds_sorted[0][0] > lon_bnds_sorted[0][1]) and (isclose(lon_bnds_sorted[0][0], 180)):
+            lon_bnds_sorted[0][0] = -180
+        elif (lon_bnds_sorted[-1][0] > lon_bnds_sorted[-1][1]) and (isclose(lon_bnds_sorted[-1][1], -180)):
+            lon_bnds_sorted[-1][1] = 180
+
+        dst.lon_bnds["data"] = lon_bnds_sorted
+        dst.set_full_longitudes_boundaries(dst.lon_bnds)
+
+        # copy and reorder the remaining variables along the longitude axis
+        for name, var in dst.variables.items():
+            dst.load(name)  # TODO: is it necessary?
+            dst.variables[name] = var
+            if "longitude" in var["dimensions"]:
+                data = var["data"]
+                reordered_data = take(data, sorted_indices, axis=3)
+                dst.variables[name]["data"] = reordered_data
+            else:
+                dst.variables[name] = var
+
+        # write to a temporary file, then move it to the target path
+        tmp_file_path = "tmp.nc"
+        dst.to_netcdf(tmp_file_path)
+        os.replace(tmp_file_path, path)
--
GitLab


From 6dff6b32717607fd0e6e48e1e401e2797cd77ca4 Mon Sep 17 00:00:00 2001
From: Johanna Gehlen
Date: Tue, 4 Feb 2025 16:55:24 +0100
Subject: [PATCH 2/5] change function name

---
 nes/nc_projections/default_nes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nes/nc_projections/default_nes.py b/nes/nc_projections/default_nes.py
index 5a14f10..5faf81e 100644
--- a/nes/nc_projections/default_nes.py
+++ b/nes/nc_projections/default_nes.py
@@ -4296,7 +4296,7 @@ class Nes(object):
             if self.variables[variable]["data"] is None:
                 raise ValueError(f"All variables data must be loaded before using this function. Data for {variable} is not loaded.")

-    def convert_lon_360_to_180(self, path):
+    def convert_longitudes(self, path):
         """
         Converts longitudes from the [0, 360] range to the [-180, 180] range.
-- GitLab From e9072baad1d5fdd47d2e4d3f5d3bd12ce08a146e Mon Sep 17 00:00:00 2001 From: Johanna Gehlen Date: Tue, 4 Feb 2025 17:14:19 +0100 Subject: [PATCH 3/5] add to init file --- nes/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nes/__init__.py b/nes/__init__.py index 1dcabe9..be4838e 100644 --- a/nes/__init__.py +++ b/nes/__init__.py @@ -1,7 +1,7 @@ __date__ = "2024-10-07" __version__ = "1.1.8" __all__ = [ - 'open_netcdf', 'concatenate_netcdfs', 'create_nes', 'from_shapefile', 'calculate_geometry_area', 'Nes', 'LatLonNes', + 'open_netcdf', 'convert_longitudes', 'concatenate_netcdfs', 'create_nes', 'from_shapefile', 'calculate_geometry_area', 'Nes', 'LatLonNes', 'LCCNes', 'RotatedNes', 'RotatedNestedNes', 'MercatorNes', 'PointsNesProvidentia', 'PointsNesGHOST', 'PointsNes' ] -- GitLab From c1e8f85e890d5700c8580582d70ea19687618957 Mon Sep 17 00:00:00 2001 From: Johanna Gehlen Date: Tue, 4 Feb 2025 17:15:54 +0100 Subject: [PATCH 4/5] nvm --- nes/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nes/__init__.py b/nes/__init__.py index be4838e..1dcabe9 100644 --- a/nes/__init__.py +++ b/nes/__init__.py @@ -1,7 +1,7 @@ __date__ = "2024-10-07" __version__ = "1.1.8" __all__ = [ - 'open_netcdf', 'convert_longitudes', 'concatenate_netcdfs', 'create_nes', 'from_shapefile', 'calculate_geometry_area', 'Nes', 'LatLonNes', + 'open_netcdf', 'concatenate_netcdfs', 'create_nes', 'from_shapefile', 'calculate_geometry_area', 'Nes', 'LatLonNes', 'LCCNes', 'RotatedNes', 'RotatedNestedNes', 'MercatorNes', 'PointsNesProvidentia', 'PointsNesGHOST', 'PointsNes' ] -- GitLab From d00663fa4243b81c1984c85795245f01df72c4b4 Mon Sep 17 00:00:00 2001 From: Johanna Gehlen Date: Tue, 4 Feb 2025 17:35:55 +0100 Subject: [PATCH 5/5] comment out the var.coordinates attribute to avoid the ncview warning --- nes/nc_projections/latlon_nes.py | 12 +++++----- nes/nc_projections/lcc_nes.py | 32 ++++++++++++------------- nes/nc_projections/mercator_nes.py | 38 +++++++++++++++--------------- nes/nc_projections/points_nes.py | 12 +++++----- nes/nc_projections/rotated_nes.py | 26 ++++++++++---------- 5 files changed, 60 insertions(+), 60 deletions(-) diff --git a/nes/nc_projections/latlon_nes.py b/nes/nc_projections/latlon_nes.py index 35d68c8..d7902f8 100644 --- a/nes/nc_projections/latlon_nes.py +++ b/nes/nc_projections/latlon_nes.py @@ -50,12 +50,12 @@ class LatLonNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. """ - + super(LatLonNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, parallel_method=parallel_method, balanced=balanced, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, @@ -78,7 +78,7 @@ class LatLonNes(Nes): @staticmethod def new(comm=None, path=None, info=False, dataset=None, parallel_method="Y", - avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ Initialize the Nes class. 
@@ -107,14 +107,14 @@ class LatLonNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. """ new = LatLonNes(comm=comm, path=path, info=info, dataset=dataset, - parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, + parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, create_nes=create_nes, balanced=balanced, times=times, **kwargs) @@ -352,7 +352,7 @@ class LatLonNes(Nes): """ var.grid_mapping = "crs" - var.coordinates = "lat lon" + # var.coordinates = "lat lon" return None diff --git a/nes/nc_projections/lcc_nes.py b/nes/nc_projections/lcc_nes.py index f9eda6e..2aea912 100644 --- a/nes/nc_projections/lcc_nes.py +++ b/nes/nc_projections/lcc_nes.py @@ -63,7 +63,7 @@ class LCCNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. @@ -89,7 +89,7 @@ class LCCNes(Nes): # Dimensions screening self.y = self._get_coordinate_values(self.get_full_y(), "Y") self.x = self._get_coordinate_values(self.get_full_x(), "X") - + # Set axis limits for parallel writing self.write_axis_limits = self._get_write_axis_limits() @@ -101,7 +101,7 @@ class LCCNes(Nes): @staticmethod def new(comm=None, path=None, info=False, dataset=None, parallel_method="Y", - avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ Initialize the Nes class. @@ -130,7 +130,7 @@ class LCCNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. @@ -261,7 +261,7 @@ class LCCNes(Nes): ) return projection - + def _get_projection_data(self, create_nes, **kwargs): """ Retrieves projection data based on grid details. 
@@ -467,12 +467,12 @@ class LCCNes(Nes): # Get edges for regular coordinates grid_edge_lon_data, grid_edge_lat_data = self.projection(x_grid_edge, y_grid_edge, inverse=True) - + # Create grid outline by stacking the edges in both coordinates model_grid_outline = vstack((grid_edge_lon_data, grid_edge_lat_data)).T grid_edge_lat = {"data": model_grid_outline[:, 1]} grid_edge_lon = {"data": model_grid_outline[:, 0]} - + return grid_edge_lat, grid_edge_lon # noinspection DuplicatedCode @@ -507,7 +507,7 @@ class LCCNes(Nes): :]} return None - + @staticmethod def _set_var_crs(var): """ @@ -520,7 +520,7 @@ class LCCNes(Nes): """ var.grid_mapping = "Lambert_Conformal" - var.coordinates = "lat lon" + # var.coordinates = "lat lon" return None @@ -567,13 +567,13 @@ class LCCNes(Nes): def create_shapefile(self): """ Create spatial GeoDataFrame (shapefile). - + Returns ------- shapefile : GeoPandasDataFrame Shapefile dataframe. """ - + if self.shapefile is None: # Get latitude and longitude cell boundaries @@ -594,7 +594,7 @@ class LCCNes(Nes): (aux_b_lon[i, 2], aux_b_lat[i, 2]), (aux_b_lon[i, 3], aux_b_lat[i, 3]), (aux_b_lon[i, 0], aux_b_lat[i, 0])])) - + # Create dataframe containing all polygons fids = self.get_fids() gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=geometry, crs="EPSG:4326") @@ -602,7 +602,7 @@ class LCCNes(Nes): else: gdf = self.shapefile - + return gdf # noinspection DuplicatedCode @@ -615,14 +615,14 @@ class LCCNes(Nes): centroids_gdf: GeoPandasDataFrame Centroids dataframe. """ - + # Get centroids from coordinates centroids = [] for lat_ind in range(0, self.lon["data"].shape[0]): for lon_ind in range(0, self.lon["data"].shape[1]): - centroids.append(Point(self.lon["data"][lat_ind, lon_ind], + centroids.append(Point(self.lon["data"][lat_ind, lon_ind], self.lat["data"][lat_ind, lon_ind])) - + # Create dataframe containing all points fids = self.get_fids() centroids_gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=centroids, crs="EPSG:4326") diff --git a/nes/nc_projections/mercator_nes.py b/nes/nc_projections/mercator_nes.py index 520f9bb..ef63cb7 100644 --- a/nes/nc_projections/mercator_nes.py +++ b/nes/nc_projections/mercator_nes.py @@ -57,7 +57,7 @@ class MercatorNes(Nes): avoid_last_hours : int Number of hours to remove from last time steps. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. first_level : int Index of the first level to use. @@ -86,7 +86,7 @@ class MercatorNes(Nes): # Complete dimensions self._full_y = self._get_coordinate_dimension("y") self._full_x = self._get_coordinate_dimension("x") - + # Dimensions screening self.y = self._get_coordinate_values(self.get_full_y(), "Y") self.x = self._get_coordinate_values(self.get_full_x(), "X") @@ -131,7 +131,7 @@ class MercatorNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. 
@@ -252,7 +252,7 @@ class MercatorNes(Nes): b=self.earth_radius[0], lat_ts=float64(self.projection_data["standard_parallel"]), lon_0=float64(self.projection_data["longitude_of_projection_origin"]),) - + return projection # noinspection DuplicatedCode @@ -449,12 +449,12 @@ class MercatorNes(Nes): # Get edges for regular coordinates grid_edge_lon_data, grid_edge_lat_data = self.projection(x_grid_edge, y_grid_edge, inverse=True) - + # Create grid outline by stacking the edges in both coordinates model_grid_outline = vstack((grid_edge_lon_data, grid_edge_lat_data)).T grid_edge_lat = {"data": model_grid_outline[:, 1]} grid_edge_lon = {"data": model_grid_outline[:, 0]} - + return grid_edge_lat, grid_edge_lon # noinspection DuplicatedCode @@ -480,15 +480,15 @@ class MercatorNes(Nes): # Obtain regular coordinates bounds self.set_full_latitudes_boundaries({"data": deepcopy(lat_bnds)}) self.lat_bnds = {"data": lat_bnds[self.write_axis_limits["y_min"]:self.write_axis_limits["y_max"], - self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"], + self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"], :]} self.set_full_longitudes_boundaries({"data": deepcopy(lon_bnds)}) self.lon_bnds = {"data": lon_bnds[self.write_axis_limits["y_min"]:self.write_axis_limits["y_max"], - self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"], + self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"], :]} return None - + @staticmethod def _set_var_crs(var): """ @@ -499,9 +499,9 @@ class MercatorNes(Nes): var : Variable netCDF4-python variable object. """ - + var.grid_mapping = "mercator" - var.coordinates = "lat lon" + # var.coordinates = "lat lon" return None @@ -520,7 +520,7 @@ class MercatorNes(Nes): mapping.grid_mapping_name = self.projection_data["grid_mapping_name"] mapping.standard_parallel = self.projection_data["standard_parallel"] mapping.longitude_of_projection_origin = self.projection_data["longitude_of_projection_origin"] - + return None def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=False, info=False): @@ -540,7 +540,7 @@ class MercatorNes(Nes): info : bool Indicates if you want to print extra information during the process. """ - + raise NotImplementedError("Grib2 format cannot be written in a Mercator projection.") # noinspection DuplicatedCode @@ -555,7 +555,7 @@ class MercatorNes(Nes): """ if self.shapefile is None: - + # Get latitude and longitude cell boundaries if self.lat_bnds is None or self.lon_bnds is None: self.create_spatial_bounds() @@ -574,7 +574,7 @@ class MercatorNes(Nes): (aux_b_lon[i, 2], aux_b_lat[i, 2]), (aux_b_lon[i, 3], aux_b_lat[i, 3]), (aux_b_lon[i, 0], aux_b_lat[i, 0])])) - + # Create dataframe containing all polygons fids = self.get_fids() gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=geometry, crs="EPSG:4326") @@ -582,7 +582,7 @@ class MercatorNes(Nes): else: gdf = self.shapefile - + return gdf # noinspection DuplicatedCode @@ -595,14 +595,14 @@ class MercatorNes(Nes): centroids_gdf: GeoPandasDataFrame Centroids dataframe. 
""" - + # Get centroids from coordinates centroids = [] for lat_ind in range(0, self.lon["data"].shape[0]): for lon_ind in range(0, self.lon["data"].shape[1]): - centroids.append(Point(self.lon["data"][lat_ind, lon_ind], + centroids.append(Point(self.lon["data"][lat_ind, lon_ind], self.lat["data"][lat_ind, lon_ind])) - + # Create dataframe containing all points fids = self.get_fids() centroids_gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=centroids, crs="EPSG:4326") diff --git a/nes/nc_projections/points_nes.py b/nes/nc_projections/points_nes.py index 29022b5..c63e9d5 100644 --- a/nes/nc_projections/points_nes.py +++ b/nes/nc_projections/points_nes.py @@ -62,7 +62,7 @@ class PointsNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. @@ -123,7 +123,7 @@ class PointsNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. @@ -466,12 +466,12 @@ class PointsNes(Nes): var[self.write_axis_limits["t_min"]:self.write_axis_limits["t_max"], self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"]] = att_value except IndexError: - out_shape = var[self.write_axis_limits["t_min"]:self.write_axis_limits["t_max"], + out_shape = var[self.write_axis_limits["t_min"]:self.write_axis_limits["t_max"], self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"]].shape raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( out_shape, att_value.shape)) except ValueError: - out_shape = var[self.write_axis_limits["t_min"]:self.write_axis_limits["t_max"], + out_shape = var[self.write_axis_limits["t_min"]:self.write_axis_limits["t_max"], self.write_axis_limits["x_min"]:self.write_axis_limits["x_max"]].shape raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( out_shape, att_value.shape)) @@ -596,7 +596,7 @@ class PointsNes(Nes): def to_providentia(self, model_centre_lon, model_centre_lat, grid_edge_lon, grid_edge_lat): """ Transform a PointsNes into a PointsNesProvidentia object - + Returns ---------- points_nes_providentia : nes.Nes @@ -750,6 +750,6 @@ class PointsNes(Nes): var : Variable netCDF4-python variable object. """ - var.coordinates = "lat lon" + # var.coordinates = "lat lon" return None diff --git a/nes/nc_projections/rotated_nes.py b/nes/nc_projections/rotated_nes.py index c5c3794..e1647ea 100644 --- a/nes/nc_projections/rotated_nes.py +++ b/nes/nc_projections/rotated_nes.py @@ -65,7 +65,7 @@ class RotatedNes(Nes): create_nes : bool Indicates if you want to create the object from scratch (True) or through an existing file. balanced : bool - Indicates if you want a balanced parallelization or not. + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. times : list, None List of times to substitute the current ones while creation. 
@@ -260,7 +260,7 @@ class RotatedNes(Nes): o_lat_p=float64(self.projection_data["grid_north_pole_latitude"]), o_lon_p=float64(self.projection_data["grid_north_pole_longitude"]), ) - + return projection # noinspection DuplicatedCode @@ -532,13 +532,13 @@ class RotatedNes(Nes): # Get edges for regular coordinates grid_edge_lon_data, grid_edge_lat_data = self.rotated2latlon(rlon_grid_edge, rlat_grid_edge) - + # Create grid outline by stacking the edges in both coordinates model_grid_outline = vstack((grid_edge_lon_data, grid_edge_lat_data)).T grid_edge_lat = {"data": model_grid_outline[:, 1]} grid_edge_lon = {"data": model_grid_outline[:, 0]} - + return grid_edge_lat, grid_edge_lon # noinspection DuplicatedCode @@ -586,7 +586,7 @@ class RotatedNes(Nes): """ var.grid_mapping = "rotated_pole" - var.coordinates = "lat lon" + # var.coordinates = "lat lon" return None @@ -645,11 +645,11 @@ class RotatedNes(Nes): self.create_spatial_bounds() # Reshape arrays to create geometry - aux_b_lats = self.lat_bnds["data"].reshape((self.lat_bnds["data"].shape[0] * self.lat_bnds["data"].shape[1], + aux_b_lats = self.lat_bnds["data"].reshape((self.lat_bnds["data"].shape[0] * self.lat_bnds["data"].shape[1], self.lat_bnds["data"].shape[2])) - aux_b_lons = self.lon_bnds["data"].reshape((self.lon_bnds["data"].shape[0] * self.lon_bnds["data"].shape[1], + aux_b_lons = self.lon_bnds["data"].reshape((self.lon_bnds["data"].shape[0] * self.lon_bnds["data"].shape[1], self.lon_bnds["data"].shape[2])) - + # Get polygons from bounds geometry = [] for i in range(aux_b_lons.shape[0]): @@ -658,7 +658,7 @@ class RotatedNes(Nes): (aux_b_lons[i, 2], aux_b_lats[i, 2]), (aux_b_lons[i, 3], aux_b_lats[i, 3]), (aux_b_lons[i, 0], aux_b_lats[i, 0])])) - + # Create dataframe cointaining all polygons fids = self.get_fids() gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=geometry, crs="EPSG:4326") @@ -679,16 +679,16 @@ class RotatedNes(Nes): centroids_gdf: GeoPandasDataFrame Centroids dataframe. """ - + # Get centroids from coordinates centroids = [] for lat_ind in range(0, self.lon["data"].shape[0]): for lon_ind in range(0, self.lon["data"].shape[1]): - centroids.append(Point(self.lon["data"][lat_ind, lon_ind], + centroids.append(Point(self.lon["data"][lat_ind, lon_ind], self.lat["data"][lat_ind, lon_ind])) - + # Create dataframe cointaining all points fids = self.get_fids() centroids_gdf = GeoDataFrame(index=Index(name="FID", data=fids.ravel()), geometry=centroids, crs="EPSG:4326") - + return centroids_gdf -- GitLab
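Minimal usage sketch of the new convert_longitudes() method introduced in this series (the file names below are hypothetical; it assumes a serial run on a regular lat-lon grid read from an existing file, so that all variable data can be loaded beforehand):

    from nes import open_netcdf

    # open in serial and load every variable into memory,
    # as required by the data-loaded check in convert_longitudes()
    nessy = open_netcdf("input_lon_0_360.nc")
    nessy.load()

    # write a copy with longitude centres, bounds and variable data
    # shifted and reordered into the [-180, 180] range
    nessy.convert_longitudes("output_lon_180.nc")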