From 1ad969cd74c31e1083a762fe45954c7571e8bc8f Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 22 Oct 2019 12:24:42 +0200 Subject: [PATCH 1/4] Added communicator to config and now it is possible to pass a specific communicator to run or use COMM_WORLD - Added first_time argument --- conf/hermes.conf | 1 + hermesv3_bu/config/config.py | 20 ++++++++++++-------- hermesv3_bu/hermes.py | 14 ++++++++++---- 3 files changed, 23 insertions(+), 12 deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index 059faa8..5236ed3 100755 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -10,6 +10,7 @@ start_date = 2016/11/29 00:00:00 # end_date = 2010/01/01 00:00:00 output_timestep_num = 24 auxiliary_files_path = /scratch/Earth/HERMESv3_BU_aux/_ +first_time = 1 erase_auxiliary_files = 0 diff --git a/hermesv3_bu/config/config.py b/hermesv3_bu/config/config.py index 3091a86..c3fccf8 100755 --- a/hermesv3_bu/config/config.py +++ b/hermesv3_bu/config/config.py @@ -10,13 +10,17 @@ class Config(ArgParser): """ Configuration arguments class. """ - def __init__(self, new_date=None): + def __init__(self, new_date=None, comm=None): """ Read and parse all the arguments. :param new_date: Starting date for simulation loop day.
:type new_date: datetime.datetime """ + if comm is None: + comm = MPI.COMM_WORLD + self.comm = comm + self.new_date = new_date super(Config, self).__init__() @@ -55,6 +59,8 @@ class Config(ArgParser): p.add_argument('--auxiliary_files_path', required=True, help='Path to the directory where the necessary auxiliary files will be created if them are ' + 'not created yet.') + p.add_argument('--first_time', required=False, default='False', type=str, + help='Indicates if you want to run it for first time (only create auxiliary files).') p.add_argument('--erase_auxiliary_files', required=False, default='False', type=str, help='Indicates if you want to start from scratch removing the auxiliary files already created.') @@ -686,15 +692,15 @@ class Config(ArgParser): arguments.end_date = self._parse_end_date(arguments.end_date, arguments.start_date) arguments.output_name = self.get_output_name(arguments) + arguments.first_time = self._parse_bool(arguments.first_time) arguments.erase_auxiliary_files = self._parse_bool(arguments.erase_auxiliary_files) self.create_dir(arguments.output_dir) if arguments.erase_auxiliary_files: if os.path.exists(arguments.auxiliary_files_path): - comm = MPI.COMM_WORLD - if comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: rmtree(arguments.auxiliary_files_path) - comm.Barrier() + self.comm.Barrier() self.create_dir(arguments.auxiliary_files_path) # Booleans @@ -797,8 +803,7 @@ class Config(ArgParser): full_path = os.path.join(arguments.output_dir, file_name) return full_path - @staticmethod - def create_dir(path): + def create_dir(self, path): """ Create the given folder if it is not created yet. 
@@ -807,8 +812,7 @@ class Config(ArgParser): """ import os from mpi4py import MPI - icomm = MPI.COMM_WORLD - comm = icomm.Split(color=0, key=0) + comm = self.comm.Split(color=0, key=0) rank = comm.Get_rank() if rank == 0: diff --git a/hermesv3_bu/hermes.py b/hermesv3_bu/hermes.py index 9101d5f..8b31789 100755 --- a/hermesv3_bu/hermes.py +++ b/hermesv3_bu/hermes.py @@ -18,9 +18,11 @@ class Hermes(object): """ Interface class for HERMESv3. """ - def __init__(self, config): + def __init__(self, config, comm=None): self.initial_time = timeit.default_timer() - self.comm = MPI.COMM_WORLD + if comm is None: + comm = MPI.COMM_WORLD + self.comm = comm self.arguments = config.arguments self.logger = Log(self.arguments) @@ -48,6 +50,10 @@ class Hermes(object): """ from datetime import timedelta + if self.arguments.first_time: + # Stop run + self.comm.Abort(0) + emis = self.sector_manager.run() waiting_time = timeit.default_timer() self.comm.Barrier() @@ -67,8 +73,8 @@ class Hermes(object): return None -def run(): - date = Hermes(Config()).main() +def run(comm=None): + date = Hermes(Config(comm=comm), comm).main() while date is not None: date = Hermes(Config(new_date=date)).main() sys.exit(0) -- GitLab From fb9120534d7d39c8b154c9257ab66cfa01463ba0 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 22 Oct 2019 13:23:46 +0200 Subject: [PATCH 2/4] Added communicator to config and now it is possible to pass a specific communicator to run or use COMM_WORLD - Added first_time argument --- conf/hermes.conf | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index 5236ed3..a1fd305 100755 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -9,9 +9,9 @@ start_date = 2016/11/29 00:00:00 # ----- end_date = start_date [DEFAULT] ----- # end_date = 2010/01/01 00:00:00 output_timestep_num = 24 -auxiliary_files_path = /scratch/Earth/HERMESv3_BU_aux/_ +auxiliary_files_path = /scratch/Earth/HERMESv3_BU_aux/__test
first_time = 1 -erase_auxiliary_files = 0 +erase_auxiliary_files = 1 [DOMAIN] @@ -56,10 +56,10 @@ vertical_description = /profiles/vertical/MONARCH_Global_48layers_ver #y_0 = 43862.90625 # CATALUNYA test - #nx = 28 - #ny = 30 - nx = 4 - ny = 4 + nx = 28 + ny = 30 + #nx = 4 + #ny = 4 inc_x = 10000 inc_y = 10000 x_0 = 253151.59375 @@ -119,17 +119,17 @@ vertical_description = /profiles/vertical/MONARCH_Global_48layers_ver #################################################################### [SECTOR MANAGEMENT] writing_processors = 1 - -aviation_processors = 1 -shipping_port_processors = 1 -livestock_processors = 12 -crop_operations_processors = 1 -crop_fertilizers_processors = 4 -agricultural_machinery_processors = 1 -residential_processors = 4 -recreational_boats_processors = 4 -point_sources_processors = 16 -traffic_processors = 256 +# +# aviation_processors = 1 +# shipping_port_processors = 1 +# livestock_processors = 12 +# crop_operations_processors = 1 +# crop_fertilizers_processors = 4 +# agricultural_machinery_processors = 1 +# residential_processors = 4 +# recreational_boats_processors = 4 +# point_sources_processors = 16 +# traffic_processors = 256 traffic_area_processors = 1 [SHAPEFILES] -- GitLab From 3538ae50c6e96e30b74d754942567938dd652035 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Wed, 23 Oct 2019 11:38:16 +0200 Subject: [PATCH 3/4] comm & logger class variables as hidden variables NOTE(review): double-underscore attributes are name-mangled per defining class in Python. `self.__logger` / `self.__comm` assigned in a base class (Clip, Grid, Sector, IoServer) become `_Clip__logger`, `_IoServer__comm`, etc., so every subclass access such as `self.__logger` in CustomClip resolves to `_CustomClip__logger` and raises AttributeError at runtime. Use a single leading underscore (`self._logger`, `self._comm`) for protected attributes shared with subclasses — confirm before merging. --- hermesv3_bu/clipping/clip.py | 4 +- hermesv3_bu/clipping/custom_clip.py | 6 +- hermesv3_bu/clipping/default_clip.py | 6 +- hermesv3_bu/clipping/shapefile_clip.py | 6 +- hermesv3_bu/grids/grid.py | 10 +- hermesv3_bu/grids/grid_latlon.py | 8 +- hermesv3_bu/grids/grid_lcc.py | 8 +- hermesv3_bu/grids/grid_mercator.py | 8 +- hermesv3_bu/grids/grid_rotated.py | 12 +- hermesv3_bu/hermes.py | 61 ++++---- hermesv3_bu/io_server/io_raster.py | 20 +-- hermesv3_bu/io_server/io_server.py | 2 +- hermesv3_bu/io_server/io_shapefile.py | 44 +++---
.../agricultural_crop_fertilizers_sector.py | 138 +++++++++--------- .../agricultural_crop_operations_sector.py | 28 ++-- .../sectors/agricultural_machinery_sector.py | 40 ++--- hermesv3_bu/sectors/agricultural_sector.py | 66 ++++----- hermesv3_bu/sectors/aviation_sector.py | 106 +++++++------- hermesv3_bu/sectors/livestock_sector.py | 124 ++++++++-------- hermesv3_bu/sectors/point_source_sector.py | 96 ++++++------ .../sectors/recreational_boats_sector.py | 44 +++--- hermesv3_bu/sectors/residential_sector.py | 78 +++++----- hermesv3_bu/sectors/sector.py | 80 +++++----- hermesv3_bu/sectors/sector_manager.py | 30 ++-- hermesv3_bu/sectors/shipping_port_sector.py | 56 +++---- hermesv3_bu/sectors/solvents_sector.py | 106 +++++++------- hermesv3_bu/sectors/traffic_area_sector.py | 126 ++++++++-------- hermesv3_bu/sectors/traffic_sector.py | 132 ++++++++--------- hermesv3_bu/writer/cmaq_writer.py | 28 ++-- hermesv3_bu/writer/default_writer.py | 34 ++--- hermesv3_bu/writer/monarch_writer.py | 30 ++-- hermesv3_bu/writer/wrfchem_writer.py | 24 +-- hermesv3_bu/writer/writer.py | 26 ++-- 33 files changed, 797 insertions(+), 790 deletions(-) diff --git a/hermesv3_bu/clipping/clip.py b/hermesv3_bu/clipping/clip.py index 5b2ebb6..e1449dd 100755 --- a/hermesv3_bu/clipping/clip.py +++ b/hermesv3_bu/clipping/clip.py @@ -51,8 +51,8 @@ class Clip(object): def __init__(self, logger, auxiliary_path): spent_time = timeit.default_timer() - self.logger = logger + self.__logger = logger self.shapefile = None self.shapefile_path = os.path.join(auxiliary_path, 'clip', 'clip.shp') - self.logger.write_time_log('Clip', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Clip', '__init__', timeit.default_timer() - spent_time) diff --git a/hermesv3_bu/clipping/custom_clip.py b/hermesv3_bu/clipping/custom_clip.py index f6f79b2..45cfe12 100755 --- a/hermesv3_bu/clipping/custom_clip.py +++ b/hermesv3_bu/clipping/custom_clip.py @@ -27,7 +27,7 @@ class CustomClip(Clip): 
super(CustomClip, self).__init__(logger, auxiliary_path) self.clip_type = 'Custom clip' self.shapefile = self.create_clip(points_str) - self.logger.write_time_log('CustomClip', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CustomClip', '__init__', timeit.default_timer() - spent_time) def create_clip(self, points_str): """ @@ -64,6 +64,6 @@ class CustomClip(Clip): clip.to_file(self.shapefile_path) else: clip = gpd.read_file(self.shapefile_path) - self.logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) - self.logger.write_time_log('CustomClip', 'create_clip', timeit.default_timer() - spent_time) + self.__logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) + self.__logger.write_time_log('CustomClip', 'create_clip', timeit.default_timer() - spent_time) return clip diff --git a/hermesv3_bu/clipping/default_clip.py b/hermesv3_bu/clipping/default_clip.py index f05bda8..43f2238 100755 --- a/hermesv3_bu/clipping/default_clip.py +++ b/hermesv3_bu/clipping/default_clip.py @@ -27,7 +27,7 @@ class DefaultClip(Clip): super(DefaultClip, self).__init__(logger, auxiliary_path) self.clip_type = 'Default clip' self.shapefile = self.create_clip(grid) - self.logger.write_time_log('DefaultClip', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('DefaultClip', '__init__', timeit.default_timer() - spent_time) def create_clip(self, grid): """ @@ -49,6 +49,6 @@ class DefaultClip(Clip): clip.to_file(self.shapefile_path) else: clip = gpd.read_file(self.shapefile_path) - self.logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) - self.logger.write_time_log('DefaultClip', 'create_clip', timeit.default_timer() - spent_time) + self.__logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) + self.__logger.write_time_log('DefaultClip', 'create_clip', timeit.default_timer() - spent_time) return clip diff --git 
a/hermesv3_bu/clipping/shapefile_clip.py b/hermesv3_bu/clipping/shapefile_clip.py index 88792ef..07bc87b 100755 --- a/hermesv3_bu/clipping/shapefile_clip.py +++ b/hermesv3_bu/clipping/shapefile_clip.py @@ -28,7 +28,7 @@ class ShapefileClip(Clip): super(ShapefileClip, self).__init__(logger, auxiliary_path) self.clip_type = 'Shapefile clip' self.shapefile = self.create_clip(clip_input_path) - self.logger.write_time_log('ShapefileClip', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShapefileClip', '__init__', timeit.default_timer() - spent_time) def create_clip(self, clip_path): """ @@ -52,6 +52,6 @@ class ShapefileClip(Clip): error_exit(" Clip shapefile {0} not found.") else: clip = gpd.read_file(self.shapefile_path) - self.logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) - self.logger.write_time_log('ShapefileClip', 'create_clip', timeit.default_timer() - spent_time) + self.__logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) + self.__logger.write_time_log('ShapefileClip', 'create_clip', timeit.default_timer() - spent_time) return clip diff --git a/hermesv3_bu/grids/grid.py b/hermesv3_bu/grids/grid.py index 6dea082..b454d44 100755 --- a/hermesv3_bu/grids/grid.py +++ b/hermesv3_bu/grids/grid.py @@ -83,8 +83,8 @@ class Grid(object): :type vertical_description_path: str """ spent_time = timeit.default_timer() - self.logger = logger - self.logger.write_log('\tGrid specifications: {0}'.format(attributes), 3) + self.__logger = logger + self.__logger.write_log('\tGrid specifications: {0}'.format(attributes), 3) self.attributes = attributes self.netcdf_path = os.path.join(auxiliary_path, 'grid', 'grid.nc') self.shapefile_path = os.path.join(auxiliary_path, 'grid', 'grid.shp') @@ -117,7 +117,7 @@ class Grid(object): df = pd.read_csv(path, sep=',') heights = df.height_magl.values - self.logger.write_time_log('Grid', 'get_vertical_description', timeit.default_timer() - spent_time, 3) + 
self.__logger.write_time_log('Grid', 'get_vertical_description', timeit.default_timer() - spent_time, 3) return heights def write_netcdf(self): @@ -171,7 +171,7 @@ class Grid(object): bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) else: error_exit('The number of vertices of the boundaries must be 2 or 4.') - self.logger.write_time_log('Grid', 'create_bounds', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('Grid', 'create_bounds', timeit.default_timer() - spent_time, 3) return bound_coords def create_shapefile(self): @@ -235,7 +235,7 @@ class Grid(object): gdf = gpd.read_file(self.shapefile_path) gdf.set_index('FID', inplace=True) - self.logger.write_time_log('Grid', 'create_shapefile', timeit.default_timer() - spent_time, 2) + self.__logger.write_time_log('Grid', 'create_shapefile', timeit.default_timer() - spent_time, 2) return gdf diff --git a/hermesv3_bu/grids/grid_latlon.py b/hermesv3_bu/grids/grid_latlon.py index d1c0513..8e9b28b 100755 --- a/hermesv3_bu/grids/grid_latlon.py +++ b/hermesv3_bu/grids/grid_latlon.py @@ -56,7 +56,7 @@ class LatLonGrid(Grid): self.shape = (tstep_num, len(self.vertical_desctiption), n_lat, n_lon) - self.logger.write_time_log('LatLonGrid', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LatLonGrid', '__init__', timeit.default_timer() - spent_time) def create_coords(self): """ @@ -81,7 +81,7 @@ class LatLonGrid(Grid): self.boundary_latitudes = self.boundary_latitudes.reshape((1,) + self.boundary_latitudes.shape) self.boundary_longitudes = self.boundary_longitudes.reshape((1,) + self.boundary_longitudes.shape) - self.logger.write_time_log('LatLonGrid', 'create_coords', timeit.default_timer() - spent_time, 2) + self.__logger.write_time_log('LatLonGrid', 'create_coords', timeit.default_timer() - spent_time, 2) def write_netcdf(self): """ @@ -98,5 +98,5 @@ class LatLonGrid(Grid): boundary_longitudes=self.boundary_longitudes, regular_latlon=True) - 
self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.logger.write_time_log('LatLonGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.__logger.write_time_log('LatLonGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) diff --git a/hermesv3_bu/grids/grid_lcc.py b/hermesv3_bu/grids/grid_lcc.py index ee6fdee..33432e0 100755 --- a/hermesv3_bu/grids/grid_lcc.py +++ b/hermesv3_bu/grids/grid_lcc.py @@ -79,7 +79,7 @@ class LccGrid(Grid): # Initialises with parent class super(LccGrid, self).__init__(logger, attributes, auxiliary_path, vertical_description_path) self.shape = (tstep_num, len(self.vertical_desctiption), ny, nx) - self.logger.write_time_log('LccGrid', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LccGrid', '__init__', timeit.default_timer() - spent_time) def write_netcdf(self): """ @@ -99,8 +99,8 @@ class LccGrid(Grid): lat_1_2="{0}, {1}".format(self.attributes['lat_1'], self.attributes['lat_2']), lon_0=self.attributes['lon_0'], lat_0=self.attributes['lat_0']) - self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.logger.write_time_log('LccGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.__logger.write_time_log('LccGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) return True def create_coords(self): @@ -143,5 +143,5 @@ class LccGrid(Grid): self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) - self.logger.write_time_log('LccGrid', 'create_coords', timeit.default_timer() - spent_time, 2) + self.__logger.write_time_log('LccGrid', 'create_coords', timeit.default_timer() - spent_time, 2) return True diff --git 
a/hermesv3_bu/grids/grid_mercator.py b/hermesv3_bu/grids/grid_mercator.py index 24faf79..1f6fc54 100755 --- a/hermesv3_bu/grids/grid_mercator.py +++ b/hermesv3_bu/grids/grid_mercator.py @@ -69,7 +69,7 @@ class MercatorGrid(Grid): super(MercatorGrid, self).__init__(logger, attributes, auxiliary_path, vertical_description_path) self.shape = (tstep_num, len(self.vertical_desctiption), ny, nx) - self.logger.write_time_log('MercatorGrid', '__init__', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('MercatorGrid', '__init__', timeit.default_timer() - spent_time, 3) def write_netcdf(self): """ @@ -88,8 +88,8 @@ class MercatorGrid(Grid): boundary_longitudes=self.boundary_longitudes, mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.attributes['lon_0'], lat_ts=self.attributes['lat_ts']) - self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.logger.write_time_log('MercatorGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.__logger.write_time_log('MercatorGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) return True def create_coords(self): @@ -120,6 +120,6 @@ class MercatorGrid(Grid): self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) - self.logger.write_time_log('MercatorGrid', 'create_coords', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('MercatorGrid', 'create_coords', timeit.default_timer() - spent_time, 3) return True diff --git a/hermesv3_bu/grids/grid_rotated.py b/hermesv3_bu/grids/grid_rotated.py index 2195707..8cf8bd2 100755 --- a/hermesv3_bu/grids/grid_rotated.py +++ b/hermesv3_bu/grids/grid_rotated.py @@ -44,7 +44,7 @@ class RotatedGrid(Grid): super(RotatedGrid, self).__init__(logger, attributes, auxiliary_path, vertical_description_path) self.shape = 
(tstep_num, len(self.vertical_desctiption), attributes['n_lat'], attributes['n_lon']) - self.logger.write_time_log('RotatedGrid', '__init__', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('RotatedGrid', '__init__', timeit.default_timer() - spent_time, 3) def create_regular_rotated(self): """ @@ -66,7 +66,7 @@ class RotatedGrid(Grid): inverse=True) corner_longitudes = self.create_bounds(center_longitudes, self.attributes['inc_rlon'], number_vertices=4) - self.logger.write_time_log('RotatedGrid', 'create_regular_rotated', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('RotatedGrid', 'create_regular_rotated', timeit.default_timer() - spent_time, 3) return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes def create_coords(self): @@ -90,7 +90,7 @@ class RotatedGrid(Grid): self.boundary_longitudes, self.boundary_latitudes = self.rotated2latlon(b_lons, b_lats) self.center_longitudes, self.center_latitudes = self.rotated2latlon(c_lons, c_lats) - self.logger.write_time_log('RotatedGrid', 'create_coords', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('RotatedGrid', 'create_coords', timeit.default_timer() - spent_time, 3) return True def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180): @@ -153,7 +153,7 @@ class RotatedGrid(Grid): almd[almd > (lon_min + 360)] -= 360 almd[almd < lon_min] += 360 - self.logger.write_time_log('RotatedGrid', 'rotated2latlon', timeit.default_timer() - spent_time, 3) + self.__logger.write_time_log('RotatedGrid', 'rotated2latlon', timeit.default_timer() - spent_time, 3) return almd, aphd @@ -174,6 +174,6 @@ class RotatedGrid(Grid): rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, north_pole_lat=90 - self.attributes['new_pole_latitude_degrees'], north_pole_lon=self.attributes['new_pole_longitude_degrees']) - self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.logger.write_time_log('RotatedGrid', 
'write_netcdf', timeit.default_timer() - spent_time, 3) + self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.__logger.write_time_log('RotatedGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) return True diff --git a/hermesv3_bu/hermes.py b/hermesv3_bu/hermes.py index 8b31789..4eafb06 100755 --- a/hermesv3_bu/hermes.py +++ b/hermesv3_bu/hermes.py @@ -7,7 +7,7 @@ from mpi4py import MPI from datetime import timedelta from hermesv3_bu.config.config import Config -from hermesv3_bu.grids.grid import select_grid +from hermesv3_bu.grids.grid import select_grid, Grid from hermesv3_bu.clipping.clip import select_clip from hermesv3_bu.writer.writer import select_writer from hermesv3_bu.sectors.sector_manager import SectorManager @@ -19,30 +19,38 @@ class Hermes(object): Interface class for HERMESv3. """ def __init__(self, config, comm=None): - self.initial_time = timeit.default_timer() + """ + + :param config: Configuration file object + :type config: Config + + :param comm: Communicator + :type comm: MPI.Comm + """ + self.__initial_time = timeit.default_timer() if comm is None: comm = MPI.COMM_WORLD - self.comm = comm + self.__comm = comm self.arguments = config.arguments - self.logger = Log(self.arguments) - self.logger.write_log('====== Starting HERMESv3_BU simulation =====') - self.grid = select_grid(self.comm, self.logger, self.arguments) - self.clip = select_clip(self.comm, self.logger, self.arguments.auxiliary_files_path, self.arguments.clipping, + self.__logger = Log(self.arguments) + self.__logger.write_log('====== Starting HERMESv3_BU simulation =====') + self.grid = select_grid(self.__comm, self.__logger, self.arguments) + self.clip = select_clip(self.__comm, self.__logger, self.arguments.auxiliary_files_path, self.arguments.clipping, self.grid) self.date_array = [self.arguments.start_date + timedelta(hours=hour) for hour in range(self.arguments.output_timestep_num)] - self.logger.write_log('Dates to simulate:', 
message_level=3) + self.__logger.write_log('Dates to simulate:', message_level=3) for aux_date in self.date_array: - self.logger.write_log('\t{0}'.format(aux_date.strftime("%Y/%m/%d, %H:%M:%S")), message_level=3) + self.__logger.write_log('\t{0}'.format(aux_date.strftime("%Y/%m/%d, %H:%M:%S")), message_level=3) self.sector_manager = SectorManager( - self.comm, self.logger, self.grid, self.clip, self.date_array, self.arguments) + self.__comm, self.__logger, self.grid, self.clip, self.date_array, self.arguments) - self.writer = select_writer(self.logger, self.arguments, self.grid, self.date_array) + self.writer = select_writer(self.__logger, self.arguments, self.grid, self.date_array) - self.logger.write_time_log('Hermes', '__init__', timeit.default_timer() - self.initial_time) + self.__logger.write_time_log('Hermes', '__init__', timeit.default_timer() - self.__initial_time) def main(self): """ @@ -51,21 +59,20 @@ class Hermes(object): from datetime import timedelta if self.arguments.first_time: - # Stop run - self.comm.Abort(0) - - emis = self.sector_manager.run() - waiting_time = timeit.default_timer() - self.comm.Barrier() - self.logger.write_log('All emissions calculated!') - self.logger.write_time_log('Hermes', 'Waiting_to_write', timeit.default_timer() - waiting_time) - - self.writer.write(emis) - self.comm.Barrier() - - self.logger.write_log('***** HERMESv3_BU simulation finished successfully *****') - self.logger.write_time_log('Hermes', 'TOTAL', timeit.default_timer() - self.initial_time) - self.logger.finish_logs() + self.__logger.write_log('***** HERMESv3_BU First Time finished successfully *****') + else: + emis = self.sector_manager.run() + waiting_time = timeit.default_timer() + self.__comm.Barrier() + self.__logger.write_log('All emissions calculated!') + self.__logger.write_time_log('Hermes', 'Waiting_to_write', timeit.default_timer() - waiting_time) + + self.writer.write(emis) + self.__comm.Barrier() + + self.__logger.write_log('***** HERMESv3_BU
simulation finished successfully *****') + self.__logger.write_time_log('Hermes', 'TOTAL', timeit.default_timer() - self.__initial_time) + self.__logger.finish_logs() if self.arguments.start_date < self.arguments.end_date: return self.arguments.start_date + timedelta(days=1) diff --git a/hermesv3_bu/io_server/io_raster.py b/hermesv3_bu/io_server/io_raster.py index de05202..f74b041 100755 --- a/hermesv3_bu/io_server/io_raster.py +++ b/hermesv3_bu/io_server/io_raster.py @@ -186,13 +186,13 @@ class IoRaster(IoServer): :return: """ - if self.comm.Get_rank() == rank: + if self.__comm.Get_rank() == rank: gdf = self.to_shapefile_serie(raster_path, out_path=out_path, write=write, crs=crs, nodata=nodata) else: gdf = None - if self.comm.Get_size() > 1: - gdf = self.comm.bcast(gdf, root=0) + if self.__comm.Get_size() > 1: + gdf = self.__comm.bcast(gdf, root=0) return gdf @@ -316,7 +316,7 @@ class IoRaster(IoServer): def to_shapefile_parallel(self, raster_path, gather=False, bcast=False, crs=None, nodata=0): spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: ds = rasterio.open(raster_path) grid_info = ds.transform @@ -357,11 +357,11 @@ class IoRaster(IoServer): gdf = None b_lons = None b_lats = None - self.comm.Barrier() - gdf = IoShapefile(self.comm).split_shapefile(gdf) + self.__comm.Barrier() + gdf = IoShapefile(self.__comm).split_shapefile(gdf) - b_lons = IoShapefile(self.comm).split_shapefile(b_lons) - b_lats = IoShapefile(self.comm).split_shapefile(b_lats) + b_lons = IoShapefile(self.__comm).split_shapefile(b_lons) + b_lats = IoShapefile(self.__comm).split_shapefile(b_lats) i = 0 for j, df_aux in gdf.iterrows(): @@ -379,7 +379,7 @@ class IoRaster(IoServer): gdf = gdf.to_crs(crs) if gather and not bcast: - gdf = IoShapefile(self.comm).gather_shapefile(gdf) + gdf = IoShapefile(self.__comm).gather_shapefile(gdf) elif gather and bcast: - gdf = IoShapefile(self.comm).gather_bcast_shapefile(gdf) + gdf = 
IoShapefile(self.__comm).gather_bcast_shapefile(gdf) return gdf diff --git a/hermesv3_bu/io_server/io_server.py b/hermesv3_bu/io_server/io_server.py index 46bd918..77dae06 100755 --- a/hermesv3_bu/io_server/io_server.py +++ b/hermesv3_bu/io_server/io_server.py @@ -9,4 +9,4 @@ class IoServer(object): :type comm: MPI.Comm """ def __init__(self, comm): - self.comm = comm + self.__comm = comm diff --git a/hermesv3_bu/io_server/io_shapefile.py b/hermesv3_bu/io_server/io_shapefile.py index 59ece64..b995773 100755 --- a/hermesv3_bu/io_server/io_shapefile.py +++ b/hermesv3_bu/io_server/io_shapefile.py @@ -49,14 +49,14 @@ class IoShapefile(IoServer): :return: True when the writing is finished. :rtype: bool """ - data = self.comm.gather(data, root=rank) - if self.comm.Get_rank() == rank: + data = self.__comm.gather(data, root=rank) + if self.__comm.Get_rank() == rank: if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path)) data = pd.concat(data) data.to_file(path) - self.comm.Barrier() + self.__comm.Barrier() return True @@ -67,19 +67,19 @@ class IoShapefile(IoServer): return gdf def read_shapefile(self, path, rank=0): - if self.comm.Get_rank() == rank: + if self.__comm.Get_rank() == rank: check_files(path) gdf = gpd.read_file(path) - gdf = np.array_split(gdf, self.comm.Get_size()) + gdf = np.array_split(gdf, self.__comm.Get_size()) else: gdf = None - gdf = self.comm.scatter(gdf, root=rank) + gdf = self.__comm.scatter(gdf, root=rank) return gdf def read_shapefile_parallel(self, path, rank=0): - if self.comm.Get_rank() == rank: + if self.__comm.Get_rank() == rank: data = self.read_shapefile_serial(path) else: data = None @@ -97,38 +97,38 @@ class IoShapefile(IoServer): :rtype: GeoDataFrame """ - if self.comm.Get_size() == 1: + if self.__comm.Get_size() == 1: data = data else: - if self.comm.Get_rank() == rank: - data = np.array_split(data, self.comm.Get_size()) + if self.__comm.Get_rank() == rank: + data = np.array_split(data, self.__comm.Get_size()) 
else: data = None - data = self.comm.scatter(data, root=rank) + data = self.__comm.scatter(data, root=rank) return data def gather_bcast_shapefile(self, data, rank=0): - if self.comm.Get_size() == 1: + if self.__comm.Get_size() == 1: data = data else: - data = self.comm.gather(data, root=rank) - if self.comm.Get_rank() == rank: + data = self.__comm.gather(data, root=rank) + if self.__comm.Get_rank() == rank: data = pd.concat(data) else: data = None - data = self.comm.bcast(data, root=rank) + data = self.__comm.bcast(data, root=rank) return data def gather_shapefile(self, data, rank=0): - if self.comm.Get_size() == 1: + if self.__comm.Get_size() == 1: data = data else: - data = self.comm.gather(data, root=rank) - if self.comm.Get_rank() == rank: + data = self.__comm.gather(data, root=rank) + if self.__comm.Get_rank() == rank: data = pd.concat(data) else: data = None @@ -136,13 +136,13 @@ class IoShapefile(IoServer): def balance(self, data, rank=0): - data = self.comm.gather(data, root=rank) - if self.comm.Get_rank() == rank: + data = self.__comm.gather(data, root=rank) + if self.__comm.Get_rank() == rank: data = pd.concat(data) - data = np.array_split(data, self.comm.Get_size()) + data = np.array_split(data, self.__comm.Get_size()) else: data = None - data = self.comm.scatter(data, root=rank) + data = self.__comm.scatter(data, root=rank) return data diff --git a/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py b/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py index 2b0be93..10f3ed0 100755 --- a/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py +++ b/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py @@ -53,7 +53,7 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): self.temperature_path = temperature_path self.wind_speed_path = wind_speed_path self.crop_growing_degree_day_path = crop_growing_degree_day_path - self.logger.write_time_log('AgriculturalCropFertilizersSector', '__init__', timeit.default_timer() - 
spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', '__init__', timeit.default_timer() - spent_time) def get_ftype_fcrop_fmode_by_nut(self, crop, nut_list): spent_time = timeit.default_timer() @@ -71,8 +71,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): filtered_crop_f_parameter['f_crop'], filtered_crop_f_parameter['f_mode']], axis=1).reset_index() f_by_nut.rename(columns={0: 'f_type'}, inplace=True) - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ftype_fcrop_fmode_by_nut', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ftype_fcrop_fmode_by_nut', + timeit.default_timer() - spent_time) return f_by_nut @@ -114,8 +114,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): total_crop_df['EF_{0}'.format(crop)] = crop_ef['f_sum'] - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ef_by_crop', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ef_by_crop', + timeit.default_timer() - spent_time) return total_crop_df def to_dst_resolution(self, src_shapefile, value): @@ -143,8 +143,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): # dst_shapefile.drop('involved_area', axis=1, inplace=True) dst_shapefile.dropna(inplace=True) - dst_shapefile = IoShapefile(self.comm).gather_shapefile(dst_shapefile.reset_index()) - if self.comm.Get_rank() == 0: + dst_shapefile = IoShapefile(self.__comm).gather_shapefile(dst_shapefile.reset_index()) + if self.__comm.Get_rank() == 0: # dst_shapefile['FID_involved_area'] = dst_shapefile.groupby('FID')['involved_area'].sum() # dst_shapefile['involved_area'] = dst_shapefile['involved_area'] / dst_shapefile['FID_involved_area'] # dst_shapefile[value] = dst_shapefile[value] * dst_shapefile['involved_area'] @@ -154,55 +154,55 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): dst_shapefile = 
dst_shapefile.groupby(['FID'])[value].mean() else: dst_shapefile = None - dst_shapefile = IoShapefile(self.comm).split_shapefile(dst_shapefile) + dst_shapefile = IoShapefile(self.__comm).split_shapefile(dst_shapefile) # print('Rank {0} -Z {1}: \n{2}\n'.format(self.comm.Get_rank(), value, dst_shapefile)) # sys.stdout.flush() - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'to_dst_resolution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'to_dst_resolution', + timeit.default_timer() - spent_time) return dst_shapefile def get_gridded_constants(self, ph_path, cec_path): spent_time = timeit.default_timer() - self.logger.write_log('Getting gridded constants', message_level=2) + self.__logger.write_log('Getting gridded constants', message_level=2) gridded_ph_cec_path = os.path.join(self.auxiliary_dir, 'fertilizers', 'gridded_constants') if not os.path.exists(gridded_ph_cec_path): - self.logger.write_log('Getting PH from {0}'.format(ph_path), message_level=2) + self.__logger.write_log('Getting PH from {0}'.format(ph_path), message_level=2) clipped_ph_path = os.path.join(self.auxiliary_dir, 'fertilizers', 'gridded_PH.tiff') - if self.comm.Get_rank() == 0: - IoRaster(self.comm).clip_raster_with_shapefile_poly(ph_path, self.clip.shapefile, clipped_ph_path, - nodata=255) - self.logger.write_log('PH clipped done!', message_level=3) - ph_gridded = IoRaster(self.comm).to_shapefile_parallel(clipped_ph_path, nodata=255) - self.logger.write_log('PH to shapefile done!', message_level=3) + if self.__comm.Get_rank() == 0: + IoRaster(self.__comm).clip_raster_with_shapefile_poly(ph_path, self.clip.shapefile, clipped_ph_path, + nodata=255) + self.__logger.write_log('PH clipped done!', message_level=3) + ph_gridded = IoRaster(self.__comm).to_shapefile_parallel(clipped_ph_path, nodata=255) + self.__logger.write_log('PH to shapefile done!', message_level=3) ph_gridded.set_index('CELL_ID', inplace=True) 
ph_gridded.rename(columns={'data': 'ph'}, inplace=True) - ph_gridded = IoShapefile(self.comm).balance(ph_gridded) + ph_gridded = IoShapefile(self.__comm).balance(ph_gridded) # To correct input data ph_gridded['ph'] = ph_gridded['ph'] / 10 - self.logger.write_log('PH to destiny resolution ...', message_level=3) + self.__logger.write_log('PH to destiny resolution ...', message_level=3) ph_gridded = self.to_dst_resolution(ph_gridded, value='ph') - self.logger.write_log('PH to destiny resolution done!', message_level=3) + self.__logger.write_log('PH to destiny resolution done!', message_level=3) - self.logger.write_log('Getting CEC from {0}'.format(cec_path), message_level=2) + self.__logger.write_log('Getting CEC from {0}'.format(cec_path), message_level=2) clipped_cec_path = os.path.join(self.auxiliary_dir, 'fertilizers', 'gridded_CEC.tiff') - if self.comm.Get_rank() == 0: - IoRaster(self.comm).clip_raster_with_shapefile_poly(cec_path, self.clip.shapefile, clipped_cec_path, - nodata=-32768) - self.logger.write_log('CEC clipped done!', message_level=3) - cec_gridded = IoRaster(self.comm).to_shapefile_parallel(clipped_cec_path, nodata=-32768) - self.logger.write_log('CEC to shapefile done!', message_level=3) + if self.__comm.Get_rank() == 0: + IoRaster(self.__comm).clip_raster_with_shapefile_poly(cec_path, self.clip.shapefile, clipped_cec_path, + nodata=-32768) + self.__logger.write_log('CEC clipped done!', message_level=3) + cec_gridded = IoRaster(self.__comm).to_shapefile_parallel(clipped_cec_path, nodata=-32768) + self.__logger.write_log('CEC to shapefile done!', message_level=3) cec_gridded.rename(columns={'data': 'cec'}, inplace=True) cec_gridded.set_index('CELL_ID', inplace=True) - cec_gridded = IoShapefile(self.comm).balance(cec_gridded) - self.logger.write_log('CEC to destiny resolution ...', message_level=3) + cec_gridded = IoShapefile(self.__comm).balance(cec_gridded) + self.__logger.write_log('CEC to destiny resolution ...', message_level=3) cec_gridded = 
self.to_dst_resolution(cec_gridded.reset_index(), value='cec') - self.logger.write_log('CEC to destiny resolution done!', message_level=3) + self.__logger.write_log('CEC to destiny resolution done!', message_level=3) - ph_gridded = IoShapefile(self.comm).gather_shapefile(ph_gridded.reset_index()) - cec_gridded = IoShapefile(self.comm).gather_shapefile(cec_gridded.reset_index()) - if self.comm.Get_rank() == 0: + ph_gridded = IoShapefile(self.__comm).gather_shapefile(ph_gridded.reset_index()) + cec_gridded = IoShapefile(self.__comm).gather_shapefile(cec_gridded.reset_index()) + if self.__comm.Get_rank() == 0: gridded_ph_cec = ph_gridded # gridded_ph_cec = ph_gridded.groupby('FID').mean() # cec_gridded = cec_gridded.groupby('FID').mean() @@ -216,7 +216,7 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): crs=self.grid.shapefile.crs) else: gridded_ph_cec = None - gridded_ph_cec = IoShapefile(self.comm).split_shapefile(gridded_ph_cec) + gridded_ph_cec = IoShapefile(self.__comm).split_shapefile(gridded_ph_cec) # print('Rank {0} -Z PH: \n{1}\n'.format(self.comm.Get_rank(), np.unique(gridded_ph_cec['ph']))) # print('Rank {0} -Z CEC: \n{1}\n'.format(self.comm.Get_rank(), np.unique(gridded_ph_cec['cec']))) # print('Rank {0} -Z FID: \n{1}\n'.format(self.comm.Get_rank(), np.unique(gridded_ph_cec.index))) @@ -226,19 +226,19 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): gridded_ph_cec = gridded_ph_cec[gridded_ph_cec['nut_code'] != -999] gridded_ph_cec.set_index('FID', inplace=True) - IoShapefile(self.comm).write_shapefile_parallel(gridded_ph_cec.reset_index(), gridded_ph_cec_path) + IoShapefile(self.__comm).write_shapefile_parallel(gridded_ph_cec.reset_index(), gridded_ph_cec_path) - gridded_ph_cec = IoShapefile(self.comm).gather_bcast_shapefile(gridded_ph_cec) + gridded_ph_cec = IoShapefile(self.__comm).gather_bcast_shapefile(gridded_ph_cec) else: - gridded_ph_cec = IoShapefile(self.comm).read_shapefile_serial(gridded_ph_cec_path) + gridded_ph_cec 
= IoShapefile(self.__comm).read_shapefile_serial(gridded_ph_cec_path) gridded_ph_cec.set_index('FID', inplace=True) # Selecting only PH and CEC cells that have also some crop. gridded_ph_cec = gridded_ph_cec.loc[self.crop_distribution.index, :] # gridded_ph_cec = gridded_ph_cec.loc[(gridded_ph_cec['ph'] > 0) & (gridded_ph_cec['cec'] > 0)] - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_gridded_constants', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_gridded_constants', + timeit.default_timer() - spent_time) return gridded_ph_cec def get_daily_inputs(self, yearly_emissions): @@ -254,34 +254,34 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): for day in self.day_dict.keys(): aux_df = yearly_emissions.copy().reset_index() - self.logger.write_log('Getting temperature from {0}'.format( + self.__logger.write_log('Getting temperature from {0}'.format( os.path.join(self.temperature_path, 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))))) - meteo_df = IoNetcdf(self.comm).get_data_from_netcdf( + meteo_df = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.temperature_path, 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'tas', 'daily', day, geometry_shp) meteo_df['tas'] = meteo_df['tas'] - 273.15 - self.logger.write_log('Getting surface wind speed from {0}'.format( + self.__logger.write_log('Getting surface wind speed from {0}'.format( os.path.join(self.wind_speed_path, 'sfcWind_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))))) - meteo_df['sfcWind'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo_df['sfcWind'] = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.wind_speed_path, 'sfcWind_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'sfcWind', 'daily', day, geometry_shp).loc[:, 'sfcWind'] for crop in self.crop_list: - self.logger.write_log('Getting fertilizer denominator yearly factor from {0}'.format( + 
self.__logger.write_log('Getting fertilizer denominator yearly factor from {0}'.format( self.fertilizer_denominator_yearly_factor_path.replace('', crop).replace( '', str(day.year)))) - meteo_df['d_{0}'.format(crop)] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo_df['d_{0}'.format(crop)] = IoNetcdf(self.__comm).get_data_from_netcdf( self.fertilizer_denominator_yearly_factor_path.replace('', crop).replace( '', str(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - self.logger.write_log('Getting growing degree day from {0}'.format( + self.__logger.write_log('Getting growing degree day from {0}'.format( self.crop_growing_degree_day_path.replace('', 'winter').replace('', str(day.year)))) - meteo_df['winter'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo_df['winter'] = IoNetcdf(self.__comm).get_data_from_netcdf( self.crop_growing_degree_day_path.replace('', 'winter').replace('', str(day.year)), 'Tsum', 'yearly', day, geometry_shp).loc[:, 'Tsum'].astype(np.int16) - self.logger.write_log('Getting growing degree day from {0}'.format( + self.__logger.write_log('Getting growing degree day from {0}'.format( self.crop_growing_degree_day_path.replace('', 'spring').replace('', str(day.year)))) - meteo_df['spring'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo_df['spring'] = IoNetcdf(self.__comm).get_data_from_netcdf( self.crop_growing_degree_day_path.replace('', 'spring').replace('', str(day.year)), 'Tsum', 'yearly', day, geometry_shp).loc[:, 'Tsum'].astype(np.int16) @@ -297,14 +297,14 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): aux_df.set_index('FID', inplace=True) daily_inputs[day] = aux_df - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_daily_inputs', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_daily_inputs', + timeit.default_timer() - spent_time) return daily_inputs def calculate_yearly_emissions(self): spent_time = timeit.default_timer() 
- self.logger.write_log('Calculating yearly emissions') + self.__logger.write_log('Calculating yearly emissions') self.crop_distribution = pd.merge(self.crop_distribution.reset_index(), self.ef_by_crop.loc[:, ['nut_code']].reset_index(), how='left', on='FID') @@ -317,8 +317,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): self.fertilizer_rate.loc[self.fertilizer_rate['code'] == x.name, crop].values[0])) self.crop_distribution[crop] = self.crop_distribution[crop] * self.ef_by_crop['EF_{0}'.format(crop)] - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_yearly_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_yearly_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution def calculate_nh3_emissions(self, day, daily_inputs): @@ -364,8 +364,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): # From kg NH3-N to g NH3 daily_emissions['nh3'] = daily_emissions['nh3'].multiply((17. / 14.) * 1000.) 
- self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_nh3_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_nh3_emissions', + timeit.default_timer() - spent_time) return daily_emissions def add_dates(self, df_by_day): @@ -383,23 +383,23 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): dataframe_by_day = self.to_timezone(dataframe_by_day) dataframe_by_day.set_index(['FID', 'tstep'], inplace=True) - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'add_dates', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'add_dates', + timeit.default_timer() - spent_time) return dataframe_by_day def calculate_daily_emissions(self, emissions): spent_time = timeit.default_timer() - self.logger.write_log('Calculating daily emissions') + self.__logger.write_log('Calculating daily emissions') df_by_day = self.get_daily_inputs(emissions) for day, daily_inputs in df_by_day.items(): df_by_day[day] = self.calculate_nh3_emissions(day, daily_inputs) - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_daily_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_daily_emissions', + timeit.default_timer() - spent_time) return df_by_day def calculate_hourly_emissions(self, emissions): spent_time = timeit.default_timer() - self.logger.write_log('Calculating hourly emissions') + self.__logger.write_log('Calculating hourly emissions') emissions['hour'] = emissions['date'].dt.hour emissions['nh3'] = emissions.groupby('hour')['nh3'].apply( lambda x: x.multiply(self.hourly_profiles.loc['nh3', x.name])) @@ -407,13 +407,13 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): emissions['date'] = emissions['date_utc'] emissions.drop(columns=['hour', 'date_utc'], axis=1, inplace=True) - 
self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return emissions def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') emissions = self.calculate_yearly_emissions() @@ -426,7 +426,7 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): emissions['layer'] = 0 emissions.set_index(['FID', 'layer', 'tstep'], inplace=True) - self.logger.write_log('\t\tCrop fertilizers emissions calculated', message_level=2) - self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tCrop fertilizers emissions calculated', message_level=2) + self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_emissions', + timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/agricultural_crop_operations_sector.py b/hermesv3_bu/sectors/agricultural_crop_operations_sector.py index 9edb2a9..5457b92 100755 --- a/hermesv3_bu/sectors/agricultural_crop_operations_sector.py +++ b/hermesv3_bu/sectors/agricultural_crop_operations_sector.py @@ -106,7 +106,7 @@ class AgriculturalCropOperationsSector(AgriculturalSector): self.months = self.get_date_array_by_month() - self.logger.write_time_log('AgriculturalCropOperationsSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropOperationsSector', '__init__', timeit.default_timer() - spent_time) def read_monthly_profiles(self, path): """ @@ -126,8 +126,8 @@ class AgriculturalCropOperationsSector(AgriculturalSector): profiles.reset_index(inplace=True) profiles.set_index(['P_month', 'operation'], inplace=True) - 
self.logger.write_time_log('AgriculturalCropOperationsSector', 'read_monthly_profiles', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropOperationsSector', 'read_monthly_profiles', + timeit.default_timer() - spent_time) return profiles def get_date_array_by_month(self): @@ -140,8 +140,8 @@ class AgriculturalCropOperationsSector(AgriculturalSector): for month in month_list: month_dict[month] = np.array(self.date_array)[month_array == month] - self.logger.write_time_log('AgriculturalCropOperationsSector', 'get_date_array_by_month', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropOperationsSector', 'get_date_array_by_month', + timeit.default_timer() - spent_time) return month_dict @@ -168,8 +168,8 @@ class AgriculturalCropOperationsSector(AgriculturalSector): # From Kg to g factor *= 1000.0 month_distribution[pollutant] += self.crop_distribution[crop].multiply(factor) - self.logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_distribution_by_month', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_distribution_by_month', + timeit.default_timer() - spent_time) return month_distribution @@ -187,7 +187,7 @@ class AgriculturalCropOperationsSector(AgriculturalSector): dataframe_by_day.set_index(['FID', 'tstep'], inplace=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.logger.write_time_log('AgriculturalCropOperationsSector', 'add_dates', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropOperationsSector', 'add_dates', timeit.default_timer() - spent_time) return dataframe_by_day @@ -239,14 +239,14 @@ class AgriculturalCropOperationsSector(AgriculturalSector): self.crop_distribution.drop(columns=['month', 'weekday', 'hour', 'WF', 'HF', 'date_as_date'], inplace=True) - self.logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_hourly_emissions', - 
timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') distribution_by_month = {} for month in self.months.keys(): @@ -259,7 +259,7 @@ class AgriculturalCropOperationsSector(AgriculturalSector): self.crop_distribution['layer'] = 0 - self.logger.write_log('\t\tCrop operations emissions calculated', message_level=2) - self.logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tCrop operations emissions calculated', message_level=2) + self.__logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution diff --git a/hermesv3_bu/sectors/agricultural_machinery_sector.py b/hermesv3_bu/sectors/agricultural_machinery_sector.py index 6d62458..8a82fed 100755 --- a/hermesv3_bu/sectors/agricultural_machinery_sector.py +++ b/hermesv3_bu/sectors/agricultural_machinery_sector.py @@ -53,7 +53,7 @@ class AgriculturalMachinerySector(AgriculturalSector): self.vehicle_power = self.read_profiles(vehicle_power_path) self.emission_factors = self.read_profiles(ef_files_dir) - self.logger.write_time_log('AgriculturalMachinerySector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', '__init__', timeit.default_timer() - spent_time) def get_crop_distribution_by_nut(self, crop_distribution, nut_shapefile, nut_code=None, write_crop_by_nut=False): spent_time = timeit.default_timer() @@ -92,12 +92,12 @@ class AgriculturalMachinerySector(AgriculturalSector): crop_distribution.drop(columns=self.crop_list, inplace=True) 
crop_distribution.rename(columns={nut_code: 'NUT_code'}, inplace=True) - IoShapefile(self.comm).write_shapefile_parallel(crop_distribution, crop_distribution_nut_path) + IoShapefile(self.__comm).write_shapefile_parallel(crop_distribution, crop_distribution_nut_path) else: - crop_distribution = IoShapefile(self.comm).read_shapefile(crop_distribution_nut_path) + crop_distribution = IoShapefile(self.__comm).read_shapefile(crop_distribution_nut_path) - self.logger.write_time_log('AgriculturalMachinerySector', 'get_crop_distribution_by_nut', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'get_crop_distribution_by_nut', + timeit.default_timer() - spent_time) return crop_distribution @@ -110,8 +110,8 @@ class AgriculturalMachinerySector(AgriculturalSector): for month in month_list: month_dict[month] = np.array(self.date_array)[month_array == month] - self.logger.write_time_log('AgriculturalMachinerySector', 'get_date_array_by_month', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'get_date_array_by_month', + timeit.default_timer() - spent_time) return month_dict def calcualte_yearly_emissions_by_nut_vehicle(self): @@ -221,8 +221,8 @@ class AgriculturalMachinerySector(AgriculturalSector): database.drop(columns=['N', 'S', 'T', 'P', 'LF'], inplace=True) database = database.groupby(['NUT_code', 'vehicle']).sum() - self.logger.write_time_log('AgriculturalMachinerySector', 'calcualte_yearly_emissions_by_nut_vehicle', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'calcualte_yearly_emissions_by_nut_vehicle', + timeit.default_timer() - spent_time) return database def calculate_monthly_emissions_by_nut(self, month): @@ -242,8 +242,8 @@ class AgriculturalMachinerySector(AgriculturalSector): dataframe = dataframe.groupby('NUT_code').sum() - self.logger.write_time_log('AgriculturalMachinerySector', 
'calculate_monthly_emissions_by_nut', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'calculate_monthly_emissions_by_nut', + timeit.default_timer() - spent_time) return dataframe def distribute(self, dataframe): @@ -263,8 +263,8 @@ class AgriculturalMachinerySector(AgriculturalSector): self.crop_distribution['timezone'] = timezones self.crop_distribution.reset_index(inplace=True) - self.logger.write_time_log('AgriculturalMachinerySector', 'distribute', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'distribute', + timeit.default_timer() - spent_time) return self.crop_distribution def add_dates(self, df_by_month): @@ -281,7 +281,7 @@ class AgriculturalMachinerySector(AgriculturalSector): dataframe_by_day = pd.concat(df_list, ignore_index=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.logger.write_time_log('AgriculturalMachinerySector', 'add_dates', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'add_dates', timeit.default_timer() - spent_time) return dataframe_by_day def calculate_hourly_emissions(self): @@ -331,13 +331,13 @@ class AgriculturalMachinerySector(AgriculturalSector): self.crop_distribution['HF'] * self.crop_distribution['WF'], axis=0) self.crop_distribution.drop(columns=['month', 'weekday', 'hour', 'WF', 'HF', 'date_as_date'], inplace=True) - self.logger.write_time_log('AgriculturalMachinerySector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalMachinerySector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') distribution_by_month = {} for month in self.months.keys(): @@ -351,7 +351,7 @@ class 
AgriculturalMachinerySector(AgriculturalSector): self.crop_distribution = self.crop_distribution.groupby(['FID', 'layer', 'tstep']).sum() self.crop_distribution = self.speciate(self.crop_distribution) - self.logger.write_log('\t\tAgricultural machinery emissions calculated', message_level=2) - self.logger.write_time_log('AgriculturalMachinerySector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tAgricultural machinery emissions calculated', message_level=2) + self.__logger.write_time_log('AgriculturalMachinerySector', 'calculate_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution diff --git a/hermesv3_bu/sectors/agricultural_sector.py b/hermesv3_bu/sectors/agricultural_sector.py index 07a10e9..5cc43e9 100755 --- a/hermesv3_bu/sectors/agricultural_sector.py +++ b/hermesv3_bu/sectors/agricultural_sector.py @@ -133,31 +133,31 @@ class AgriculturalSector(Sector): self.crop_distribution = self.get_crops_by_dst_cell( os.path.join(auxiliary_dir, 'agriculture', 'crops', 'crops.shp')) - self.logger.write_time_log('AgriculturalSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', '__init__', timeit.default_timer() - spent_time) def involved_grid_cells(self, src_shp): spent_time = timeit.default_timer() - grid_shp = IoShapefile(self.comm).split_shapefile(self.grid.shapefile) + grid_shp = IoShapefile(self.__comm).split_shapefile(self.grid.shapefile) src_union = src_shp.to_crs(grid_shp.crs).geometry.unary_union grid_shp = grid_shp.loc[grid_shp.intersects(src_union), :] - grid_shp_list = self.comm.gather(grid_shp, root=0) + grid_shp_list = self.__comm.gather(grid_shp, root=0) animal_dist_list = [] - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: for small_grid in grid_shp_list: animal_dist_list.append(src_shp.loc[src_shp.intersects( small_grid.to_crs(src_shp.crs).geometry.unary_union), :]) grid_shp = pd.concat(grid_shp_list) - 
grid_shp = np.array_split(grid_shp, self.comm.Get_size()) + grid_shp = np.array_split(grid_shp, self.__comm.Get_size()) else: grid_shp = None animal_dist_list = None - grid_shp = self.comm.scatter(grid_shp, root=0) + grid_shp = self.__comm.scatter(grid_shp, root=0) - animal_dist = self.comm.scatter(animal_dist_list, root=0) + animal_dist = self.__comm.scatter(animal_dist_list, root=0) - self.logger.write_time_log('AgriculturalSector', 'involved_grid_cells', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'involved_grid_cells', timeit.default_timer() - spent_time) return grid_shp, animal_dist @@ -170,7 +170,7 @@ class AgriculturalSector(Sector): day_dict = {} for key, value in zip(days, num_days): day_dict[key] = value - self.logger.write_time_log('AgriculturalSector', 'calculate_num_days', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'calculate_num_days', timeit.default_timer() - spent_time) return day_dict def get_crop_from_land_uses(self, crop_from_land_use_path): @@ -207,7 +207,7 @@ class AgriculturalSector(Sector): weights = list(map(float, re.split(' , |, | ,|,| ', element.weight))) crop_dict[element.crop] = list(zip(land_uses, weights)) - self.logger.write_time_log('AgriculturalSector', 'get_crop_from_land_uses', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_crop_from_land_uses', timeit.default_timer() - spent_time) return crop_dict def get_involved_land_uses(self): @@ -225,7 +225,7 @@ class AgriculturalSector(Sector): land_use = int(land_use_and_weight[0]) if land_use not in land_uses_list: land_uses_list.append(land_use) - self.logger.write_time_log('AgriculturalSector', 'get_involved_land_uses', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_involved_land_uses', timeit.default_timer() - spent_time) return land_uses_list @@ -251,9 +251,9 @@ class AgriculturalSector(Sector): 
land_uses_clipped = IoRaster(self.comm_agr).clip_raster_with_shapefile_poly( self.land_uses_path, self.clip.shapefile, land_uses_clipped, values=land_uses) self.comm_agr.Barrier() - self.logger.write_log('\t\tRaster {0} to_shapefile.'.format(land_uses_clipped), message_level=3) + self.__logger.write_log('\t\tRaster {0} to_shapefile.'.format(land_uses_clipped), message_level=3) land_use_src_by_nut = IoRaster(self.comm_agr).to_shapefile_parallel(land_uses_clipped) - self.logger.write_log('\t\tFiltering shapefile.'.format(land_uses_clipped), message_level=3) + self.__logger.write_log('\t\tFiltering shapefile.'.format(land_uses_clipped), message_level=3) land_use_src_by_nut.rename(columns={'data': 'land_use'}, inplace=True) land_use_src_by_nut['land_use'] = land_use_src_by_nut['land_use'].astype(np.int16) @@ -264,14 +264,14 @@ class AgriculturalSector(Sector): land_use_src_by_nut.set_index('CELL_ID', inplace=True) if write: - self.logger.write_log('\t\tWriting {0} file.'.format(land_use_src_by_nut_path), message_level=3) + self.__logger.write_log('\t\tWriting {0} file.'.format(land_use_src_by_nut_path), message_level=3) IoShapefile(self.comm_agr).write_shapefile_parallel(land_use_src_by_nut.reset_index(), land_use_src_by_nut_path) else: land_use_src_by_nut = IoShapefile(self.comm_agr).read_shapefile_parallel(land_use_src_by_nut_path) land_use_src_by_nut.set_index('CELL_ID', inplace=True) - self.logger.write_time_log('AgriculturalSector', 'get_land_use_src_by_nut', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_land_use_src_by_nut', timeit.default_timer() - spent_time) return land_use_src_by_nut @@ -293,7 +293,7 @@ class AgriculturalSector(Sector): df['nut_code'] = df['nut_code'].astype(np.int32) df.set_index(['nut_code', 'land_use'], inplace=True) - self.logger.write_time_log('AgriculturalSector', 'get_tot_land_use_by_nut', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 
'get_tot_land_use_by_nut', timeit.default_timer() - spent_time) return df def get_land_use_by_nut_csv(self, land_use_distribution_src_nut, land_uses): @@ -319,7 +319,7 @@ class AgriculturalSector(Sector): land_use_by_nut['area'] += land_use_distribution_src_nut.groupby(['nut_code', 'land_use'])['area'].sum() land_use_by_nut.fillna(0.0, inplace=True) - self.logger.write_time_log('AgriculturalSector', 'get_land_use_by_nut_csv', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_land_use_by_nut_csv', timeit.default_timer() - spent_time) return land_use_by_nut def land_use_to_crop_by_nut(self, land_use_by_nut, nuts=None): @@ -351,7 +351,7 @@ class AgriculturalSector(Sector): aux_df.drop(columns=['land_use'], inplace=True) aux_df.set_index('nut_code', inplace=True) new_df[crop] += aux_df['area'] * weight - self.logger.write_time_log('AgriculturalSector', 'land_use_to_crop_by_nut', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'land_use_to_crop_by_nut', timeit.default_timer() - spent_time) return new_df @@ -374,7 +374,7 @@ class AgriculturalSector(Sector): for crop in crop_by_nut.columns: crop_share_by_nut[crop] = crop_by_nut[crop] / tot_crop_by_nut[crop] - self.logger.write_time_log('AgriculturalSector', 'get_crop_shape_by_nut', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_crop_shape_by_nut', timeit.default_timer() - spent_time) return crop_share_by_nut @@ -401,7 +401,7 @@ class AgriculturalSector(Sector): crop_by_nut = crop_by_nut.loc[crop_share_by_nut.index, :] crop_area_by_nut = crop_share_by_nut * crop_by_nut - self.logger.write_time_log('AgriculturalSector', 'get_crop_area_by_nut', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_crop_area_by_nut', timeit.default_timer() - spent_time) return crop_area_by_nut def calculate_crop_distribution_src(self, crop_area_by_nut, 
land_use_distribution_src_nut): @@ -436,8 +436,8 @@ class AgriculturalSector(Sector): crop_distribution_src.loc[crop_distribution_src['nut_code'] == nut, crop] *= \ crop_area_by_nut.loc[nut, crop] - self.logger.write_time_log('AgriculturalSector', 'calculate_crop_distribution_src', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'calculate_crop_distribution_src', + timeit.default_timer() - spent_time) crop_distribution_src = IoShapefile(self.comm_agr).balance(crop_distribution_src) return crop_distribution_src @@ -473,8 +473,8 @@ class AgriculturalSector(Sector): crop_distribution.reset_index(inplace=True) crop_distribution.set_index('FID', inplace=True) - self.logger.write_time_log('AgriculturalSector', 'get_crop_distribution_in_dst_cells', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_crop_distribution_in_dst_cells', + timeit.default_timer() - spent_time) return crop_distribution def get_crops_by_dst_cell(self, file_path): @@ -492,15 +492,15 @@ class AgriculturalSector(Sector): """ spent_time = timeit.default_timer() if not os.path.exists(file_path): - self.logger.write_log('Creating the crop distribution shapefile.', message_level=2) + self.__logger.write_log('Creating the crop distribution shapefile.', message_level=2) - self.logger.write_log('\tCreating land use distribution on the source resolution.', message_level=3) + self.__logger.write_log('\tCreating land use distribution on the source resolution.', message_level=3) involved_land_uses = self.get_involved_land_uses() land_use_distribution_src_nut = self.get_land_use_src_by_nut(involved_land_uses, write=False) land_use_by_nut = self.get_land_use_by_nut_csv(land_use_distribution_src_nut, involved_land_uses) - self.logger.write_log('\tCreating the crop distribution on the source resolution.', message_level=3) + self.__logger.write_log('\tCreating the crop distribution on the source resolution.', message_level=3) 
crop_by_nut = self.land_use_to_crop_by_nut(land_use_by_nut) tot_land_use_by_nut = self.get_tot_land_use_by_nut(involved_land_uses) tot_crop_by_nut = self.land_use_to_crop_by_nut( @@ -510,9 +510,9 @@ class AgriculturalSector(Sector): crop_distribution_src = self.calculate_crop_distribution_src( crop_area_by_nut, land_use_distribution_src_nut) - self.logger.write_log('\tCreating the crop distribution on the grid resolution.', message_level=3) + self.__logger.write_log('\tCreating the crop distribution on the grid resolution.', message_level=3) crop_distribution_dst = self.get_crop_distribution_in_dst_cells(crop_distribution_src) - self.logger.write_log('\tCreating the crop distribution shapefile.', message_level=3) + self.__logger.write_log('\tCreating the crop distribution shapefile.', message_level=3) crop_distribution_dst = IoShapefile(self.comm_agr).gather_shapefile(crop_distribution_dst.reset_index()) if self.comm_agr.Get_rank() == 0: crop_distribution_dst = crop_distribution_dst.groupby('FID').sum() @@ -524,19 +524,19 @@ class AgriculturalSector(Sector): else: crop_distribution_dst = None - self.logger.write_log('\tAdding timezone to the shapefile.', message_level=3) + self.__logger.write_log('\tAdding timezone to the shapefile.', message_level=3) crop_distribution_dst = IoShapefile(self.comm_agr).split_shapefile(crop_distribution_dst) crop_distribution_dst = self.add_timezone(crop_distribution_dst) - self.logger.write_log('\tWriting the crop distribution shapefile.', message_level=3) + self.__logger.write_log('\tWriting the crop distribution shapefile.', message_level=3) IoShapefile(self.comm_agr).write_shapefile_parallel(crop_distribution_dst, file_path) - crop_distribution_dst = IoShapefile(self.comm).read_shapefile_parallel(file_path) + crop_distribution_dst = IoShapefile(self.__comm).read_shapefile_parallel(file_path) crop_distribution_dst.set_index('FID', inplace=True, drop=True) # Filtering crops by used on the sub-sector (operations, fertilizers, 
machinery) crop_distribution_dst = crop_distribution_dst.loc[:, self.crop_list + ['timezone', 'geometry']] - self.logger.write_time_log('AgriculturalSector', 'get_crops_by_dst_cell', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AgriculturalSector', 'get_crops_by_dst_cell', timeit.default_timer() - spent_time) return crop_distribution_dst @staticmethod diff --git a/hermesv3_bu/sectors/aviation_sector.py b/hermesv3_bu/sectors/aviation_sector.py index b5a6101..98d1b1e 100755 --- a/hermesv3_bu/sectors/aviation_sector.py +++ b/hermesv3_bu/sectors/aviation_sector.py @@ -172,17 +172,17 @@ class AviationSector(Sector): self.trajectory_arrival_distribution = self.calculate_trajectories_distribution( airport_trajectories_shapefile, 'arrival') comm.Barrier() - self.logger.write_time_log('AviationSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', '__init__', timeit.default_timer() - spent_time) def read_ef_files(self, ef_path): - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: ef_files = {} for phase in PHASE_TYPE.keys(): ef_files[phase] = pd.read_csv(os.path.join(ef_path, PHASE_EF_FILE[phase])) else: ef_files = None - ef_files = self.comm.bcast(ef_files, root=0) + ef_files = self.__comm.bcast(ef_files, root=0) return ef_files @@ -219,7 +219,7 @@ class AviationSector(Sector): trajectories.drop(columns=['arrival_f', 'departure_f'], inplace=True) trajectories.set_index(['runway_id', 'operation'], inplace=True) - self.logger.write_time_log('AviationSector', 'read_trajectories_shapefile', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'read_trajectories_shapefile', timeit.default_timer() - spent_time) return trajectories @@ -234,7 +234,7 @@ class AviationSector(Sector): :rtype: GeoDataFrame, None """ spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: runway_shapefile = 
gpd.read_file(airport_runways_shapefile_path) runway_shapefile.set_index(['airport_id', 'runway_id'], inplace=True) runway_shapefile = runway_shapefile.loc[self.airport_list_full, :] @@ -243,7 +243,7 @@ class AviationSector(Sector): runway_shapefile.rename(columns={'approach_f': 'arrival_f', 'climbout_f': 'departure_f'}, inplace=True) else: runway_shapefile = None - self.logger.write_time_log('AviationSector', 'read_runway_shapefile', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'read_runway_shapefile', timeit.default_timer() - spent_time) return runway_shapefile @@ -273,7 +273,7 @@ class AviationSector(Sector): profiles.rename(columns={-1: 'P_hour', -3: "operation", -2: "day_type"}, inplace=True) profiles.set_index(["P_hour", "operation", "day_type"], inplace=True) - self.logger.write_time_log('AviationSector', 'read_hourly_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'read_hourly_profiles', timeit.default_timer() - spent_time) return profiles @@ -308,8 +308,8 @@ class AviationSector(Sector): operations.set_index(['airport_id', 'plane_id', 'operation'], inplace=True) operations.rename(columns={'1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, '10': 10, '11': 11, '12': 12}, inplace=True) - self.logger.write_time_log('AviationSector', 'read_operations_update_plane_list', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'read_operations_update_plane_list', + timeit.default_timer() - spent_time) return operations @@ -329,7 +329,7 @@ class AviationSector(Sector): dataframe = dataframe.loc[dataframe['plane_id'].isin(self.plane_list)] dataframe.set_index('plane_id', inplace=True) - self.logger.write_time_log('AviationSector', 'read_planes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'read_planes', timeit.default_timer() - spent_time) return dataframe @@ -347,7 +347,7 @@ class 
AviationSector(Sector): dataframe = pd.read_csv(times_path) dataframe = dataframe.loc[dataframe['airport_id'].isin(self.airport_list)] dataframe.set_index(['airport_id', 'plane_type'], inplace=True) - self.logger.write_time_log('AviationSector', 'read_times_info', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'read_times_info', timeit.default_timer() - spent_time) return dataframe @@ -369,7 +369,7 @@ class AviationSector(Sector): :rtype: list """ spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: airport_shapefile = airport_shapefile.reset_index() airport_shapefile = gpd.sjoin(airport_shapefile.to_crs(self.grid.shapefile.crs), self.clip.shapefile.to_crs(self.grid.shapefile.crs), how='inner', @@ -395,8 +395,8 @@ class AviationSector(Sector): # Only for master (rank == 0) self.airport_list_full = new_list - new_list = [new_list[i * len(new_list) // self.comm.size: (i + 1) * len(new_list) // self.comm.size] - for i in range(self.comm.size)] + new_list = [new_list[i * len(new_list) // self.__comm.size: (i + 1) * len(new_list) // self.__comm.size] + for i in range(self.__comm.size)] for sublist in new_list: if len(sublist) == 0: error_exit("The selected number of processors is to high. 
" + @@ -405,8 +405,8 @@ class AviationSector(Sector): else: new_list = None - new_list = self.comm.scatter(new_list, root=0) - self.logger.write_time_log('AviationSector', 'get_airport_list', timeit.default_timer() - spent_time) + new_list = self.__comm.scatter(new_list, root=0) + self.__logger.write_time_log('AviationSector', 'get_airport_list', timeit.default_timer() - spent_time) return new_list @@ -424,11 +424,11 @@ class AviationSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\t\tCalculating airport distribution', message_level=2) + self.__logger.write_log('\t\tCalculating airport distribution', message_level=2) airport_distribution_path = os.path.join(self.auxiliary_dir, 'aviation', 'airport_distribution.csv') if not os.path.exists(airport_distribution_path): - if self.comm.rank == 0: + if self.__comm.rank == 0: airport_shapefile = airport_shapefile.loc[self.airport_list_full, :].copy() if not os.path.exists(os.path.dirname(airport_distribution_path)): os.makedirs(os.path.dirname(airport_distribution_path)) @@ -445,12 +445,12 @@ class AviationSector(Sector): airport_distribution.to_csv(airport_distribution_path) else: airport_distribution = None - airport_distribution = self.comm.bcast(airport_distribution, root=0) + airport_distribution = self.__comm.bcast(airport_distribution, root=0) else: airport_distribution = pd.read_csv(airport_distribution_path) airport_distribution.set_index(['airport_id', 'FID', 'layer'], inplace=True) - self.logger.write_time_log('AviationSector', 'calculate_airport_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'calculate_airport_distribution', + timeit.default_timer() - spent_time) return airport_distribution @@ -482,13 +482,13 @@ class AviationSector(Sector): return df.loc[:, ['{0}_f'.format(phase_type)]] - self.logger.write_log('\t\tCalculating runway distribution for {0}'.format(phase_type), message_level=2) + 
self.__logger.write_log('\t\tCalculating runway distribution for {0}'.format(phase_type), message_level=2) runway_distribution_path = os.path.join( self.auxiliary_dir, 'aviation', 'runway_{0}_distribution.csv'.format(phase_type)) if not os.path.exists(runway_distribution_path): - if self.comm.rank == 0: + if self.__comm.rank == 0: runway_shapefile['{0}_f'.format(phase_type)] = runway_shapefile.groupby('airport_id').apply(normalize) runway_shapefile.to_crs(self.grid.shapefile.crs, inplace=True) @@ -516,12 +516,12 @@ class AviationSector(Sector): runway_shapefile.to_csv(runway_distribution_path) else: runway_shapefile = None - runway_shapefile = self.comm.bcast(runway_shapefile, root=0) + runway_shapefile = self.__comm.bcast(runway_shapefile, root=0) else: runway_shapefile = pd.read_csv(runway_distribution_path) runway_shapefile.set_index(['airport_id', 'FID', 'layer'], inplace=True) - self.logger.write_time_log('AviationSector', 'calculate_runway_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'calculate_runway_distribution', + timeit.default_timer() - spent_time) return runway_shapefile @@ -566,12 +566,12 @@ class AviationSector(Sector): df['fraction'] = df['fraction'] / total_fraction return df.loc[:, ['fraction']] - self.logger.write_log('\t\tCalculating trajectories distribution for {0}'.format(phase_type), message_level=2) + self.__logger.write_log('\t\tCalculating trajectories distribution for {0}'.format(phase_type), message_level=2) trajectories_distribution_path = os.path.join( self.auxiliary_dir, 'aviation', 'trajectories_{0}_distribution.csv'.format(phase_type)) if not os.path.exists(trajectories_distribution_path): - if self.comm.rank == 0: + if self.__comm.rank == 0: if not os.path.exists(os.path.dirname(trajectories_distribution_path)): os.makedirs(os.path.dirname(trajectories_distribution_path)) # Filtering shapefile @@ -625,12 +625,12 @@ class AviationSector(Sector): 
trajectories_distr.to_csv(trajectories_distribution_path) else: trajectories_distr = None - trajectories_distr = self.comm.bcast(trajectories_distr, root=0) + trajectories_distr = self.__comm.bcast(trajectories_distr, root=0) else: trajectories_distr = pd.read_csv(trajectories_distribution_path) trajectories_distr.set_index(['airport_id', 'FID', 'layer'], inplace=True) - self.logger.write_time_log('AviationSector', 'calculate_trajectories_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'calculate_trajectories_distribution', + timeit.default_timer() - spent_time) return trajectories_distr @@ -737,7 +737,7 @@ class AviationSector(Sector): dataframe.drop(columns=['f', 'plane_id', 'geometry'], inplace=True) dataframe = dataframe.groupby(['airport_id', 'tstep']).sum() - self.logger.write_time_log('AviationSector', 'get_main_engine_emission', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'get_main_engine_emission', timeit.default_timer() - spent_time) return dataframe @@ -835,8 +835,8 @@ class AviationSector(Sector): dataframe.drop(columns=['f', 'plane_id', 'geometry'], inplace=True) dataframe = dataframe.groupby(['airport_id', 'tstep']).sum() - self.logger.write_time_log('AviationSector', 'get_tyre_and_brake_wear_emission', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'get_tyre_and_brake_wear_emission', + timeit.default_timer() - spent_time) return dataframe @@ -938,8 +938,8 @@ class AviationSector(Sector): dataframe.drop(columns=['f', 'plane_id', 'geometry'], inplace=True) dataframe = dataframe.groupby(['airport_id', 'tstep']).sum() - self.logger.write_time_log('AviationSector', 'get_auxiliary_power_unit_emission', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'get_auxiliary_power_unit_emission', + timeit.default_timer() - spent_time) return dataframe @@ -966,7 +966,7 @@ class 
AviationSector(Sector): dataframe[pollutants] = dataframe[pollutants].multiply(dataframe['fraction'], axis=0) dataframe.drop(columns=['airport_id', 'fraction'], inplace=True) dataframe = dataframe.groupby(['FID', 'layer', 'tstep']).sum() - self.logger.write_time_log('AviationSector', 'distribute', timeit.default_timer() - spent_time) + self.__logger.write_time_log('AviationSector', 'distribute', timeit.default_timer() - spent_time) return dataframe @@ -978,43 +978,43 @@ class AviationSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') taxi_out = self.get_main_engine_emission('taxi_out') - self.logger.write_log('\t\tTaxi out emissions calculated.', message_level=2) + self.__logger.write_log('\t\tTaxi out emissions calculated.', message_level=2) taxi_in = self.get_main_engine_emission('taxi_in') - self.logger.write_log('\t\tTaxi in emissions calculated.', message_level=2) + self.__logger.write_log('\t\tTaxi in emissions calculated.', message_level=2) takeoff = self.get_main_engine_emission('takeoff') - self.logger.write_log('\t\tTake off emissions calculated.', message_level=2) + self.__logger.write_log('\t\tTake off emissions calculated.', message_level=2) climbout = self.get_main_engine_emission('climbout') - self.logger.write_log('\t\tClimb out emissions calculated.', message_level=2) + self.__logger.write_log('\t\tClimb out emissions calculated.', message_level=2) approach = self.get_main_engine_emission('approach') - self.logger.write_log('\t\tApproach emissions calculated.', message_level=2) + self.__logger.write_log('\t\tApproach emissions calculated.', message_level=2) landing = self.get_main_engine_emission('landing') - self.logger.write_log('\t\tLanding emissions calculated.', message_level=2) + self.__logger.write_log('\t\tLanding emissions calculated.', message_level=2) landing_wear = 
self.get_tyre_and_brake_wear_emission('landing_wear') - self.logger.write_log('\t\tLanding wear emissions calculated.', message_level=2) + self.__logger.write_log('\t\tLanding wear emissions calculated.', message_level=2) post_taxi_in = self.get_auxiliary_power_unit_emission('post-taxi_in') - self.logger.write_log('\t\tPost taxi in emissions calculated.', message_level=2) + self.__logger.write_log('\t\tPost taxi in emissions calculated.', message_level=2) pre_taxi_out = self.get_auxiliary_power_unit_emission('pre-taxi_out') - self.logger.write_log('\t\tPre taxi out emissions calculated.', message_level=2) + self.__logger.write_log('\t\tPre taxi out emissions calculated.', message_level=2) airport_emissions = pd.concat([pre_taxi_out, taxi_out, taxi_in, post_taxi_in]) airport_emissions = airport_emissions.groupby(['airport_id', 'tstep']).sum() airport_emissions = self.distribute(airport_emissions, self.airport_distribution) - self.logger.write_log('\t\tAirport emissions distributed (pre_taxi_out, taxi_out, taxi_in, post_taxi_in)', - message_level=2) + self.__logger.write_log('\t\tAirport emissions distributed (pre_taxi_out, taxi_out, taxi_in, post_taxi_in)', + message_level=2) runway_departure_emissions = self.distribute(takeoff, self.runway_departure_distribution) runway_arrival_emissions = self.distribute(landing, self.runway_arrival_distribution) runway_arrival_emissions_wear = self.distribute(landing_wear, self.runway_arrival_distribution) - self.logger.write_log('\t\tRunway emissions distributed (takeoff, landing, landing_wear)', message_level=2) + self.__logger.write_log('\t\tRunway emissions distributed (takeoff, landing, landing_wear)', message_level=2) trajectory_arrival_emissions = self.distribute(approach, self.trajectory_arrival_distribution) trajectory_departure_emisions = self.distribute(climbout, self.trajectory_departure_distribution) - self.logger.write_log('\t\tTrajectory emissions distributed (approach, climb out)', message_level=2) + 
self.__logger.write_log('\t\tTrajectory emissions distributed (approach, climb out)', message_level=2) emissions = pd.concat([airport_emissions, runway_departure_emissions, trajectory_arrival_emissions, trajectory_departure_emisions, runway_arrival_emissions], sort=False) @@ -1036,6 +1036,6 @@ class AviationSector(Sector): # From kmol/h or kg/h to mol/h or g/h emissions = emissions * 1000 - self.logger.write_log('\t\tAviation emissions calculated', message_level=2) - self.logger.write_time_log('AviationSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tAviation emissions calculated', message_level=2) + self.__logger.write_time_log('AviationSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/livestock_sector.py b/hermesv3_bu/sectors/livestock_sector.py index 51c4c69..2e2fc21 100755 --- a/hermesv3_bu/sectors/livestock_sector.py +++ b/hermesv3_bu/sectors/livestock_sector.py @@ -191,7 +191,7 @@ class LivestockSector(Sector): # Creating dst resolution shapefile with the amount of animals self.animals_df = self.create_animals_distribution(gridded_livestock_path, nut_shapefile_path, correction_split_factors_path) - self.logger.write_time_log('LivestockSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', '__init__', timeit.default_timer() - spent_time) def create_animals_distribution(self, gridded_livestock_path, nut_shapefile_path, correction_split_factors_path): """ @@ -224,15 +224,15 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\tCreating animal distribution', message_level=2) + self.__logger.write_log('\tCreating animal distribution', message_level=2) animals_df = self.create_animals_shapefile(gridded_livestock_path) animals_df = self.animal_distribution_by_category(animals_df, nut_shapefile_path, correction_split_factors_path) 
- self.logger.write_log('Animal distribution done', message_level=2) - self.logger.write_time_log('LivestockSector', 'create_animals_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_log('Animal distribution done', message_level=2) + self.__logger.write_time_log('LivestockSector', 'create_animals_distribution', + timeit.default_timer() - spent_time) return animals_df @@ -250,7 +250,7 @@ class LivestockSector(Sector): day_dict = {} for key, value in zip(days, num_days): day_dict[key] = value - self.logger.write_time_log('LivestockSector', 'calculate_num_days', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'calculate_num_days', timeit.default_timer() - spent_time) return day_dict @@ -275,27 +275,27 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\t\tCreating animal shapefile into source resolution', message_level=3) + self.__logger.write_log('\t\tCreating animal shapefile into source resolution', message_level=3) animal_distribution = None # For each one of the animals of the animal list for animal in self.animal_list: - self.logger.write_log('\t\t\t {0}'.format(animal), message_level=3) + self.__logger.write_log('\t\t\t {0}'.format(animal), message_level=3) # Each one of the animal distributions will be stored separately animal_distribution_path = os.path.join(self.auxiliary_dir, 'livestock', animal, '{0}.shp'.format(animal)) if not os.path.exists(animal_distribution_path): # Create clipped raster file clipped_raster_path = os.path.join( self.auxiliary_dir, 'livestock', animal, '{0}_clip.tif'.format(animal)) - if self.comm.Get_rank() == 0: - clipped_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + if self.__comm.Get_rank() == 0: + clipped_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( gridded_livestock_path.replace('', animal), self.clip.shapefile, clipped_raster_path) - animal_df = 
IoRaster(self.comm).to_shapefile_parallel(clipped_raster_path) + animal_df = IoRaster(self.__comm).to_shapefile_parallel(clipped_raster_path) animal_df.rename(columns={'data': animal}, inplace=True) animal_df.set_index('CELL_ID', inplace=True) - IoShapefile(self.comm).write_shapefile_parallel(animal_df.reset_index(), animal_distribution_path) + IoShapefile(self.__comm).write_shapefile_parallel(animal_df.reset_index(), animal_distribution_path) else: - animal_df = IoShapefile(self.comm).read_shapefile_parallel(animal_distribution_path) + animal_df = IoShapefile(self.__comm).read_shapefile_parallel(animal_distribution_path) animal_df.set_index('CELL_ID', inplace=True) # Creating full animal shapefile @@ -311,8 +311,8 @@ class LivestockSector(Sector): # Removing empty data animal_distribution = animal_distribution.loc[(animal_distribution[self.animal_list] != 0).any(axis=1), :] - self.logger.write_time_log('LivestockSector', 'create_animals_shapefile_src_resolution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'create_animals_shapefile_src_resolution', + timeit.default_timer() - spent_time) return animal_distribution def animals_shapefile_to_dst_resolution(self, animal_distribution): @@ -326,10 +326,10 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\t\tCreating animal shapefile into destiny resolution', message_level=3) + self.__logger.write_log('\t\tCreating animal shapefile into destiny resolution', message_level=3) self.grid.shapefile.reset_index(inplace=True) - animal_distribution = IoShapefile(self.comm).balance(animal_distribution) + animal_distribution = IoShapefile(self.__comm).balance(animal_distribution) # Changing coordinates system to the grid one animal_distribution.to_crs(self.grid.shapefile.crs, inplace=True) # Getting src area @@ -347,8 +347,8 @@ class LivestockSector(Sector): # Sum by destiny cell animal_distribution = 
animal_distribution.loc[:, self.animal_list + ['FID']].groupby('FID').sum() - animal_distribution = IoShapefile(self.comm).gather_shapefile(animal_distribution.reset_index()) - if self.comm.Get_rank() == 0: + animal_distribution = IoShapefile(self.__comm).gather_shapefile(animal_distribution.reset_index()) + if self.__comm.Get_rank() == 0: animal_distribution = animal_distribution.groupby('FID').sum() # Adding geometry and coordinates system from the destiny grid shapefile animal_distribution = gpd.GeoDataFrame( @@ -357,9 +357,9 @@ class LivestockSector(Sector): else: animal_distribution = None - animal_distribution = IoShapefile(self.comm).split_shapefile(animal_distribution) - self.logger.write_time_log('LivestockSector', 'animals_shapefile_to_dst_resolution', - timeit.default_timer() - spent_time) + animal_distribution = IoShapefile(self.__comm).split_shapefile(animal_distribution) + self.__logger.write_time_log('LivestockSector', 'animals_shapefile_to_dst_resolution', + timeit.default_timer() - spent_time) return animal_distribution @@ -384,11 +384,11 @@ class LivestockSector(Sector): if not os.path.exists(animal_distribution_path): dataframe = self.create_animals_shapefile_src_resolution(gridded_livestock_path) dataframe = self.animals_shapefile_to_dst_resolution(dataframe) - IoShapefile(self.comm).write_shapefile_parallel(dataframe.reset_index(), animal_distribution_path) + IoShapefile(self.__comm).write_shapefile_parallel(dataframe.reset_index(), animal_distribution_path) else: - dataframe = IoShapefile(self.comm).read_shapefile_parallel(animal_distribution_path) + dataframe = IoShapefile(self.__comm).read_shapefile_parallel(animal_distribution_path) dataframe.set_index('FID', inplace=True) - self.logger.write_time_log('LivestockSector', 'create_animals_shapefile', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'create_animals_shapefile', timeit.default_timer() - spent_time) return dataframe @@ -428,7 +428,7 @@ class 
LivestockSector(Sector): splitting_factors.reset_index(inplace=True) splitting_factors['nuts3_id'] = splitting_factors['nuts3_id'].astype(np.int16) - self.logger.write_time_log('LivestockSector', 'get_splitting_factors', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'get_splitting_factors', timeit.default_timer() - spent_time) return splitting_factors @@ -467,7 +467,7 @@ class LivestockSector(Sector): nut_value='nuts3_id') animal_distribution.rename(columns={'nut_code': 'nuts3_id'}, inplace=True) animal_distribution = animal_distribution[animal_distribution['nuts3_id'] != -999] - animal_distribution = IoShapefile(self.comm).balance(animal_distribution) + animal_distribution = IoShapefile(self.__comm).balance(animal_distribution) animal_distribution.set_index('FID', inplace=True) splitting_factors = self.get_splitting_factors(correction_split_factors_path) @@ -487,12 +487,12 @@ class LivestockSector(Sector): animal_distribution = self.add_timezone(animal_distribution) animal_distribution.set_index('FID', inplace=True) - IoShapefile(self.comm).write_shapefile_parallel(animal_distribution.reset_index(), animal_distribution_path) + IoShapefile(self.__comm).write_shapefile_parallel(animal_distribution.reset_index(), animal_distribution_path) else: - animal_distribution = IoShapefile(self.comm).read_shapefile_parallel(animal_distribution_path) + animal_distribution = IoShapefile(self.__comm).read_shapefile_parallel(animal_distribution_path) animal_distribution.set_index('FID', inplace=True) - self.logger.write_time_log('LivestockSector', 'animal_distribution_by_category', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'animal_distribution_by_category', + timeit.default_timer() - spent_time) return animal_distribution @@ -532,26 +532,26 @@ class LivestockSector(Sector): geometry_shp.drop(columns='geometry', inplace=True) # Extracting temperature - meteo = 
IoNetcdf(self.comm).get_data_from_netcdf( + meteo = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.paths['temperature_dir'], 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'tas', 'daily', day, geometry_shp) meteo['tas'] = meteo['tas'] - 273.15 # From Celsius to Kelvin degrees # Extracting wind speed - meteo['sfcWind'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo['sfcWind'] = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.paths['wind_speed_dir'], 'sfcWind_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'sfcWind', 'daily', day, geometry_shp).loc[:, 'sfcWind'] # Extracting denominators already calculated for all the emission types - meteo['D_grassing'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo['D_grassing'] = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'grassing_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - meteo['D_housing_closed'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo['D_housing_closed'] = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'housing_closed_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - meteo['D_housing_open'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo['D_housing_open'] = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'housing_open_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - meteo['D_storage'] = IoNetcdf(self.comm).get_data_from_netcdf( + meteo['D_storage'] = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'storage_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] @@ -581,7 +581,7 @@ class LivestockSector(Sector): meteo.loc[:, 'FD_grassing'].multiply((1 / (SIGMA * math.sqrt(2 * math.pi))) * math.exp( (float(int(day.strftime('%j')) - TAU) ** 2) / (-2 * (SIGMA ** 2)))) - 
self.logger.write_time_log('LivestockSector', 'get_daily_factors', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'get_daily_factors', timeit.default_timer() - spent_time) return meteo.loc[:, ['REC', 'FD_housing_open', 'FD_housing_closed', 'FD_storage', 'FD_grassing', 'geometry']] @@ -624,7 +624,7 @@ class LivestockSector(Sector): new_df['EF_storage'] = new_df['Estorage_sd_l'] + new_df['Estorage_sl_l'] new_df.drop(['Estorage_sd_l', 'Estorage_sl_l'], axis=1, inplace=True) - self.logger.write_time_log('LivestockSector', 'get_nh3_ef', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'get_nh3_ef', timeit.default_timer() - spent_time) return new_df @@ -661,7 +661,7 @@ class LivestockSector(Sector): new_df['EF_storage'] = new_df['Estorage_sd_l'] + new_df['Estorage_sl_l'] new_df.drop(['Estorage_sd_l', 'Estorage_sl_l'], axis=1, inplace=True) - self.logger.write_time_log('LivestockSector', 'get_nox_no_ef', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'get_nox_no_ef', timeit.default_timer() - spent_time) return new_df @@ -696,8 +696,8 @@ class LivestockSector(Sector): animals_df.drop(columns=['centroid', 'REC', 'geometry_y'], axis=1, inplace=True) animals_df.rename(columns={'geometry_x': 'geometry'}, inplace=True) - self.logger.write_time_log('LivestockSector', 'add_daily_factors_to_animal_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'add_daily_factors_to_animal_distribution', + timeit.default_timer() - spent_time) return animals_df @@ -732,7 +732,7 @@ class LivestockSector(Sector): # get_list out_pollutants from speciation map -> NH3 out_pollutants = self.get_output_pollutants('nh3') for out_p in out_pollutants: - self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) 
out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -771,7 +771,7 @@ class LivestockSector(Sector): # get_list out_pollutants from speciation map -> PAR, OLE, TOL ... (15 species) out_pollutants = self.get_output_pollutants('nmvoc') for out_p in out_pollutants: - self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -801,7 +801,7 @@ class LivestockSector(Sector): if 'pm10' in [x.lower() for x in self.source_pollutants]: out_pollutants = self.get_output_pollutants('pm10') for out_p in out_pollutants: - self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -853,7 +853,7 @@ class LivestockSector(Sector): if 'pm25' in [x.lower() for x in self.source_pollutants]: out_pollutants = self.get_output_pollutants('pm25') for out_p in out_pollutants: - self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -905,7 +905,7 @@ class LivestockSector(Sector): if 'nox_no' in [x.lower() for x in self.source_pollutants]: out_pollutants = self.get_output_pollutants('nox_no') for out_p in out_pollutants: - self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -924,7 
+924,7 @@ class LivestockSector(Sector): # ===== PMC ===== if 'pmc' in [x.lower() for x in self.speciation_map.keys()]: pmc_name = 'PMC' - self.logger.write_log('\t\t\tCalculating {0} emissions'.format(pmc_name), message_level=3) + self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(pmc_name), message_level=3) if all(x in [x.lower() for x in self.source_pollutants] for x in ['pm10', 'pm25']): if pmc_name not in self.output_pollutants: self.output_pollutants.append(pmc_name) @@ -937,9 +937,9 @@ class LivestockSector(Sector): not_pollutants = [poll for poll in self.source_pollutants if poll not in ['nh3', 'nox_no', 'nh3', 'nmvoc', 'pm10', 'pm25']] if len(not_pollutants) > 0: - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: warn('The pollutants {0} cannot be calculated on the Livestock sector'.format(not_pollutants)) - self.logger.write_time_log('LivestockSector', 'calculate_day_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'calculate_day_emissions', timeit.default_timer() - spent_time) return out_df @@ -962,8 +962,8 @@ class LivestockSector(Sector): daily_emissions = {} for day in self.day_dict.keys(): daily_emissions[day] = self.calculate_day_emissions(animals_df, day) - self.logger.write_time_log('LivestockSector', 'calculate_daily_emissions_dict', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'calculate_daily_emissions_dict', + timeit.default_timer() - spent_time) return daily_emissions @@ -988,7 +988,7 @@ class LivestockSector(Sector): dataframe_by_day = pd.concat(df_list, ignore_index=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.logger.write_time_log('LivestockSector', 'add_dates', timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'add_dates', timeit.default_timer() - spent_time) return dataframe_by_day @@ -1020,8 +1020,8 @@ class LivestockSector(Sector): distribution['hour'] = 
distribution['date'].dt.hour for out_p in self.output_pollutants: - self.logger.write_log('\t\t\tDistributing {0} emissions to hourly resolution'.format(out_p), - message_level=3) + self.__logger.write_log('\t\t\tDistributing {0} emissions to hourly resolution'.format(out_p), + message_level=3) if out_p.lower() == 'pmc': in_p = 'pmc' else: @@ -1046,8 +1046,8 @@ class LivestockSector(Sector): distribution['date'] = distribution['date_utc'] distribution.drop(columns=['hour', 'date_utc'], axis=1, inplace=True) - self.logger.write_time_log('LivestockSector', 'calculate_hourly_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('LivestockSector', 'calculate_hourly_distribution', + timeit.default_timer() - spent_time) return distribution @@ -1059,18 +1059,18 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') - self.logger.write_log('\t\tCalculating Daily emissions', message_level=2) + self.__logger.write_log('\t\tCalculating Daily emissions', message_level=2) df_by_day = self.calculate_daily_emissions_dict(self.animals_df) - self.logger.write_log('\t\tCalculating hourly emissions', message_level=2) + self.__logger.write_log('\t\tCalculating hourly emissions', message_level=2) animals_df = self.calculate_hourly_distribution(df_by_day) animals_df.drop(columns=['geometry'], inplace=True) animals_df['layer'] = 0 animals_df = animals_df.groupby(['FID', 'layer', 'tstep']).sum() - self.logger.write_log('\t\tLivestock emissions calculated', message_level=2) - self.logger.write_time_log('LivestockSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tLivestock emissions calculated', message_level=2) + self.__logger.write_time_log('LivestockSector', 'calculate_emissions', timeit.default_timer() - spent_time) return animals_df diff --git 
a/hermesv3_bu/sectors/point_source_sector.py b/hermesv3_bu/sectors/point_source_sector.py index 2b7d1b0..6ee451a 100755 --- a/hermesv3_bu/sectors/point_source_sector.py +++ b/hermesv3_bu/sectors/point_source_sector.py @@ -69,7 +69,7 @@ class PointSourceSector(Sector): self.measured_path = measured_emission_path self.plume_rise_pahts = plume_rise_pahts - self.logger.write_time_log('PointSourceSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', '__init__', timeit.default_timer() - spent_time) def check_catalog(self): # Checking monthly profiles IDs @@ -116,7 +116,7 @@ class PointSourceSector(Sector): """ spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: if self.plume_rise: columns = {"Code": np.str, "Cons": np.bool, "SNAP": np.str, "Lon": np.float64, "Lat": np.float64, "Height": np.float64, "Diameter": np.float64, @@ -151,9 +151,9 @@ class PointSourceSector(Sector): else: catalog_df = None - self.comm.Barrier() - catalog_df = IoShapefile(self.comm).split_shapefile(catalog_df) - self.logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) + self.__comm.Barrier() + catalog_df = IoShapefile(self.__comm).split_shapefile(catalog_df) + self.__logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) return catalog_df def read_catalog_shapefile(self, catalog_path, sector_list): @@ -171,7 +171,7 @@ class PointSourceSector(Sector): """ spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: if self.plume_rise: columns = {"Code": np.str, "Cons": np.bool, "SNAP": np.str, "Height": np.float64, "Diameter": np.float64, "Speed": np.float64, "Temp": np.float64, "AF": np.float64, @@ -208,9 +208,9 @@ class PointSourceSector(Sector): else: catalog_df = None - self.comm.Barrier() - catalog_df = IoShapefile(self.comm).split_shapefile(catalog_df) - 
self.logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) + self.__comm.Barrier() + catalog_df = IoShapefile(self.__comm).split_shapefile(catalog_df) + self.__logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) return catalog_df def read_catalog_for_measured_emissions_csv(self, catalog_path, sector_list): @@ -252,8 +252,8 @@ class PointSourceSector(Sector): catalog_df = catalog_df.loc[catalog_df['SNAP'].str[:2].isin(sector_list)] catalog_df.drop('SNAP', axis=1, inplace=True) - self.logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', + timeit.default_timer() - spent_time) return catalog_df def read_catalog_for_measured_emissions(self, catalog_path, sector_list): @@ -302,8 +302,8 @@ class PointSourceSector(Sector): catalog_df = catalog_df.loc[catalog_df['SNAP'].str[:2].isin(sector_list)] catalog_df.drop('SNAP', axis=1, inplace=True) - self.logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', + timeit.default_timer() - spent_time) return catalog_df def to_geodataframe(self, catalog): @@ -323,7 +323,7 @@ class PointSourceSector(Sector): catalog.drop(['Lon', 'Lat'], axis=1, inplace=True) crs = {'init': 'epsg:4326'} catalog = gpd.GeoDataFrame(catalog, crs=crs, geometry=geometry) - self.logger.write_time_log('PointSourceSector', 'to_geodataframe', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'to_geodataframe', timeit.default_timer() - spent_time) return catalog def get_yearly_emissions(self, catalog): @@ -338,13 +338,13 @@ class PointSourceSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - 
self.logger.write_log('\tCalculating yearly emissions', message_level=2) + self.__logger.write_log('\tCalculating yearly emissions', message_level=2) for pollutant in self.source_pollutants: catalog.rename(columns={u'EF_{0}'.format(pollutant): pollutant}, inplace=True) catalog[pollutant] = catalog[pollutant] * catalog['AF'] catalog.drop('AF', axis=1, inplace=True) - self.logger.write_time_log('PointSourceSector', 'get_yearly_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'get_yearly_emissions', timeit.default_timer() - spent_time) return catalog def get_temporal_factors(self, catalog): @@ -359,7 +359,7 @@ class PointSourceSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating hourly emissions', message_level=2) + self.__logger.write_log('\tCalculating hourly emissions', message_level=2) def get_mf(df): month_factor = self.monthly_profiles.loc[df.name[1], df.name[0]] @@ -396,7 +396,7 @@ class PointSourceSector(Sector): catalog.drop('temp_factor', axis=1, inplace=True) - self.logger.write_time_log('PointSourceSector', 'get_temporal_factors', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'get_temporal_factors', timeit.default_timer() - spent_time) return catalog def calculate_hourly_emissions(self, catalog): @@ -415,8 +415,8 @@ class PointSourceSector(Sector): catalog = self.get_temporal_factors(catalog) catalog.set_index(['Code', 'tstep'], inplace=True) - self.logger.write_time_log('PointSourceSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return catalog def get_meteo_xy(self, dataframe, netcdf_path): @@ -461,7 +461,7 @@ class PointSourceSector(Sector): dataframe['X'] = nc_dataframe.loc[dataframe['meteo_index'], 'X'].values dataframe['Y'] = 
nc_dataframe.loc[dataframe['meteo_index'], 'Y'].values - self.logger.write_time_log('PointSourceSector', 'get_meteo_xy', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'get_meteo_xy', timeit.default_timer() - spent_time) return dataframe[['X', 'Y']] def get_plumerise_meteo(self, catalog): @@ -675,42 +675,42 @@ class PointSourceSector(Sector): # ===== 3D Meteo variables ===== # Adding stc_temp - self.logger.write_log('\t\tGetting temperature from {0}'.format(self.plume_rise_pahts['temperature_sfc_dir']), - message_level=3) + self.__logger.write_log('\t\tGetting temperature from {0}'.format(self.plume_rise_pahts['temperature_sfc_dir']), + message_level=3) catalog['temp_sfc'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['temperature_sfc_dir'], 't2')) - self.logger.write_log('\t\tGetting friction velocity from {0}'.format( + self.__logger.write_log('\t\tGetting friction velocity from {0}'.format( self.plume_rise_pahts['friction_velocity_dir']), message_level=3) catalog['friction_v'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['friction_velocity_dir'], 'ustar')) - self.logger.write_log('\t\tGetting PBL height from {0}'.format( + self.__logger.write_log('\t\tGetting PBL height from {0}'.format( self.plume_rise_pahts['pblh_dir']), message_level=3) catalog['pbl'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['pblh_dir'], 'mixed_layer_height')) - self.logger.write_log('\t\tGetting obukhov length from {0}'.format( + self.__logger.write_log('\t\tGetting obukhov length from {0}'.format( self.plume_rise_pahts['obukhov_length_dir']), message_level=3) catalog['obukhov_len'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['obukhov_length_dir'], 'rmol')) catalog['obukhov_len'] = 1. 
/ catalog['obukhov_len'] - self.logger.write_log('\t\tGetting layer thickness from {0}'.format( + self.__logger.write_log('\t\tGetting layer thickness from {0}'.format( self.plume_rise_pahts['layer_thickness_dir']), message_level=3) catalog['layers'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_layers(x, self.plume_rise_pahts['layer_thickness_dir'], 'layer_thickness')) - self.logger.write_log('\t\tGetting temperatue at the top from {0}'.format( + self.__logger.write_log('\t\tGetting temperatue at the top from {0}'.format( self.plume_rise_pahts['temperature_4d_dir']), message_level=3) catalog['temp_top'] = catalog.groupby('date_utc')['X', 'Y', 'Height', 'layers', 'temp_sfc'].apply( lambda x: get_temp_top(x, self.plume_rise_pahts['temperature_4d_dir'], 't')) - self.logger.write_log('\t\tGetting wind speed at 10 m', message_level=3) + self.__logger.write_log('\t\tGetting wind speed at 10 m', message_level=3) catalog['wSpeed_10'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_wind_speed_10m(x, self.plume_rise_pahts['u10_wind_speed_dir'], self.plume_rise_pahts['v10_wind_speed_dir'], 'u10', 'v10')) - self.logger.write_log('\t\tGetting wind speed at the top', message_level=3) + self.__logger.write_log('\t\tGetting wind speed at the top', message_level=3) catalog['wSpeed_top'] = catalog.groupby('date_utc')['X', 'Y', 'Height', 'layers', 'wSpeed_10'].apply( lambda x: get_wind_speed_top(x, self.plume_rise_pahts['u_wind_speed_4d_dir'], self.plume_rise_pahts['v_wind_speed_4d_dir'], 'u', 'v')) catalog.drop(columns=['wSpeed_10', 'layers', 'X', 'Y'], inplace=True) - self.logger.write_time_log('PointSourceSector', 'get_plumerise_meteo', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'get_plumerise_meteo', timeit.default_timer() - spent_time) return catalog def get_plume_rise_top_bot(self, catalog): @@ -755,7 +755,7 @@ class PointSourceSector(Sector): catalog.drop(columns=['Height', 'Diameter', 'Speed', 'Temp', 
'date_utc', 'temp_sfc', 'friction_v', 'pbl', 'obukhov_len', 'temp_top', 'wSpeed_top', 'Fb', 'S', 'Ah'], inplace=True) - self.logger.write_time_log('PointSourceSector', 'get_plume_rise_top_bot', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'get_plume_rise_top_bot', timeit.default_timer() - spent_time) return catalog def set_layer(self, catalog): @@ -809,7 +809,7 @@ class PointSourceSector(Sector): new_catalog = catalog_by_layer[~catalog_by_layer.index.duplicated(keep='first')] new_catalog[self.source_pollutants] = catalog_by_layer.groupby(['Code', 'tstep', 'layer'])[ self.source_pollutants].sum() - self.logger.write_time_log('PointSourceSector', 'set_layer', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'set_layer', timeit.default_timer() - spent_time) return new_catalog def calculate_vertical_distribution(self, catalog): @@ -838,8 +838,8 @@ class PointSourceSector(Sector): catalog.reset_index(inplace=True) catalog.set_index(['Code', 'tstep', 'layer'], inplace=True) - self.logger.write_time_log('PointSourceSector', 'calculate_vertical_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'calculate_vertical_distribution', + timeit.default_timer() - spent_time) return catalog def add_measured_emissions(self, catalog): @@ -871,7 +871,7 @@ class PointSourceSector(Sector): for pollutant in self.source_pollutants: catalog[pollutant] = catalog.groupby('Code')['date'].apply(lambda x: func(x, pollutant)) - self.logger.write_time_log('PointSourceSector', 'add_measured_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'add_measured_emissions', timeit.default_timer() - spent_time) return catalog def calculate_measured_emissions(self, catalog): @@ -885,8 +885,8 @@ class PointSourceSector(Sector): catalog = self.add_measured_emissions(catalog) catalog.set_index(['Code', 'tstep'], 
inplace=True) - self.logger.write_time_log('PointSourceSector', 'calculate_measured_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'calculate_measured_emissions', + timeit.default_timer() - spent_time) return catalog def merge_catalogs(self, catalog_list): @@ -894,19 +894,19 @@ class PointSourceSector(Sector): catalog = pd.concat(catalog_list).reset_index() catalog.set_index(['Code', 'tstep'], inplace=True) - self.logger.write_time_log('PointSourceSector', 'merge_catalogs', timeit.default_timer() - spent_time) + self.__logger.write_time_log('PointSourceSector', 'merge_catalogs', timeit.default_timer() - spent_time) return catalog def speciate(self, dataframe, code='default'): spent_time = timeit.default_timer() - self.logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) + self.__logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) new_dataframe = gpd.GeoDataFrame(index=dataframe.index, data=None, crs=dataframe.crs, geometry=dataframe.geometry) for out_pollutant in self.output_pollutants: input_pollutant = self.speciation_map[out_pollutant] if input_pollutant == 'nmvoc' and input_pollutant in dataframe.columns.values: - self.logger.write_log("\t\t\t{0} = {4}*({1}/{2})*{3}".format( + self.__logger.write_log("\t\t\t{0} = {4}*({1}/{2})*{3}".format( out_pollutant, input_pollutant, self.molecular_weights[input_pollutant], self.speciation_profile.loc[code, out_pollutant], self.speciation_profile.loc[code, 'VOCtoTOG']), message_level=3) @@ -916,7 +916,7 @@ class PointSourceSector(Sector): self.molecular_weights[input_pollutant]) * self.speciation_profile.loc[code, out_pollutant] else: if out_pollutant != 'PMC': - self.logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( + self.__logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( out_pollutant, input_pollutant, self.molecular_weights[input_pollutant], self.speciation_profile.loc[code, out_pollutant]), 
message_level=3) @@ -925,7 +925,7 @@ class PointSourceSector(Sector): self.molecular_weights[input_pollutant]) * \ self.speciation_profile.loc[code, out_pollutant] else: - self.logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( + self.__logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( out_pollutant, 'pm10', self.molecular_weights['pm10'], self.speciation_profile.loc[code, out_pollutant], 'pm25', self.molecular_weights['pm25']), message_level=3) @@ -934,7 +934,7 @@ class PointSourceSector(Sector): ((dataframe['pm10'] / self.molecular_weights['pm10']) - (dataframe['pm25'] / self.molecular_weights['pm25'])) * \ self.speciation_profile.loc[code, out_pollutant] - self.logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) return new_dataframe def point_source_to_fid(self, catalog): @@ -957,12 +957,12 @@ class PointSourceSector(Sector): def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') emissions = self.add_dates(self.catalog, drop_utc=False) emissions = self.calculate_hourly_emissions(emissions) - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: emissions_measured = self.calculate_measured_emissions(self.catalog_measured) else: emissions_measured = None @@ -980,7 +980,7 @@ class PointSourceSector(Sector): # From kmol/h or kg/h to mol/h or g/h emissions = emissions.mul(1000.0) - self.logger.write_log('\t\tPoint sources emissions calculated', message_level=2) - self.logger.write_time_log('PointSourceSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tPoint sources emissions calculated', message_level=2) + self.__logger.write_time_log('PointSourceSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git 
a/hermesv3_bu/sectors/recreational_boats_sector.py b/hermesv3_bu/sectors/recreational_boats_sector.py index 4b7c5c7..205cd11 100755 --- a/hermesv3_bu/sectors/recreational_boats_sector.py +++ b/hermesv3_bu/sectors/recreational_boats_sector.py @@ -35,14 +35,14 @@ class RecreationalBoatsSector(Sector): self.boats_data_path = boats_data_path self.ef_file_path = ef_file_path - self.logger.write_time_log('RecreationalBoatsSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', '__init__', timeit.default_timer() - spent_time) def create_density_map(self, density_map_path): spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: density_map_auxpath = os.path.join(self.auxiliary_dir, 'recreational_boats', 'density_map.shp') if not os.path.exists(density_map_auxpath): - src_density_map = IoRaster(self.comm).to_shapefile_serie(density_map_path, nodata=0) + src_density_map = IoRaster(self.__comm).to_shapefile_serie(density_map_path, nodata=0) src_density_map = src_density_map.loc[src_density_map['data'] > 0] src_density_map['data'] = src_density_map['data'] / src_density_map['data'].sum() src_density_map.to_crs(self.grid.shapefile.crs, inplace=True) @@ -59,15 +59,15 @@ class RecreationalBoatsSector(Sector): geometry=self.grid.shapefile.loc[src_density_map.index, 'geometry']) src_density_map.reset_index(inplace=True) - IoShapefile(self.comm).write_shapefile_serial(src_density_map, density_map_auxpath) + IoShapefile(self.__comm).write_shapefile_serial(src_density_map, density_map_auxpath) else: - src_density_map = IoShapefile(self.comm).read_shapefile_serial(density_map_auxpath) + src_density_map = IoShapefile(self.__comm).read_shapefile_serial(density_map_auxpath) else: src_density_map = None - src_density_map = IoShapefile(self.comm).split_shapefile(src_density_map) + src_density_map = IoShapefile(self.__comm).split_shapefile(src_density_map) 
src_density_map.set_index('FID', inplace=True) - self.logger.write_time_log('RecreationalBoatsSector', 'create_density_map', timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', 'create_density_map', timeit.default_timer() - spent_time) return src_density_map def speciate_dict(self, annual_emissions_dict): @@ -76,7 +76,7 @@ class RecreationalBoatsSector(Sector): speciated_emissions = {} for out_pollutant in self.output_pollutants: if out_pollutant != 'PMC': - self.logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( + self.__logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( out_pollutant, self.speciation_map[out_pollutant], self.molecular_weights[self.speciation_map[out_pollutant]], self.speciation_profile.loc['default', out_pollutant]), message_level=3) @@ -85,14 +85,14 @@ class RecreationalBoatsSector(Sector): self.molecular_weights[self.speciation_map[out_pollutant]] ) * self.speciation_profile.loc['default', out_pollutant] else: - self.logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( + self.__logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( out_pollutant, 'pm10', self.molecular_weights['pm10'], self.speciation_profile.loc['default', out_pollutant], 'pm25', self.molecular_weights['pm25']), message_level=3) speciated_emissions[out_pollutant] = ((annual_emissions_dict['pm10'] / self.molecular_weights['pm10']) - (annual_emissions_dict['pm25'] / self.molecular_weights['pm25']) ) * self.speciation_profile.loc['default', out_pollutant] - self.logger.write_time_log('RecreationalBoatsSector', 'speciate_dict', timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', 'speciate_dict', timeit.default_timer() - spent_time) return speciated_emissions def get_annual_emissions(self): @@ -108,8 +108,8 @@ class RecreationalBoatsSector(Sector): for in_p in self.source_pollutants: emissions_dict[in_p] = dataframe['AF'].multiply(dataframe['EF_{0}'.format(in_p)]).sum() - 
self.logger.write_time_log('RecreationalBoatsSector', 'get_annual_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', 'get_annual_emissions', + timeit.default_timer() - spent_time) return emissions_dict def calculate_yearly_emissions(self, annual_emissions): @@ -121,8 +121,8 @@ class RecreationalBoatsSector(Sector): for pollutant, annual_value in annual_emissions.items(): new_dataframe[pollutant] = self.density_map['data'] * annual_value - self.logger.write_time_log('RecreationalBoatsSector', 'calculate_yearly_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', 'calculate_yearly_emissions', + timeit.default_timer() - spent_time) return new_dataframe def dates_to_month_weekday_hour(self, dataframe): @@ -131,8 +131,8 @@ class RecreationalBoatsSector(Sector): dataframe['weekday'] = dataframe['date'].dt.weekday dataframe['hour'] = dataframe['date'].dt.hour - self.logger.write_time_log('RecreationalBoatsSector', 'dates_to_month_weekday_hour', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', 'dates_to_month_weekday_hour', + timeit.default_timer() - spent_time) return dataframe def calculate_hourly_emissions(self, annual_distribution): @@ -174,13 +174,13 @@ class RecreationalBoatsSector(Sector): dataframe[self.output_pollutants] = dataframe[self.output_pollutants].mul(dataframe['HF'], axis=0) dataframe.drop(columns=['hour', 'HF'], inplace=True) - self.logger.write_time_log('RecreationalBoatsSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('RecreationalBoatsSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return dataframe def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') annual_emissions = 
self.get_annual_emissions() annual_emissions = self.speciate_dict(annual_emissions) @@ -190,7 +190,7 @@ class RecreationalBoatsSector(Sector): distribution.drop(columns=['geometry'], inplace=True) distribution['layer'] = 0 distribution.set_index(['FID', 'layer', 'tstep'], inplace=True) - self.logger.write_log('\t\tRecreational boats emissions calculated', message_level=2) - self.logger.write_time_log('RecreationalBoatsSector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tRecreational boats emissions calculated', message_level=2) + self.__logger.write_time_log('RecreationalBoatsSector', 'calculate_emissions', + timeit.default_timer() - spent_time) return distribution diff --git a/hermesv3_bu/sectors/residential_sector.py b/hermesv3_bu/sectors/residential_sector.py index 4f34981..ce8c0c1 100755 --- a/hermesv3_bu/sectors/residential_sector.py +++ b/hermesv3_bu/sectors/residential_sector.py @@ -55,7 +55,7 @@ class ResidentialSector(Sector): prov_shapefile, ccaa_shapefile, population_density_map, population_type_map, create_pop_csv=False) self.heating_degree_day_path = heating_degree_day_path self.temperature_path = temperature_path - self.logger.write_time_log('ResidentialSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', '__init__', timeit.default_timer() - spent_time) def read_ef_file(self, path): """ @@ -73,7 +73,7 @@ class ResidentialSector(Sector): df_ef = pd.read_csv(path) df_ef = df_ef.loc[df_ef['fuel_type'].isin(self.fuel_list), ['fuel_type'] + self.source_pollutants] - self.logger.write_time_log('ResidentialSector', 'read_ef_file', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'read_ef_file', timeit.default_timer() - spent_time) return df_ef def calculate_num_days(self): @@ -86,7 +86,7 @@ class ResidentialSector(Sector): for key, value in zip(days, num_days): day_dict[key] = value - 
self.logger.write_time_log('ResidentialSector', 'calculate_num_days', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'calculate_num_days', timeit.default_timer() - spent_time) return day_dict def read_residential_spatial_proxies(self, path): @@ -95,8 +95,8 @@ class ResidentialSector(Sector): spatial_proxies = pd.read_csv(path) spatial_proxies = spatial_proxies.loc[spatial_proxies['fuel_type'].isin(self.fuel_list), :] - self.logger.write_time_log('ResidentialSector', 'read_residential_spatial_proxies', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'read_residential_spatial_proxies', + timeit.default_timer() - spent_time) return spatial_proxies def get_spatial_proxy(self, fuel_type): @@ -119,7 +119,7 @@ class ResidentialSector(Sector): else: proxy_type = proxy[1] - self.logger.write_time_log('ResidentialSector', 'get_spatial_proxy', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'get_spatial_proxy', timeit.default_timer() - spent_time) return {'nut_level': nut_level, 'proxy_type': proxy_type} def to_dst_resolution(self, src_distribution): @@ -144,29 +144,29 @@ class ResidentialSector(Sector): geometry=self.grid.shapefile.loc[src_distribution.index, 'geometry']) src_distribution.reset_index(inplace=True) - self.logger.write_time_log('ResidentialSector', 'to_dst_resolution', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'to_dst_resolution', timeit.default_timer() - spent_time) return src_distribution def get_fuel_distribution(self, prov_shapefile, ccaa_shapefile, population_density_map, population_type_map, create_pop_csv=False): spent_time = timeit.default_timer() - self.logger.write_log('Calculating fuel distribution', message_level=2) + self.__logger.write_log('Calculating fuel distribution', message_level=2) fuel_distribution_path = os.path.join(self.auxiliary_dir, 'residential', 
'fuel_distribution.shp') if not os.path.exists(fuel_distribution_path): population_density = os.path.join(self.auxiliary_dir, 'residential', 'population_density.tif') - if self.comm.Get_rank() == 0: - population_density = IoRaster(self.comm).clip_raster_with_shapefile_poly( + if self.__comm.Get_rank() == 0: + population_density = IoRaster(self.__comm).clip_raster_with_shapefile_poly( population_density_map, self.clip.shapefile, population_density) - population_density = IoRaster(self.comm).to_shapefile_parallel(population_density) + population_density = IoRaster(self.__comm).to_shapefile_parallel(population_density) population_density.rename(columns={'data': 'pop'}, inplace=True) population_type = os.path.join(self.auxiliary_dir, 'residential', 'population_type.tif') - if self.comm.Get_rank() == 0: - population_type = IoRaster(self.comm).clip_raster_with_shapefile_poly( + if self.__comm.Get_rank() == 0: + population_type = IoRaster(self.__comm).clip_raster_with_shapefile_poly( population_type_map, self.clip.shapefile, population_type) - population_type = IoRaster(self.comm).to_shapefile_parallel(population_type) + population_type = IoRaster(self.__comm).to_shapefile_parallel(population_type) population_type.rename(columns={'data': 'type'}, inplace=True) population_type['type'] = population_type['type'].astype(np.int16) @@ -180,7 +180,7 @@ class ResidentialSector(Sector): population_density = self.add_nut_code(population_density, ccaa_shapefile, nut_value='nuts2_id') population_density.rename(columns={'nut_code': 'ccaa'}, inplace=True) population_density = population_density.loc[population_density['ccaa'] != -999, :] - population_density = IoShapefile(self.comm).balance(population_density) + population_density = IoShapefile(self.__comm).balance(population_density) if create_pop_csv: population_density.loc[:, ['prov', 'pop', 'type']].groupby(['prov', 'type']).sum().reset_index().to_csv( @@ -263,18 +263,18 @@ class ResidentialSector(Sector): fuel] = 
population_density['pop'].multiply( energy_consumption / total_pop) fuel_distribution = self.to_dst_resolution(fuel_distribution) - fuel_distribution = IoShapefile(self.comm).gather_shapefile(fuel_distribution, rank=0) - if self.comm.Get_rank() == 0: + fuel_distribution = IoShapefile(self.__comm).gather_shapefile(fuel_distribution, rank=0) + if self.__comm.Get_rank() == 0: fuel_distribution.groupby('FID').sum() - IoShapefile(self.comm).write_shapefile_serial(fuel_distribution, fuel_distribution_path) + IoShapefile(self.__comm).write_shapefile_serial(fuel_distribution, fuel_distribution_path) else: fuel_distribution = None - fuel_distribution = IoShapefile(self.comm).split_shapefile(fuel_distribution) + fuel_distribution = IoShapefile(self.__comm).split_shapefile(fuel_distribution) else: - fuel_distribution = IoShapefile(self.comm).read_shapefile_parallel(fuel_distribution_path) + fuel_distribution = IoShapefile(self.__comm).read_shapefile_parallel(fuel_distribution_path) fuel_distribution.set_index('FID', inplace=True) - self.logger.write_time_log('ResidentialSector', 'get_fuel_distribution', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'get_fuel_distribution', timeit.default_timer() - spent_time) return fuel_distribution def calculate_daily_distribution(self, day): @@ -292,7 +292,7 @@ class ResidentialSector(Sector): geometry_shp['centroid'] = geometry_shp.centroid geometry_shp.drop(columns='geometry', inplace=True) - meteo = IoNetcdf(self.comm).get_data_from_netcdf( + meteo = IoNetcdf(self.__comm).get_data_from_netcdf( os.path.join(self.temperature_path, 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'tas', 'daily', day, geometry_shp) # From K to Celsius degrees @@ -302,7 +302,7 @@ class ResidentialSector(Sector): meteo['hdd'] = np.maximum(self.hdd_base_temperature - meteo['tas'], 1) meteo.drop('tas', axis=1, inplace=True) - meteo['hdd_mean'] = 
IoNetcdf(self.comm).get_data_from_netcdf(self.heating_degree_day_path.replace( + meteo['hdd_mean'] = IoNetcdf(self.__comm).get_data_from_netcdf(self.heating_degree_day_path.replace( '', str(day.year)), 'HDD', 'yearly', day, geometry_shp).loc[:, 'HDD'] daily_distribution = self.fuel_distribution.copy() @@ -332,8 +332,8 @@ class ResidentialSector(Sector): daily_distribution.drop(['hdd', 'hdd_mean'], axis=1, inplace=True) - self.logger.write_time_log('ResidentialSector', 'calculate_daily_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'calculate_daily_distribution', + timeit.default_timer() - spent_time) return daily_distribution def get_fuel_distribution_by_day(self): @@ -343,8 +343,8 @@ class ResidentialSector(Sector): for day in self.day_dict.keys(): daily_distribution[day] = self.calculate_daily_distribution(day) - self.logger.write_time_log('ResidentialSector', 'get_fuel_distribution_by_day', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'get_fuel_distribution_by_day', + timeit.default_timer() - spent_time) return daily_distribution def calculate_hourly_distribution(self, fuel_distribution): @@ -362,8 +362,8 @@ class ResidentialSector(Sector): ) fuel_distribution.drop('hour', axis=1, inplace=True) - self.logger.write_time_log('ResidentialSector', 'calculate_hourly_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'calculate_hourly_distribution', + timeit.default_timer() - spent_time) return fuel_distribution def add_dates(self, df_by_day): @@ -380,7 +380,7 @@ class ResidentialSector(Sector): dataframe_by_day = pd.concat(df_list, ignore_index=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.logger.write_time_log('ResidentialSector', 'add_dates', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'add_dates', timeit.default_timer() - spent_time) return 
dataframe_by_day @@ -395,8 +395,8 @@ class ResidentialSector(Sector): fuel_distribution = self.calculate_hourly_distribution(fuel_distribution_by_day) - self.logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', + timeit.default_timer() - spent_time) return fuel_distribution def calculate_emissions_from_fuel_distribution(self, fuel_distribution): @@ -407,8 +407,8 @@ class ResidentialSector(Sector): emissions[in_p] = 0 for i, fuel_type_ef in self.ef_profiles.iterrows(): emissions[in_p] += fuel_distribution.loc[:, fuel_type_ef['fuel_type']].multiply(fuel_type_ef[in_p]) - self.logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', + timeit.default_timer() - spent_time) return emissions @@ -467,13 +467,13 @@ class ResidentialSector(Sector): fuel_type_ef[in_p] * speciation_factor) emissions[out_p] = in_df.divide(self.molecular_weights[in_p]) - self.logger.write_time_log('ResidentialSector', 'calculate_output_emissions_from_fuel_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ResidentialSector', 'calculate_output_emissions_from_fuel_distribution', + timeit.default_timer() - spent_time) return emissions def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') fuel_distribution_by_hour = self.calculate_fuel_distribution_by_hour() emissions = self.calculate_output_emissions_from_fuel_distribution(fuel_distribution_by_hour) @@ -481,6 +481,6 @@ class ResidentialSector(Sector): emissions['layer'] = 0 emissions.set_index(['FID', 'layer', 'tstep'], inplace=True) - self.logger.write_log('\t\tResidential emissions 
calculated', message_level=2) - self.logger.write_time_log('ResidentialSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tResidential emissions calculated', message_level=2) + self.__logger.write_time_log('ResidentialSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/sector.py b/hermesv3_bu/sectors/sector.py index bac090a..46bc5c3 100755 --- a/hermesv3_bu/sectors/sector.py +++ b/hermesv3_bu/sectors/sector.py @@ -76,8 +76,8 @@ class Sector(object): """ spent_time = timeit.default_timer() - self.comm = comm - self.logger = logger + self.__comm = comm + self.__logger = logger self.auxiliary_dir = auxiliary_dir self.grid = grid self.clip = clip @@ -98,7 +98,7 @@ class Sector(object): self.output_pollutants = list(self.speciation_map.keys()) - self.logger.write_time_log('Sector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', '__init__', timeit.default_timer() - spent_time) def read_speciation_profiles(self, path): """ @@ -124,7 +124,7 @@ class Sector(object): dataframe = pd.read_csv(path) dataframe.set_index('ID', inplace=True) - self.logger.write_time_log('Sector', 'read_speciation_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_speciation_profiles', timeit.default_timer() - spent_time) return dataframe def read_speciation_map(self, path): @@ -161,7 +161,7 @@ class Sector(object): dataframe = dataframe.loc[dataframe['src'].isin(self.source_pollutants), :] dataframe = dict(zip(dataframe['dst'], dataframe['src'])) - self.logger.write_time_log('Sector', 'read_speciation_map', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_speciation_map', timeit.default_timer() - spent_time) return dataframe @@ -189,7 +189,7 @@ class Sector(object): # dataframe = dataframe.loc[dataframe['Specie'].isin(self.source_pollutants)] mol_wei = 
dict(zip(dataframe['Specie'], dataframe['MW'])) - self.logger.write_time_log('Sector', 'read_molecular_weights', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_molecular_weights', timeit.default_timer() - spent_time) return mol_wei @@ -208,7 +208,7 @@ class Sector(object): """ spent_time = timeit.default_timer() dataframe = pd.read_csv(path, sep=sep) - self.logger.write_time_log('Sector', 'read_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_profiles', timeit.default_timer() - spent_time) return dataframe @@ -234,7 +234,7 @@ class Sector(object): inplace=True) profiles.set_index('P_month', inplace=True) - self.logger.write_time_log('Sector', 'read_monthly_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_monthly_profiles', timeit.default_timer() - spent_time) return profiles def read_weekly_profiles(self, path): @@ -258,7 +258,7 @@ class Sector(object): columns={'Monday': 0, 'Tuesday': 1, 'Wednesday': 2, 'Thursday': 3, 'Friday': 4, 'Saturday': 5, 'Sunday': 6, }, inplace=True) profiles.set_index('P_week', inplace=True) - self.logger.write_time_log('Sector', 'read_weekly_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_weekly_profiles', timeit.default_timer() - spent_time) return profiles def read_hourly_profiles(self, path): @@ -283,7 +283,7 @@ class Sector(object): profiles.columns = profiles.columns.astype(int) profiles.rename(columns={-1: 'P_hour'}, inplace=True) profiles.set_index('P_hour', inplace=True) - self.logger.write_time_log('Sector', 'read_hourly_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'read_hourly_profiles', timeit.default_timer() - spent_time) return profiles def calculate_rebalanced_weekly_profile(self, profile, date): @@ -306,7 +306,7 @@ class Sector(object): weekdays = self.calculate_weekdays(date) rebalanced_profile = 
self.calculate_weekday_factor_full_month(profile, weekdays) - self.logger.write_time_log('Sector', 'calculate_rebalanced_weekly_profile', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'calculate_rebalanced_weekly_profile', timeit.default_timer() - spent_time) return rebalanced_profile @@ -332,7 +332,7 @@ class Sector(object): increment = float(num_days - weekdays_factors) / num_days for day in range(7): profile[day] = (increment + profile[day]) / num_days - self.logger.write_time_log('Sector', 'calculate_weekday_factor_full_month', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'calculate_weekday_factor_full_month', timeit.default_timer() - spent_time) return profile @@ -354,7 +354,7 @@ class Sector(object): weekdays_dict = {} for i, day in enumerate(weekdays): weekdays_dict[i] = days.count(day) - self.logger.write_time_log('Sector', 'calculate_weekdays', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'calculate_weekdays', timeit.default_timer() - spent_time) return weekdays_dict def add_dates(self, dataframe, drop_utc=True): @@ -385,7 +385,7 @@ class Sector(object): dataframe = self.to_timezone(dataframe) if drop_utc: dataframe.drop('date_utc', axis=1, inplace=True) - self.logger.write_time_log('Sector', 'add_dates', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'add_dates', timeit.default_timer() - spent_time) return dataframe @@ -404,7 +404,7 @@ class Sector(object): tzfinder = TimezoneFinder() dataframe['timezone'] = dataframe.centroid.apply(lambda x: tzfinder.timezone_at(lng=x.x, lat=x.y)) dataframe.reset_index(inplace=True) - self.logger.write_time_log('Sector', 'add_timezone', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'add_timezone', timeit.default_timer() - spent_time) return dataframe def to_timezone(self, dataframe): @@ -422,7 +422,7 @@ class Sector(object): lambda x: 
x.dt.tz_convert(x.name).dt.tz_localize(None)) dataframe.drop('timezone', axis=1, inplace=True) - self.logger.write_time_log('Sector', 'to_timezone', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'to_timezone', timeit.default_timer() - spent_time) return dataframe @@ -456,7 +456,7 @@ class Sector(object): shapefile.rename(columns={nut_value: 'nut_code'}, inplace=True) shapefile.loc[shapefile['nut_code'].isna(), 'nut_code'] = -999 shapefile['nut_code'] = shapefile['nut_code'].astype(np.int64) - self.logger.write_time_log('Sector', 'add_nut_code', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'add_nut_code', timeit.default_timer() - spent_time) return shapefile @@ -516,7 +516,7 @@ class Sector(object): df1 = df1.loc[~df1.geometry.is_empty].copy() df1.drop(['bbox', 'histreg', 'new_g'], axis=1, inplace=True) return_value = df1 - self.logger.write_time_log('Sector', 'spatial_overlays', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'spatial_overlays', timeit.default_timer() - spent_time) return return_value @@ -530,18 +530,18 @@ class Sector(object): nearest = df2[geom2_col] == nearest_points(row[geom1_col], geom_union)[1] # Get the corresponding value from df2 (matching is based on the geometry) value = df2[nearest][src_column].get_values()[0] - self.logger.write_time_log('Sector', 'nearest', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'nearest', timeit.default_timer() - spent_time) return value def speciate(self, dataframe, code='default'): spent_time = timeit.default_timer() - self.logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) + self.__logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) new_dataframe = pd.DataFrame(index=dataframe.index, data=None) for out_pollutant in self.output_pollutants: if out_pollutant != 'PMC': - self.logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( + 
self.__logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( out_pollutant, self.speciation_map[out_pollutant], self.molecular_weights[self.speciation_map[out_pollutant]], self.speciation_profile.loc[code, out_pollutant]), message_level=3) @@ -550,7 +550,7 @@ class Sector(object): self.molecular_weights[self.speciation_map[out_pollutant]]) * \ self.speciation_profile.loc[code, out_pollutant] else: - self.logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( + self.__logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( out_pollutant, 'pm10', self.molecular_weights['pm10'], self.speciation_profile.loc[code, out_pollutant], 'pm25', self.molecular_weights['pm25']), message_level=3) @@ -559,33 +559,33 @@ class Sector(object): ((dataframe['pm10'] / self.molecular_weights['pm10']) - (dataframe['pm25'] / self.molecular_weights['pm25'])) * \ self.speciation_profile.loc[code, out_pollutant] - self.logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) return new_dataframe def get_output_pollutants(self, input_pollutant): spent_time = timeit.default_timer() return_value = [outs for outs, ints in self.speciation_map.items() if ints == input_pollutant] - self.logger.write_time_log('Sector', 'get_output_pollutants', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Sector', 'get_output_pollutants', timeit.default_timer() - spent_time) return return_value def calculate_land_use_by_nut(self, land_use_raster_path, nut_shapefile_path, out_land_use_by_nut_path): # 1st Clip the raster lu_raster_path = os.path.join(self.auxiliary_dir, 'clipped_land_use.tif') - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: if not os.path.exists(lu_raster_path): - lu_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + lu_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( land_use_raster_path, 
self.clip.shapefile, lu_raster_path) # 2nd Raster to shapefile - land_use_shp = IoRaster(self.comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) + land_use_shp = IoRaster(self.__comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) # 3rd Add NUT code land_use_shp.drop(columns='CELL_ID', inplace=True) land_use_shp.rename(columns={'data': 'land_use'}, inplace=True) land_use_shp = self.add_nut_code(land_use_shp, nut_shapefile_path, nut_value='nuts2_id') land_use_shp = land_use_shp[land_use_shp['nut_code'] != -999] - land_use_shp = IoShapefile(self.comm).balance(land_use_shp) + land_use_shp = IoShapefile(self.__comm).balance(land_use_shp) # 4th Calculate land_use percent land_use_shp['area'] = land_use_shp.geometry.area @@ -599,31 +599,31 @@ class Sector(object): (land_use_shp['land_use'] == land_use) & (land_use_shp['nut_code'] == nut_code), 'area'].sum() land_use_by_nut.reset_index(inplace=True) - land_use_by_nut = IoShapefile(self.comm).gather_shapefile(land_use_by_nut, rank=0) + land_use_by_nut = IoShapefile(self.__comm).gather_shapefile(land_use_by_nut, rank=0) - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: land_use_by_nut = land_use_by_nut.groupby(['nuts2_id', 'land_use']).sum() land_use_by_nut.to_csv(out_land_use_by_nut_path) print('DONE -> {0}'.format(out_land_use_by_nut_path)) - self.comm.Barrier() + self.__comm.Barrier() def create_population_by_nut(self, population_raster_path, nut_shapefile_path, output_path, nut_column='nuts3_id'): # 1st Clip the raster - self.logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.comm.Get_rank() == 0: - clipped_population_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.__comm.Get_rank() == 0: + clipped_population_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( population_raster_path, self.clip.shapefile, 
os.path.join(self.auxiliary_dir, 'traffic_area', 'pop.tif')) else: clipped_population_path = None # 2nd Raster to shapefile - self.logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.comm).to_shapefile_parallel( + self.__logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.__comm).to_shapefile_parallel( clipped_population_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) @@ -631,10 +631,10 @@ class Sector(object): pop_shp = pop_shp[pop_shp['nut_code'] != -999] pop_shp.rename(columns={'nut_code': nut_column}, inplace=True) - pop_shp = IoShapefile(self.comm).gather_shapefile(pop_shp) - if self.comm.Get_rank() == 0: + pop_shp = IoShapefile(self.__comm).gather_shapefile(pop_shp) + if self.__comm.Get_rank() == 0: popu_dist = pop_shp.groupby(nut_column).sum() popu_dist.to_csv(output_path) - self.comm.Barrier() + self.__comm.Barrier() return True diff --git a/hermesv3_bu/sectors/sector_manager.py b/hermesv3_bu/sectors/sector_manager.py index a2a79f8..7bb7d0e 100755 --- a/hermesv3_bu/sectors/sector_manager.py +++ b/hermesv3_bu/sectors/sector_manager.py @@ -26,11 +26,11 @@ class SectorManager(object): :type arguments: NameSpace """ spent_time = timeit.default_timer() - self.logger = logger + self.__logger = logger self.sector_list = self.make_sector_list(arguments, comm_world.Get_size()) - self.logger.write_log('Sector process distribution:') + self.__logger.write_log('Sector process distribution:') for sect, procs in self.sector_list.items(): - self.logger.write_log('\t{0}: {1}'.format(sect, procs)) + self.__logger.write_log('\t{0}: {1}'.format(sect, procs)) color = 10 
agg_color = 99 @@ -38,7 +38,7 @@ class SectorManager(object): if sector == 'aviation' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.aviation_sector import AviationSector self.sector = AviationSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.aviation_source_pollutants, grid.vertical_desctiption, arguments.airport_list, arguments.plane_list, arguments.airport_shapefile_path, arguments.airport_runways_shapefile_path, @@ -50,7 +50,7 @@ class SectorManager(object): elif sector == 'shipping_port' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.shipping_port_sector import ShippingPortSector self.sector = ShippingPortSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.shipping_port_source_pollutants, grid.vertical_desctiption, arguments.vessel_list, arguments.port_list, arguments.hoteling_shapefile_path, arguments.maneuvering_shapefile_path, @@ -63,7 +63,7 @@ class SectorManager(object): elif sector == 'livestock' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.livestock_sector import LivestockSector self.sector = LivestockSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.livestock_source_pollutants, grid.vertical_desctiption, arguments.animal_list, arguments.gridded_livestock, arguments.correction_split_factors, @@ -128,7 +128,7 @@ class SectorManager(object): elif sector == 'residential' and comm_world.Get_rank() in sector_procs: from 
hermesv3_bu.sectors.residential_sector import ResidentialSector self.sector = ResidentialSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.residential_source_pollutants, grid.vertical_desctiption, arguments.fuel_list, arguments.nuts3_shapefile, arguments.nuts2_shapefile, arguments.population_density_map, @@ -142,7 +142,7 @@ class SectorManager(object): elif sector == 'recreational_boats' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.recreational_boats_sector import RecreationalBoatsSector self.sector = RecreationalBoatsSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.recreational_boats_source_pollutants, grid.vertical_desctiption, arguments.recreational_boats_list, arguments.recreational_boats_density_map, @@ -154,7 +154,7 @@ class SectorManager(object): elif sector == 'point_sources' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.point_source_sector import PointSourceSector self.sector = PointSourceSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.point_source_pollutants, grid.vertical_desctiption, arguments.point_source_catalog, arguments.point_source_monthly_profiles, @@ -175,7 +175,7 @@ class SectorManager(object): elif sector == 'traffic' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.traffic_sector import TrafficSector self.sector = TrafficSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + 
comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.traffic_pollutants, grid.vertical_desctiption, arguments.road_link_path, arguments.fleet_compo_path, arguments.traffic_speed_hourly_path, arguments.traffic_monthly_profiles, @@ -193,7 +193,7 @@ class SectorManager(object): elif sector == 'traffic_area' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.traffic_area_sector import TrafficAreaSector self.sector = TrafficAreaSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.traffic_area_pollutants, grid.vertical_desctiption, arguments.population_density_map, arguments.speciation_map, arguments.molecular_weights, arguments.do_evaporative, arguments.traffic_area_gas_path, @@ -207,7 +207,7 @@ class SectorManager(object): elif sector == 'solvents' and comm_world.Get_rank() in sector_procs: from hermesv3_bu.sectors.solvents_sector import SolventsSector self.sector = SolventsSector( - comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.logger, + comm_world.Split(color, sector_procs.index(comm_world.Get_rank())), self.__logger, arguments.auxiliary_files_path, grid, clip, date_array, arguments.solvents_pollutants, grid.vertical_desctiption, arguments.speciation_map, arguments.molecular_weights, arguments.solvents_speciation_profiles, arguments.solvents_monthly_profile, @@ -219,12 +219,12 @@ class SectorManager(object): color += 1 - self.logger.write_time_log('SectorManager', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SectorManager', '__init__', timeit.default_timer() - spent_time) def run(self): spent_time = timeit.default_timer() emis = self.sector.calculate_emissions() - self.logger.write_time_log('SectorManager', 
'run', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SectorManager', 'run', timeit.default_timer() - spent_time) return emis def make_sector_list(self, arguments, max_procs): @@ -242,5 +242,5 @@ class SectorManager(object): "'{0}': {1}".format( accum, {sector: len(sector_procs) for sector, sector_procs in sector_dict.items()})) - self.logger.write_time_log('SectorManager', 'make_sector_list', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SectorManager', 'make_sector_list', timeit.default_timer() - spent_time) return sector_dict diff --git a/hermesv3_bu/sectors/shipping_port_sector.py b/hermesv3_bu/sectors/shipping_port_sector.py index 88759a2..dc5cd28 100755 --- a/hermesv3_bu/sectors/shipping_port_sector.py +++ b/hermesv3_bu/sectors/shipping_port_sector.py @@ -108,24 +108,24 @@ class ShippingPortSector(Sector): self.tonnage.set_index('code', inplace=True) self.load_factor = self.read_profiles(load_factor_path) self.power_values = self.read_profiles(power_path) - self.logger.write_time_log('ShippingPortSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', '__init__', timeit.default_timer() - spent_time) def get_port_list(self): - if self.comm.Get_rank() == 0: - port_shp = IoShapefile(self.comm).read_shapefile_serial(self.maneuvering_shapefile_path) + if self.__comm.Get_rank() == 0: + port_shp = IoShapefile(self.__comm).read_shapefile_serial(self.maneuvering_shapefile_path) port_shp.drop(columns=['Name', 'Weight'], inplace=True) port_shp = gpd.sjoin(port_shp, self.clip.shapefile.to_crs(port_shp.crs), how='inner', op='intersects') port_list = np.unique(port_shp['code'].values) - if len(port_list) < self.comm.Get_size(): + if len(port_list) < self.__comm.Get_size(): error_exit("The chosen number of processors {0} exceeds the number of involved ports {1}.".format( - self.comm.Get_size(), len(port_list)) + " Set {0} at shipping_port_processors value.".format( + 
self.__comm.Get_size(), len(port_list)) + " Set {0} at shipping_port_processors value.".format( len(port_list))) - port_list = np.array_split(port_list, self.comm.Get_size()) + port_list = np.array_split(port_list, self.__comm.Get_size()) else: port_list = None - port_list = self.comm.scatter(port_list, root=0) + port_list = self.__comm.scatter(port_list, root=0) return list(port_list) @@ -151,7 +151,7 @@ class ShippingPortSector(Sector): columns={'January': 1, 'February': 2, 'March': 3, 'April': 4, 'May': 5, 'June': 6, 'July': 7, 'August': 8, 'September': 9, 'October': 10, 'November': 11, 'December': 12}, inplace=True) - self.logger.write_time_log('ShippingPortSector', 'read_monthly_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'read_monthly_profiles', timeit.default_timer() - spent_time) return profiles @@ -182,7 +182,7 @@ class ShippingPortSector(Sector): dataframe.reset_index(inplace=True) dataframe = pd.merge(dataframe, shapefile.loc[:, ['code', 'timezone']], on='code') dataframe.set_index(['code', 'vessel'], inplace=True) - self.logger.write_time_log('ShippingPortSector', 'add_timezone', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'add_timezone', timeit.default_timer() - spent_time) return dataframe @@ -218,7 +218,7 @@ class ShippingPortSector(Sector): dataframe.drop('date_utc', axis=1, inplace=True) dataframe.set_index(['code', 'vessel', 'tstep'], inplace=True) # del dataframe['date_utc'] - self.logger.write_time_log('ShippingPortSector', 'add_dates', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'add_dates', timeit.default_timer() - spent_time) return dataframe @@ -379,7 +379,7 @@ class ShippingPortSector(Sector): lambda x: get_ef(x, 'main', pollutant)) dataframe['EF_a_{0}'.format(pollutant)] = dataframe.groupby('vessel').apply( lambda x: get_ef(x, 'aux', pollutant)) - 
self.logger.write_time_log('ShippingPortSector', 'get_constants', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'get_constants', timeit.default_timer() - spent_time) return dataframe @@ -407,8 +407,8 @@ class ShippingPortSector(Sector): hoteling['{0}'.format(pollutant)] += \ constants['P'] * constants['Rae'] * constants['N'] * constants['LF_ha'] * constants['T_h'] * \ constants['EF_a_{0}'.format(pollutant)] - self.logger.write_time_log('ShippingPortSector', 'calculate_yearly_emissions_by_port_vessel', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'calculate_yearly_emissions_by_port_vessel', + timeit.default_timer() - spent_time) return manoeuvring, hoteling @@ -426,8 +426,8 @@ class ShippingPortSector(Sector): dataframe['month'] = dataframe['date'].dt.month dataframe['weekday'] = dataframe['date'].dt.weekday dataframe['hour'] = dataframe['date'].dt.hour - self.logger.write_time_log('ShippingPortSector', 'dates_to_month_weekday_hour', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'dates_to_month_weekday_hour', + timeit.default_timer() - spent_time) return dataframe @@ -472,8 +472,8 @@ class ShippingPortSector(Sector): operations['hour'] = 'max' operations['date'] = 'max' dataframe = dataframe.groupby(level=['code', 'tstep']).agg(operations) - self.logger.write_time_log('ShippingPortSector', 'calculate_monthly_emissions_by_port', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'calculate_monthly_emissions_by_port', + timeit.default_timer() - spent_time) return dataframe @@ -529,8 +529,8 @@ class ShippingPortSector(Sector): dataframe['HF'] = dataframe.groupby('hour').apply(get_hf) dataframe[self.source_pollutants] = dataframe[self.source_pollutants].multiply(dataframe['HF'], axis=0) dataframe.drop(columns=['hour', 'HF'], inplace=True) - self.logger.write_time_log('ShippingPortSector', 
'calculate_hourly_emissions_by_port', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'calculate_hourly_emissions_by_port', + timeit.default_timer() - spent_time) return dataframe @@ -565,7 +565,7 @@ class ShippingPortSector(Sector): dataframe[self.source_pollutants] = dataframe[self.source_pollutants].multiply(dataframe['Weight'], axis=0) dataframe.drop(columns=['Weight'], inplace=True) - self.logger.write_time_log('ShippingPortSector', 'to_port_geometry', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'to_port_geometry', timeit.default_timer() - spent_time) return dataframe @@ -597,7 +597,7 @@ class ShippingPortSector(Sector): dataframe['layer'] = 0 dataframe = dataframe.loc[:, ~dataframe.columns.duplicated()] dataframe = dataframe.groupby(['FID', 'layer', 'tstep']).sum() - self.logger.write_time_log('ShippingPortSector', 'to_grid_geometry', timeit.default_timer() - spent_time) + self.__logger.write_time_log('ShippingPortSector', 'to_grid_geometry', timeit.default_timer() - spent_time) return dataframe @@ -609,9 +609,9 @@ class ShippingPortSector(Sector): :rtype: padas.DataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') - self.logger.write_log('\t\tCalculating yearly emissions', message_level=2) + self.__logger.write_log('\t\tCalculating yearly emissions', message_level=2) manoeuvring, hoteling = self.calculate_yearly_emissions_by_port_vessel() manoeuvring = self.add_timezone(manoeuvring, self.maneuvering_shapefile_path) @@ -623,18 +623,18 @@ class ShippingPortSector(Sector): manoeuvring = self.dates_to_month_weekday_hour(manoeuvring) hoteling = self.dates_to_month_weekday_hour(hoteling) - self.logger.write_log('\t\tCalculating monthly emissions', message_level=2) + self.__logger.write_log('\t\tCalculating monthly emissions', message_level=2) manoeuvring = 
self.calculate_monthly_emissions_by_port(manoeuvring) hoteling = self.calculate_monthly_emissions_by_port(hoteling) - self.logger.write_log('\t\tCalculating hourly emissions', message_level=2) + self.__logger.write_log('\t\tCalculating hourly emissions', message_level=2) manoeuvring = self.calculate_hourly_emissions_by_port(manoeuvring) hoteling = self.calculate_hourly_emissions_by_port(hoteling) # TODO pre-calculate distribution during initialization. - self.logger.write_log('\t\tDistributing emissions', message_level=2) + self.__logger.write_log('\t\tDistributing emissions', message_level=2) manoeuvring = self.to_port_geometry(manoeuvring, self.maneuvering_shapefile_path) hoteling = self.to_port_geometry(hoteling, self.hoteling_shapefile_path) @@ -647,6 +647,6 @@ class ShippingPortSector(Sector): dataframe = self.speciate(dataframe, 'default') - self.logger.write_log('\t\tShipping port emissions calculated', message_level=2) - self.logger.write_time_log('ShippingPortSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tShipping port emissions calculated', message_level=2) + self.__logger.write_time_log('ShippingPortSector', 'calculate_emissions', timeit.default_timer() - spent_time) return dataframe diff --git a/hermesv3_bu/sectors/solvents_sector.py b/hermesv3_bu/sectors/solvents_sector.py index 8468fb7..20b0fd6 100755 --- a/hermesv3_bu/sectors/solvents_sector.py +++ b/hermesv3_bu/sectors/solvents_sector.py @@ -151,7 +151,7 @@ class SolventsSector(Sector): nut2_shapefile_path, point_sources_shapefile_path, point_sources_weight_by_nut2_path) self.yearly_emissions_path = yearly_emissions_by_nut2_path - self.logger.write_time_log('SolventsSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', '__init__', timeit.default_timer() - spent_time) def read_proxies(self, path): """ @@ -179,7 +179,7 @@ class SolventsSector(Sector): proxies_df.loc[proxies_df['spatial_proxy'] == 
'shapefile', 'proxy_name'] = \ proxies_df['industry_code'].map(PROXY_NAMES) - self.logger.write_time_log('SolventsSector', 'read_proxies', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'read_proxies', timeit.default_timer() - spent_time) return proxies_df def check_profiles(self): @@ -227,7 +227,7 @@ class SolventsSector(Sector): error_exit("The following speciation profile IDs reported in the solvent proxies CSV file do not appear " + "in the speciation profiles file. {0}".format(spec_res)) - self.logger.write_time_log('SolventsSector', 'check_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'check_profiles', timeit.default_timer() - spent_time) return True def read_yearly_emissions(self, path, nut_list): @@ -256,7 +256,7 @@ class SolventsSector(Sector): year_emis.set_index(['nuts2_id', 'snap'], inplace=True) year_emis.drop(columns=['gnfr_description', 'gnfr', 'snap_description', 'nuts2_na'], inplace=True) - self.logger.write_time_log('SolventsSector', 'read_yearly_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'read_yearly_emissions', timeit.default_timer() - spent_time) return year_emis def get_population_by_nut2(self, path): @@ -275,7 +275,7 @@ class SolventsSector(Sector): pop_by_nut2.set_index('nuts2_id', inplace=True) pop_by_nut2 = pop_by_nut2.to_dict()['pop'] - self.logger.write_time_log('SolventsSector', 'get_pop_by_nut2', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'get_pop_by_nut2', timeit.default_timer() - spent_time) return pop_by_nut2 def get_point_sources_weights_by_nut2(self, path, proxy_name): @@ -300,8 +300,8 @@ class SolventsSector(Sector): weights_by_nut2.set_index("nuts2_id", inplace=True) weights_by_nut2 = weights_by_nut2.to_dict()['weight'] - self.logger.write_time_log('SolventsSector', 'get_point_sources_weights_by_nut2', - timeit.default_timer() - spent_time) + 
self.__logger.write_time_log('SolventsSector', 'get_point_sources_weights_by_nut2', + timeit.default_timer() - spent_time) return weights_by_nut2 def get_land_use_by_nut2(self, path, land_uses, nut_codes): @@ -327,7 +327,7 @@ class SolventsSector(Sector): land_use_by_nut2 = land_use_by_nut2[land_use_by_nut2['land_use'].isin(land_uses)] land_use_by_nut2.set_index(['nuts2_id', 'land_use'], inplace=True) - self.logger.write_time_log('SolventsSector', 'get_land_use_by_nut2', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'get_land_use_by_nut2', timeit.default_timer() - spent_time) return land_use_by_nut2 def get_population_proxy(self, pop_raster_path, pop_by_nut2_path, nut2_shapefile_path): @@ -349,35 +349,35 @@ class SolventsSector(Sector): spent_time = timeit.default_timer() # 1st Clip the raster - self.logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.comm.Get_rank() == 0: - pop_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.__comm.Get_rank() == 0: + pop_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( pop_raster_path, self.clip.shapefile, os.path.join(self.auxiliary_dir, 'solvents', 'pop.tif')) # 2nd Raster to shapefile - self.logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.comm).to_shapefile_parallel( + self.__logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.__comm).to_shapefile_parallel( pop_raster_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) 
pop_shp = self.add_nut_code(pop_shp, nut2_shapefile_path, nut_value='nuts2_id') pop_shp = pop_shp[pop_shp['nut_code'] != -999] - pop_shp = IoShapefile(self.comm).balance(pop_shp) + pop_shp = IoShapefile(self.__comm).balance(pop_shp) # pop_shp = IoShapefile(self.comm).split_shapefile(pop_shp) # 4th Calculate population percent - self.logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) + self.__logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) pop_by_nut2 = self.get_population_by_nut2(pop_by_nut2_path) pop_shp['tot_pop'] = pop_shp['nut_code'].map(pop_by_nut2) pop_shp['pop_percent'] = pop_shp['population'] / pop_shp['tot_pop'] pop_shp.drop(columns=['tot_pop', 'population'], inplace=True) # 5th Calculate percent by destiny cell - self.logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) + self.__logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) pop_shp.to_crs(self.grid.shapefile.crs, inplace=True) pop_shp['src_inter_fraction'] = pop_shp.geometry.area pop_shp = self.spatial_overlays(pop_shp.reset_index(), self.grid.shapefile.reset_index()) @@ -389,7 +389,7 @@ class SolventsSector(Sector): popu_dist = pop_shp.groupby(['FID', 'nut_code']).sum() popu_dist.rename(columns={'pop_percent': 'population'}, inplace=True) - self.logger.write_time_log('SolventsSector', 'get_population_proxie', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'get_population_proxie', timeit.default_timer() - spent_time) return popu_dist def get_land_use_proxy(self, land_use_raster, land_use_by_nut2_path, land_uses, nut2_shapefile_path): @@ -413,31 +413,31 @@ class SolventsSector(Sector): """ spent_time = timeit.default_timer() # 1st Clip the raster - self.logger.write_log("\t\tCreating clipped land use raster", message_level=3) + self.__logger.write_log("\t\tCreating clipped land use 
raster", message_level=3) lu_raster_path = os.path.join(self.auxiliary_dir, 'solvents', 'lu_{0}.tif'.format( '_'.join([str(x) for x in land_uses]))) - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: if not os.path.exists(lu_raster_path): - lu_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + lu_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( land_use_raster, self.clip.shapefile, lu_raster_path, values=land_uses) # 2nd Raster to shapefile - self.logger.write_log("\t\tRaster to shapefile", message_level=3) - land_use_shp = IoRaster(self.comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) + self.__logger.write_log("\t\tRaster to shapefile", message_level=3) + land_use_shp = IoRaster(self.__comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) # 3rd Add NUT code - self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: land_use_shp.drop(columns='CELL_ID', inplace=True) land_use_shp.rename(columns={'data': 'land_use'}, inplace=True) land_use_shp = self.add_nut_code(land_use_shp, nut2_shapefile_path, nut_value='nuts2_id') land_use_shp = land_use_shp[land_use_shp['nut_code'] != -999] - land_use_shp = IoShapefile(self.comm).balance(land_use_shp) + land_use_shp = IoShapefile(self.__comm).balance(land_use_shp) # land_use_shp = IoShapefile(self.comm).split_shapefile(land_use_shp) # 4th Calculate land_use percent - self.logger.write_log("\t\tCalculating land use percentage on source resolution", message_level=3) + self.__logger.write_log("\t\tCalculating land use percentage on source resolution", message_level=3) land_use_shp['area'] = land_use_shp.geometry.area land_use_by_nut2 = self.get_land_use_by_nut2( @@ -449,7 +449,7 @@ class SolventsSector(Sector): land_use_shp.drop(columns='area', inplace=True) # 5th Calculate percent by dest_cell - 
self.logger.write_log("\t\tCalculating land use percentage on destiny resolution", message_level=3) + self.__logger.write_log("\t\tCalculating land use percentage on destiny resolution", message_level=3) land_use_shp.to_crs(self.grid.shapefile.crs, inplace=True) land_use_shp['src_inter_fraction'] = land_use_shp.geometry.area @@ -462,7 +462,7 @@ class SolventsSector(Sector): land_use_dist = land_use_shp.groupby(['FID', 'nut_code']).sum() land_use_dist.rename(columns={'fraction': 'lu_{0}'.format('_'.join([str(x) for x in land_uses]))}, inplace=True) - self.logger.write_time_log('SolventsSector', 'get_land_use_proxy', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'get_land_use_proxy', timeit.default_timer() - spent_time) return land_use_dist def get_point_shapefile_proxy(self, proxy_name, point_shapefile_path, point_sources_weight_by_nut2_path, @@ -488,17 +488,17 @@ class SolventsSector(Sector): """ spent_time = timeit.default_timer() - point_shapefile = IoShapefile(self.comm).read_shapefile_parallel(point_shapefile_path) + point_shapefile = IoShapefile(self.__comm).read_shapefile_parallel(point_shapefile_path) point_shapefile.drop(columns=['Empresa', 'Empleados', 'Ingresos', 'Consumos', 'LON', 'LAT'], inplace=True) point_shapefile = point_shapefile[point_shapefile['industry_c'] == [key for key, value in PROXY_NAMES.items() if value == proxy_name][0]] - point_shapefile = IoShapefile(self.comm).balance(point_shapefile) + point_shapefile = IoShapefile(self.__comm).balance(point_shapefile) point_shapefile.drop(columns=['industry_c'], inplace=True) point_shapefile = self.add_nut_code(point_shapefile, nut2_shapefile_path, nut_value='nuts2_id') point_shapefile = point_shapefile[point_shapefile['nut_code'] != -999] - point_shapefile = IoShapefile(self.comm).gather_shapefile(point_shapefile, rank=0) - if self.comm.Get_rank() == 0: + point_shapefile = IoShapefile(self.__comm).gather_shapefile(point_shapefile, rank=0) + if 
self.__comm.Get_rank() == 0: weight_by_nut2 = self.get_point_sources_weights_by_nut2( point_sources_weight_by_nut2_path, [key for key, value in PROXY_NAMES.items() if value == proxy_name][0]) @@ -507,12 +507,12 @@ class SolventsSector(Sector): point_shapefile.drop(columns=['weight'], inplace=True) # print(point_shapefile.groupby('nut_code')['weight'].sum()) - point_shapefile = IoShapefile(self.comm).split_shapefile(point_shapefile) + point_shapefile = IoShapefile(self.__comm).split_shapefile(point_shapefile) point_shapefile = gpd.sjoin(point_shapefile.to_crs(self.grid.shapefile.crs), self.grid.shapefile.reset_index()) point_shapefile.drop(columns=['geometry', 'index_right'], inplace=True) point_shapefile = point_shapefile.groupby(['FID', 'nut_code']).sum() - self.logger.write_time_log('SolventsSector', 'get_point_shapefile_proxy', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'get_point_shapefile_proxy', timeit.default_timer() - spent_time) return point_shapefile def get_proxy_shapefile(self, population_raster_path, population_nuts2_path, land_uses_raster_path, @@ -551,13 +551,13 @@ class SolventsSector(Sector): """ spent_time = timeit.default_timer() - self.logger.write_log("Getting proxies shapefile", message_level=1) + self.__logger.write_log("Getting proxies shapefile", message_level=1) proxy_names_list = np.unique(self.proxies_map['proxy_name']) proxy_path = os.path.join(self.auxiliary_dir, 'solvents', 'proxy_distributions.shp') if not os.path.exists(proxy_path): proxy_list = [] for proxy_name in proxy_names_list: - self.logger.write_log("\tGetting proxy for {0}".format(proxy_name), message_level=2) + self.__logger.write_log("\tGetting proxy for {0}".format(proxy_name), message_level=2) if proxy_name == 'population': proxy = self.get_population_proxy(population_raster_path, population_nuts2_path, nut2_shapefile_path) @@ -569,10 +569,10 @@ class SolventsSector(Sector): else: proxy = 
self.get_point_shapefile_proxy(proxy_name, point_sources_shapefile_path, point_sources_weight_by_nut2_path, nut2_shapefile_path) - proxy = IoShapefile(self.comm).gather_shapefile(proxy.reset_index()) - if self.comm.Get_rank() == 0: + proxy = IoShapefile(self.__comm).gather_shapefile(proxy.reset_index()) + if self.__comm.Get_rank() == 0: proxy_list.append(proxy) - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: proxies = pd.concat(proxy_list, sort=False) proxies['FID'] = proxies['FID'].astype(int) proxies['nut_code'] = proxies['nut_code'].astype(int) @@ -583,18 +583,18 @@ class SolventsSector(Sector): proxies = GeoDataFrame( proxies, geometry=self.grid.shapefile.loc[proxies.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - IoShapefile(self.comm).write_shapefile_serial(proxies.reset_index(), proxy_path) + IoShapefile(self.__comm).write_shapefile_serial(proxies.reset_index(), proxy_path) else: proxies = None else: - if self.comm.Get_rank() == 0: - proxies = IoShapefile(self.comm).read_shapefile_serial(proxy_path) + if self.__comm.Get_rank() == 0: + proxies = IoShapefile(self.__comm).read_shapefile_serial(proxy_path) proxies.set_index(['FID', 'nut_code'], inplace=True) else: proxies = None - proxies = IoShapefile(self.comm).split_shapefile(proxies) + proxies = IoShapefile(self.__comm).split_shapefile(proxies) - self.logger.write_time_log('SolventsSector', 'get_proxy_shapefile', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'get_proxy_shapefile', timeit.default_timer() - spent_time) return proxies def calculate_hourly_emissions(self, yearly_emissions): @@ -628,7 +628,7 @@ class SolventsSector(Sector): spent_time = timeit.default_timer() - self.logger.write_log('\tHourly disaggregation', message_level=2) + self.__logger.write_log('\tHourly disaggregation', message_level=2) emissions = self.add_dates(yearly_emissions, drop_utc=True) emissions['month'] = emissions['date'].dt.month @@ 
-647,7 +647,7 @@ class SolventsSector(Sector): emissions.drop(columns=['temp_factor'], inplace=True) emissions.set_index(['FID', 'snap', 'tstep'], inplace=True) - self.logger.write_time_log('SolventsSector', 'calculate_hourly_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'calculate_hourly_emissions', timeit.default_timer() - spent_time) return emissions def distribute_yearly_emissions(self): @@ -658,7 +658,7 @@ class SolventsSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\t\tYearly distribution', message_level=2) + self.__logger.write_log('\t\tYearly distribution', message_level=2) yearly_emis = self.read_yearly_emissions( self.yearly_emissions_path, np.unique(self.proxy.index.get_level_values('nut_code'))) @@ -686,7 +686,7 @@ class SolventsSector(Sector): emis = pd.concat(emis_list).sort_index() emis = emis[emis['nmvoc'] > 0] - self.logger.write_time_log('SolventsSector', 'distribute_yearly_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'distribute_yearly_emissions', timeit.default_timer() - spent_time) return emis def speciate(self, dataframe, code='default'): @@ -709,12 +709,12 @@ class SolventsSector(Sector): return x[[out_p]] spent_time = timeit.default_timer() - self.logger.write_log('\tSpeciation emissions', message_level=2) + self.__logger.write_log('\tSpeciation emissions', message_level=2) new_dataframe = gpd.GeoDataFrame(index=dataframe.index, data=None, crs=dataframe.crs, geometry=dataframe.geometry) for out_pollutant in self.output_pollutants: - self.logger.write_log('\t\tSpeciating {0}'.format(out_pollutant), message_level=3) + self.__logger.write_log('\t\tSpeciating {0}'.format(out_pollutant), message_level=3) new_dataframe[out_pollutant] = dataframe.groupby('P_spec').apply( lambda x: calculate_new_pollutant(x, out_pollutant)) new_dataframe.reset_index(inplace=True) @@ -722,7 +722,7 @@ class 
SolventsSector(Sector): new_dataframe.drop(columns=['snap', 'geometry'], inplace=True) new_dataframe.set_index(['FID', 'tstep'], inplace=True) - self.logger.write_time_log('SolventsSector', 'speciate', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'speciate', timeit.default_timer() - spent_time) return new_dataframe def calculate_emissions(self): @@ -733,7 +733,7 @@ class SolventsSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating emissions') + self.__logger.write_log('\tCalculating emissions') emissions = self.distribute_yearly_emissions() emissions = self.calculate_hourly_emissions(emissions) @@ -743,5 +743,5 @@ class SolventsSector(Sector): emissions['layer'] = 0 emissions = emissions.groupby(['FID', 'layer', 'tstep']).sum() - self.logger.write_time_log('SolventsSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('SolventsSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/traffic_area_sector.py b/hermesv3_bu/sectors/traffic_area_sector.py index 44e9ae0..5a01651 100755 --- a/hermesv3_bu/sectors/traffic_area_sector.py +++ b/hermesv3_bu/sectors/traffic_area_sector.py @@ -67,7 +67,7 @@ class TrafficAreaSector(Sector): else: self.small_cities = None - self.logger.write_time_log('TrafficAreaSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', '__init__', timeit.default_timer() - spent_time) def get_population_by_nut2(self, path): """ @@ -85,7 +85,7 @@ class TrafficAreaSector(Sector): pop_by_nut3.set_index('nuts3_id', inplace=True) pop_by_nut3 = pop_by_nut3.to_dict()['population'] - self.logger.write_time_log('TrafficAreaSector', 'get_pop_by_nut3', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'get_pop_by_nut3', timeit.default_timer() - spent_time) return 
pop_by_nut3 def get_population_percent(self, pop_raster_path, pop_by_nut_path, nut_shapefile_path): @@ -109,34 +109,34 @@ class TrafficAreaSector(Sector): pop_percent_path = os.path.join(self.auxiliary_dir, 'traffic_area', 'population_percent') if not os.path.exists(pop_percent_path): # 1st Clip the raster - self.logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.comm.Get_rank() == 0: - pop_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.__comm.Get_rank() == 0: + pop_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( pop_raster_path, self.clip.shapefile, os.path.join(self.auxiliary_dir, 'traffic_area', 'pop.tif')) # 2nd Raster to shapefile - self.logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.comm).to_shapefile_parallel( + self.__logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.__comm).to_shapefile_parallel( pop_raster_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) pop_shp = self.add_nut_code(pop_shp, nut_shapefile_path, nut_value='nuts3_id') pop_shp = pop_shp[pop_shp['nut_code'] != -999] - pop_shp = IoShapefile(self.comm).balance(pop_shp) + pop_shp = IoShapefile(self.__comm).balance(pop_shp) # 4th Calculate population percent - self.logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) + self.__logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) pop_by_nut2 = self.get_population_by_nut2(pop_by_nut_path) 
pop_shp['tot_pop'] = pop_shp['nut_code'].map(pop_by_nut2) pop_shp['pop_percent'] = pop_shp['population'] / pop_shp['tot_pop'] pop_shp.drop(columns=['tot_pop', 'population'], inplace=True) # 5th Calculate percent by destiny cell - self.logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) + self.__logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) pop_shp.to_crs(self.grid.shapefile.crs, inplace=True) pop_shp['src_inter_fraction'] = pop_shp.geometry.area pop_shp = self.spatial_overlays(pop_shp.reset_index(), self.grid.shapefile.reset_index()) @@ -145,22 +145,22 @@ class TrafficAreaSector(Sector): pop_shp['pop_percent'] = pop_shp['pop_percent'] * pop_shp['src_inter_fraction'] pop_shp.drop(columns=['src_inter_fraction'], inplace=True) - pop_shp = IoShapefile(self.comm).gather_shapefile(pop_shp) - if self.comm.Get_rank() == 0: + pop_shp = IoShapefile(self.__comm).gather_shapefile(pop_shp) + if self.__comm.Get_rank() == 0: popu_dist = pop_shp.groupby(['FID', 'nut_code']).sum() popu_dist = GeoDataFrame( popu_dist, geometry=self.grid.shapefile.loc[popu_dist.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - IoShapefile(self.comm).write_shapefile_serial(popu_dist.reset_index(), pop_percent_path) + IoShapefile(self.__comm).write_shapefile_serial(popu_dist.reset_index(), pop_percent_path) else: popu_dist = None - popu_dist = IoShapefile(self.comm).split_shapefile(popu_dist) + popu_dist = IoShapefile(self.__comm).split_shapefile(popu_dist) else: - popu_dist = IoShapefile(self.comm).read_shapefile_parallel(pop_percent_path) + popu_dist = IoShapefile(self.__comm).read_shapefile_parallel(pop_percent_path) popu_dist.set_index(['FID', 'nut_code'], inplace=True) - self.logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'get_population_percent', 
timeit.default_timer() - spent_time) return popu_dist def get_population(self, pop_raster_path, nut_shapefile_path): @@ -181,28 +181,28 @@ class TrafficAreaSector(Sector): pop_path = os.path.join(self.auxiliary_dir, 'traffic_area', 'population_small') if not os.path.exists(pop_path): # 1st Clip the raster - self.logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.comm.Get_rank() == 0: - pop_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( + self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.__comm.Get_rank() == 0: + pop_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( pop_raster_path, self.clip.shapefile, os.path.join(self.auxiliary_dir, 'traffic_area', 'pop.tif')) # 2nd Raster to shapefile - self.logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.comm).to_shapefile_parallel( + self.__logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.__comm).to_shapefile_parallel( pop_raster_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) pop_shp = self.add_nut_code(pop_shp, nut_shapefile_path, nut_value='ORDER08') pop_shp = pop_shp[pop_shp['nut_code'] != -999] - pop_shp = IoShapefile(self.comm).balance(pop_shp) + pop_shp = IoShapefile(self.__comm).balance(pop_shp) # 4th Calculate percent by destiny cell - self.logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) + self.__logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) pop_shp.to_crs(self.grid.shapefile.crs, inplace=True) 
pop_shp['src_inter_fraction'] = pop_shp.geometry.area pop_shp = self.spatial_overlays(pop_shp.reset_index(), self.grid.shapefile.reset_index()) @@ -211,22 +211,22 @@ class TrafficAreaSector(Sector): pop_shp['population'] = pop_shp['population'] * pop_shp['src_inter_fraction'] pop_shp.drop(columns=['src_inter_fraction', 'nut_code'], inplace=True) - pop_shp = IoShapefile(self.comm).gather_shapefile(pop_shp) - if self.comm.Get_rank() == 0: + pop_shp = IoShapefile(self.__comm).gather_shapefile(pop_shp) + if self.__comm.Get_rank() == 0: popu_dist = pop_shp.groupby(['FID']).sum() popu_dist = GeoDataFrame( popu_dist, geometry=self.grid.shapefile.loc[popu_dist.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - IoShapefile(self.comm).write_shapefile_serial(popu_dist.reset_index(), pop_path) + IoShapefile(self.__comm).write_shapefile_serial(popu_dist.reset_index(), pop_path) else: popu_dist = None - popu_dist = IoShapefile(self.comm).split_shapefile(popu_dist) + popu_dist = IoShapefile(self.__comm).split_shapefile(popu_dist) else: - popu_dist = IoShapefile(self.comm).read_shapefile_parallel(pop_path) + popu_dist = IoShapefile(self.__comm).read_shapefile_parallel(pop_path) popu_dist.set_index(['FID'], inplace=True) - self.logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) return popu_dist def init_evaporative(self, gasoline_path): @@ -243,20 +243,20 @@ class TrafficAreaSector(Sector): veh_cell_path = os.path.join(self.auxiliary_dir, 'traffic_area', 'vehicle_by_cell') if not os.path.exists(veh_cell_path): veh_cell = self.make_vehicles_by_cell(gasoline_path) - IoShapefile(self.comm).write_shapefile_parallel(veh_cell.reset_index(), veh_cell_path) + IoShapefile(self.__comm).write_shapefile_parallel(veh_cell.reset_index(), veh_cell_path) else: - self.logger.write_log('\t\tReading 
vehicle shapefile by cell.', message_level=3) - veh_cell = IoShapefile(self.comm).read_shapefile_parallel(veh_cell_path) + self.__logger.write_log('\t\tReading vehicle shapefile by cell.', message_level=3) + veh_cell = IoShapefile(self.__comm).read_shapefile_parallel(veh_cell_path) veh_cell.set_index('FID', inplace=True) - self.logger.write_time_log('TrafficAreaSector', 'init_evaporative', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'init_evaporative', timeit.default_timer() - spent_time) return veh_cell def init_small_cities(self, global_path, small_cities_shapefile): spent_time = timeit.default_timer() pop = self.get_population(global_path, small_cities_shapefile) - self.logger.write_time_log('TrafficAreaSector', 'init_small_cities', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'init_small_cities', timeit.default_timer() - spent_time) return pop def read_vehicles_by_nut(self, path): @@ -268,7 +268,7 @@ class TrafficAreaSector(Sector): vehicles_by_nut = pd.DataFrame(vehicles_by_nut.values.T, index=nut_list, columns=vehicle_list) vehicles_by_nut.index.name = 'nuts3_id' - self.logger.write_time_log('TrafficAreaSector', 'read_vehicles_by_nut', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'read_vehicles_by_nut', timeit.default_timer() - spent_time) return vehicles_by_nut def make_vehicles_by_cell(self, gasoline_path): @@ -282,19 +282,19 @@ class TrafficAreaSector(Sector): vehicle_by_cell[vehicle_list] = vehicle_by_cell[vehicle_list].multiply( vehicle_by_cell['pop_percent'], axis='index') vehicle_by_cell.drop(columns=['pop_percent'], inplace=True) - vehicle_by_cell = IoShapefile(self.comm).gather_shapefile(vehicle_by_cell, rank=0) - if self.comm.Get_rank() == 0: + vehicle_by_cell = IoShapefile(self.__comm).gather_shapefile(vehicle_by_cell, rank=0) + if self.__comm.Get_rank() == 0: vehicle_by_cell = vehicle_by_cell.groupby('FID').sum() 
else: vehicle_by_cell = None - vehicle_by_cell = IoShapefile(self.comm).split_shapefile(vehicle_by_cell) + vehicle_by_cell = IoShapefile(self.__comm).split_shapefile(vehicle_by_cell) vehicle_by_cell = GeoDataFrame( vehicle_by_cell, geometry=self.grid.shapefile.loc[vehicle_by_cell.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - self.logger.write_time_log('TrafficAreaSector', 'make_vehicles_by_cell', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'make_vehicles_by_cell', timeit.default_timer() - spent_time) return vehicle_by_cell def get_profiles_from_temperature(self, temperature, default=False): @@ -324,8 +324,8 @@ class TrafficAreaSector(Sector): temperature.loc[:, temp_list] = temperature[temp_list].add(second_min, axis=0) temperature.loc[:, temp_list] = temperature[temp_list].div(temperature[temp_list].sum(axis=1), axis=0) - self.logger.write_time_log('TrafficAreaSector', 'get_profiles_from_temperature', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'get_profiles_from_temperature', + timeit.default_timer() - spent_time) return temperature def calculate_evaporative_emissions(self): @@ -347,7 +347,7 @@ class TrafficAreaSector(Sector): self.evaporative['c_lon'] = aux_df.centroid.x self.evaporative['centroid'] = aux_df.centroid - temperature = IoNetcdf(self.comm).get_hourly_data_from_netcdf( + temperature = IoNetcdf(self.__comm).get_hourly_data_from_netcdf( self.evaporative['c_lon'].min(), self.evaporative['c_lon'].max(), self.evaporative['c_lat'].min(), self.evaporative['c_lat'].max(), self.temperature_dir, 'tas', self.date_array) temperature.rename(columns={x: 't_{0}'.format(x) for x in range(len(self.date_array))}, inplace=True) @@ -365,7 +365,7 @@ class TrafficAreaSector(Sector): df1=self.evaporative, df2=temperature_mean, geom1_col='centroid', src_column='REC', axis=1) del self.evaporative['c_lat'], self.evaporative['c_lon'], 
self.evaporative['centroid'] - IoShapefile(self.comm).write_shapefile_parallel( + IoShapefile(self.__comm).write_shapefile_parallel( self.evaporative, os.path.join(self.auxiliary_dir, 'traffic_area', 'vehicle_by_cell')) else: del self.evaporative['c_lat'], self.evaporative['c_lon'], self.evaporative['centroid'] @@ -397,8 +397,8 @@ class TrafficAreaSector(Sector): self.evaporative.set_index(['FID', 'tstep'], inplace=True) - self.logger.write_time_log('TrafficAreaSector', 'calculate_evaporative_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'calculate_evaporative_emissions', + timeit.default_timer() - spent_time) return self.evaporative def evaporative_temporal_distribution(self, temporal_profiles): @@ -416,8 +416,8 @@ class TrafficAreaSector(Sector): temporal_df_list.append(aux_temporal) df = pd.concat(temporal_df_list) - self.logger.write_time_log('TrafficAreaSector', 'evaporative_temporal_distribution', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'evaporative_temporal_distribution', + timeit.default_timer() - spent_time) return df def speciate_evaporative(self): @@ -430,7 +430,7 @@ class TrafficAreaSector(Sector): # From g/day to mol/day speciated_df[p] = self.evaporative['nmvoc'] * self.speciation_profiles_evaporative.loc['default', p] - self.logger.write_time_log('TrafficAreaSector', 'speciate_evaporative', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'speciate_evaporative', timeit.default_timer() - spent_time) return speciated_df def small_cities_emissions_by_population(self, pop_by_cell): @@ -442,8 +442,8 @@ class TrafficAreaSector(Sector): pop_by_cell[pollutant] = pop_by_cell['population'] * ef_df[pollutant].iloc[0] pop_by_cell.drop(columns=['population'], inplace=True) - self.logger.write_time_log('TrafficAreaSector', 'small_cities_emissions_by_population', - timeit.default_timer() - spent_time) + 
self.__logger.write_time_log('TrafficAreaSector', 'small_cities_emissions_by_population', + timeit.default_timer() - spent_time) return pop_by_cell def add_timezones(self, grid, default=False): @@ -464,7 +464,7 @@ class TrafficAreaSector(Sector): lambda x: tz.closest_timezone_at(lng=x['lons'], lat=x['lats'], delta_degree=inc), axis=1) inc += 1 - self.logger.write_time_log('TrafficAreaSector', 'add_timezones', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'add_timezones', timeit.default_timer() - spent_time) return grid def temporal_distribution_small(self, small_cities): @@ -509,8 +509,8 @@ class TrafficAreaSector(Sector): small_cities['date'] = small_cities['date'] + pd.to_timedelta(1, unit='h') df = pd.concat(df_list) - self.logger.write_time_log('TrafficAreaSector', 'temporal_distribution_small', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'temporal_distribution_small', + timeit.default_timer() - spent_time) return df def calculate_small_cities_emissions(self): @@ -530,8 +530,8 @@ class TrafficAreaSector(Sector): # default=True) self.small_cities = self.temporal_distribution_small(self.small_cities) - self.logger.write_time_log('TrafficAreaSector', 'calculate_small_cities_emissions', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'calculate_small_cities_emissions', + timeit.default_timer() - spent_time) return True @@ -550,22 +550,22 @@ class TrafficAreaSector(Sector): dataset['layer'] = 0 dataset = dataset.groupby(['FID', 'layer', 'tstep']).sum() - self.logger.write_time_log('TrafficAreaSector', 'to_grid', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficAreaSector', 'to_grid', timeit.default_timer() - spent_time) return dataset def calculate_emissions(self): spent_time = timeit.default_timer() - self.logger.write_log('\tCalculating traffic area.', message_level=2) + 
self.__logger.write_log('\tCalculating traffic area.', message_level=2) if self.do_evaporative: - self.logger.write_log('\tCalculating evaporative emissions.', message_level=2) + self.__logger.write_log('\tCalculating evaporative emissions.', message_level=2) self.calculate_evaporative_emissions() if self.do_small_cities: - self.logger.write_log('\tCalculating small cities emissions.', message_level=2) + self.__logger.write_log('\tCalculating small cities emissions.', message_level=2) self.calculate_small_cities_emissions() emissions = self.to_grid() - self.logger.write_log('\t\tTraffic area emissions calculated', message_level=2) - self.logger.write_time_log('TrafficAreaSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\t\tTraffic area emissions calculated', message_level=2) + self.__logger.write_time_log('TrafficAreaSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/traffic_sector.py b/hermesv3_bu/sectors/traffic_sector.py index 6ff9e65..4375fa4 100755 --- a/hermesv3_bu/sectors/traffic_sector.py +++ b/hermesv3_bu/sectors/traffic_sector.py @@ -105,10 +105,10 @@ class TrafficSector(Sector): self.output_dir = output_dir self.link_to_grid_csv = os.path.join(auxiliary_dir, 'traffic', 'link_grid.csv') - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: if not os.path.exists(os.path.dirname(self.link_to_grid_csv)): os.makedirs(os.path.dirname(self.link_to_grid_csv)) - self.comm.Barrier() + self.__comm.Barrier() self.crs = None # crs is the projection of the road links and it is set on the read_road_links function. 
self.write_rline = write_rline self.road_links = self.read_road_links(road_link_path) @@ -142,7 +142,7 @@ class TrafficSector(Sector): self.do_road_wear = do_road_wear self.do_resuspension = do_resuspension - self.logger.write_time_log('TrafficSector', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', '__init__', timeit.default_timer() - spent_time) def check_profiles(self): spent_time = timeit.default_timer() @@ -193,7 +193,7 @@ class TrafficSector(Sector): error_exit("The following hourly profile IDs reported in the road links shapefile do not appear " + "in the hourly profiles file. {0}".format(hour_res)) - self.logger.write_time_log('TrafficSector', 'check_profiles', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'check_profiles', timeit.default_timer() - spent_time) def read_all_hourly_profiles(self, hourly_mean_profiles_path, hourly_weekday_profiles_path, hourly_saturday_profiles_path, hourly_sunday_profiles_path): @@ -249,7 +249,7 @@ class TrafficSector(Sector): # dataframe['PM10'] = 'pm10' # if 'pm' in self.source_pollutants and 'PM25' in speciation_map[['dst']].values: # dataframe['PM25'] = 'pm25' - self.logger.write_time_log('TrafficSector', 'read_speciation_map', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'read_speciation_map', timeit.default_timer() - spent_time) return dataframe @@ -271,7 +271,7 @@ class TrafficSector(Sector): self.road_links.drop(columns=['utc', 'timezone'], inplace=True) libc.malloc_trim(0) - self.logger.write_time_log('TrafficSector', 'add_local_date', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'add_local_date', timeit.default_timer() - spent_time) return True def add_timezones(self): @@ -283,7 +283,7 @@ class TrafficSector(Sector): self.road_links['timezone'] = 'Europe/Madrid' - self.logger.write_time_log('TrafficSector', 'add_timezones', timeit.default_timer() - 
spent_time) + self.__logger.write_time_log('TrafficSector', 'add_timezones', timeit.default_timer() - spent_time) return True def read_speed_hourly(self, path): @@ -303,7 +303,7 @@ class TrafficSector(Sector): df['P_speed'] = df['P_speed'].astype(int) df.set_index('P_speed', inplace=True) - self.logger.write_time_log('TrafficSector', 'read_speed_hourly', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'read_speed_hourly', timeit.default_timer() - spent_time) return df def read_fleet_compo(self, path, vehicle_list): @@ -311,7 +311,7 @@ class TrafficSector(Sector): df = pd.read_csv(path, sep=',') if vehicle_list is not None: df = df.loc[df['Code'].isin(vehicle_list), :] - self.logger.write_time_log('TrafficSector', 'read_fleet_compo', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'read_fleet_compo', timeit.default_timer() - spent_time) return df def read_road_links(self, path): @@ -342,7 +342,7 @@ class TrafficSector(Sector): spent_time = timeit.default_timer() - if self.comm.Get_rank() == 0: + if self.__comm.Get_rank() == 0: df = gpd.read_file(path) try: df.drop(columns=['Adminis', 'CCAA', 'NETWORK_ID', 'Province', 'Road_name', 'aadt_m_sat', 'aadt_m_sun', @@ -374,12 +374,12 @@ class TrafficSector(Sector): df.set_index('Link_ID', inplace=True) libc.malloc_trim(0) - chunks = chunk_road_links(df, self.comm.Get_size()) + chunks = chunk_road_links(df, self.__comm.Get_size()) else: chunks = None - self.comm.Barrier() + self.__comm.Barrier() - df = self.comm.scatter(chunks, root=0) + df = self.__comm.scatter(chunks, root=0) del chunks libc.malloc_trim(0) @@ -409,7 +409,7 @@ class TrafficSector(Sector): if self.write_rline: self.write_rline_roadlinks(df) - self.logger.write_time_log('TrafficSector', 'read_road_links', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'read_road_links', timeit.default_timer() - spent_time) libc.malloc_trim(0) return df @@ -487,7 
+487,7 @@ class TrafficSector(Sector): return df - self.logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) return None def read_mcorr_file(self, pollutant_name): @@ -499,11 +499,11 @@ class TrafficSector(Sector): if 'Copert_V_name' in list(df.columns.values): df.drop(columns=['Copert_V_name'], inplace=True) except IOError: - self.logger.write_log('WARNING! No mileage correction applied to {0}'.format(pollutant_name)) + self.__logger.write_log('WARNING! No mileage correction applied to {0}'.format(pollutant_name)) warnings.warn('No mileage correction applied to {0}'.format(pollutant_name)) df = None - self.logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) return df def calculate_precipitation_factor(self, lon_min, lon_max, lat_min, lat_max, precipitation_dir): @@ -511,7 +511,7 @@ class TrafficSector(Sector): dates_to_extract = [self.date_array[0] + timedelta(hours=x - 47) for x in range(47)] + self.date_array - precipitation = IoNetcdf(self.comm).get_hourly_data_from_netcdf( + precipitation = IoNetcdf(self.__comm).get_hourly_data_from_netcdf( lon_min, lon_max, lat_min, lat_max, precipitation_dir, 'prlr', dates_to_extract) precipitation.set_index('REC', inplace=True, drop=True) @@ -537,8 +537,8 @@ class TrafficSector(Sector): df.loc[:, 'REC'] = df.index - self.logger.write_time_log('TrafficSector', 'calculate_precipitation_factor', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_precipitation_factor', + timeit.default_timer() - spent_time) return df def update_fleet_value(self, df): @@ -562,8 +562,8 @@ class TrafficSector(Sector): aadt = round(aux_df['aadt'].min(), 1) fleet_value = round(aux_df['Fleet_value'].sum(), 1) if aadt != fleet_value: - 
self.logger.write_log('link_ID: {0} aadt: {1} sum_fleet: {2}'.format(link_id, aadt, fleet_value), - message_level=2) + self.__logger.write_log('link_ID: {0} aadt: {1} sum_fleet: {2}'.format(link_id, aadt, fleet_value), + message_level=2) # Drop 0 values df = df[df['Fleet_value'] > 0] @@ -575,7 +575,7 @@ class TrafficSector(Sector): error_exit(str(e).replace('axis', 'the road links shapefile')) libc.malloc_trim(0) - self.logger.write_time_log('TrafficSector', 'update_fleet_value', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'update_fleet_value', timeit.default_timer() - spent_time) return df def calculate_time_dependent_values(self, df): @@ -675,8 +675,8 @@ class TrafficSector(Sector): error_exit(str(e).replace('axis', 'the road links shapefile')) libc.malloc_trim(0) - self.logger.write_time_log('TrafficSector', 'calculate_time_dependent_values', - timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_time_dependent_values', + timeit.default_timer() - spent_time) return df def expand_road_links(self): @@ -703,7 +703,7 @@ class TrafficSector(Sector): df = self.update_fleet_value(df) df = self.calculate_time_dependent_values(df) - self.logger.write_time_log('TrafficSector', 'expand_road_links', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'expand_road_links', timeit.default_timer() - spent_time) return df @@ -720,7 +720,7 @@ class TrafficSector(Sector): fleet['fleet_comp'] = zone - self.logger.write_time_log('TrafficSector', 'find_fleet', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'find_fleet', timeit.default_timer() - spent_time) return fleet @@ -762,7 +762,7 @@ class TrafficSector(Sector): resta_2 = [item for item in calculated_ef_profiles if item not in original_ef_profile] # Error if len(resta_1) > 0: - self.logger.write_log('WARNING! 
Exists some fleet codes that not appear on the EF file: {0}'.format( + self.__logger.write_log('WARNING! Exists some fleet codes that not appear on the EF file: {0}'.format( resta_1)) warnings.warn('Exists some fleet codes that not appear on the EF file: {0}'.format(resta_1), Warning) if len(resta_2) > 0: @@ -832,7 +832,7 @@ class TrafficSector(Sector): libc.malloc_trim(0) - self.logger.write_time_log('TrafficSector', 'calculate_hot', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_hot', timeit.default_timer() - spent_time) return expanded_aux @@ -850,7 +850,7 @@ class TrafficSector(Sector): link_lons = cold_links['geometry'].centroid.x link_lats = cold_links['geometry'].centroid.y - temperature = IoNetcdf(self.comm).get_hourly_data_from_netcdf( + temperature = IoNetcdf(self.__comm).get_hourly_data_from_netcdf( link_lons.min(), link_lons.max(), link_lats.min(), link_lats.max(), self.temp_common_path, 'tas', self.date_array) temperature.rename(columns={x: 't_{0}'.format(x) for x in range(len(self.date_array))}, inplace=True) @@ -949,14 +949,14 @@ class TrafficSector(Sector): cold_df.drop(columns=['voc_{0}'.format(tstep)], inplace=True) libc.malloc_trim(0) else: - self.logger.write_log("WARNING! nmvoc emissions cannot be estimated because voc or ch4 are not " + + self.__logger.write_log("WARNING! 
nmvoc emissions cannot be estimated because voc or ch4 are not " + "selected in the pollutant list.") warnings.warn("nmvoc emissions cannot be estimated because voc or ch4 are not selected in the " + "pollutant list.") cold_df = self.speciate_traffic(cold_df, self.hot_cold_speciation) libc.malloc_trim(0) - self.logger.write_time_log('TrafficSector', 'calculate_cold', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_cold', timeit.default_timer() - spent_time) return cold_df def compact_hot_expanded(self, expanded): @@ -978,14 +978,14 @@ class TrafficSector(Sector): expanded.loc[expanded['nmvoc_{0}'.format(tstep)] < 0, 'nmvoc_{0}'.format(tstep)] = 0 expanded.drop(columns=['voc_{0}'.format(tstep)], inplace=True) else: - self.logger.write_log("nmvoc emissions cannot be estimated because voc or ch4 are not selected in " + + self.__logger.write_log("nmvoc emissions cannot be estimated because voc or ch4 are not selected in " + "the pollutant list.") warnings.warn( "nmvoc emissions cannot be estimated because voc or ch4 are not selected in the pollutant list.") compacted = self.speciate_traffic(expanded, self.hot_cold_speciation) - self.logger.write_time_log('TrafficSector', 'compact_hot_expanded', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'compact_hot_expanded', timeit.default_timer() - spent_time) return compacted def calculate_tyre_wear(self): @@ -1019,7 +1019,7 @@ class TrafficSector(Sector): df.drop(columns=columns_to_delete, inplace=True) df = self.speciate_traffic(df, self.tyre_speciation) - self.logger.write_time_log('TrafficSector', 'calculate_tyre_wear', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_tyre_wear', timeit.default_timer() - spent_time) return df def calculate_brake_wear(self): @@ -1054,7 +1054,7 @@ class TrafficSector(Sector): df = self.speciate_traffic(df, self.brake_speciation) - 
self.logger.write_time_log('TrafficSector', 'calculate_brake_wear', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_brake_wear', timeit.default_timer() - spent_time) return df def calculate_road_wear(self): @@ -1084,7 +1084,7 @@ class TrafficSector(Sector): df = self.speciate_traffic(df, self.road_speciation) - self.logger.write_time_log('TrafficSector', 'calculate_road_wear', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_road_wear', timeit.default_timer() - spent_time) return df def calculate_resuspension(self): @@ -1142,7 +1142,7 @@ class TrafficSector(Sector): df = self.speciate_traffic(df, self.resuspension_speciation) - self.logger.write_time_log('TrafficSector', 'calculate_resuspension', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'calculate_resuspension', timeit.default_timer() - spent_time) return df def transform_df(self, df): @@ -1164,7 +1164,7 @@ class TrafficSector(Sector): df.drop(columns=pollutants_renamed, inplace=True) df = pd.concat(df_list, ignore_index=True) - self.logger.write_time_log('TrafficSector', 'transform_df', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'transform_df', timeit.default_timer() - spent_time) return df def speciate_traffic(self, df, speciation): @@ -1238,36 +1238,36 @@ class TrafficSector(Sector): df_out = pd.concat(df_out_list, axis=1) - self.logger.write_time_log('TrafficSector', 'speciate_traffic', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'speciate_traffic', timeit.default_timer() - spent_time) return df_out def calculate_emissions(self): spent_time = timeit.default_timer() version = 1 - self.logger.write_log('\tCalculating Road traffic emissions', message_level=1) + self.__logger.write_log('\tCalculating Road traffic emissions', message_level=1) df_accum = pd.DataFrame() if version == 2: if 
self.do_hot: - self.logger.write_log('\t\tCalculating Hot emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Hot emissions.', message_level=2) df_accum = pd.concat([df_accum, self.compact_hot_expanded(self.calculate_hot())]).groupby( ['tstep', 'Link_ID']).sum() if self.do_cold: - self.logger.write_log('\t\tCalculating Cold emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Cold emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_cold(self.calculate_hot())]).groupby( ['tstep', 'Link_ID']).sum() else: if self.do_hot or self.do_cold: - self.logger.write_log('\t\tCalculating Hot emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Hot emissions.', message_level=2) hot_emis = self.calculate_hot() if self.do_hot: - self.logger.write_log('\t\tCompacting Hot emissions.', message_level=2) + self.__logger.write_log('\t\tCompacting Hot emissions.', message_level=2) df_accum = pd.concat([df_accum, self.compact_hot_expanded(hot_emis.copy())]).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_cold: - self.logger.write_log('\t\tCalculating Cold emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Cold emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_cold(hot_emis)]).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) @@ -1276,20 +1276,20 @@ class TrafficSector(Sector): libc.malloc_trim(0) if self.do_tyre_wear: - self.logger.write_log('\t\tCalculating Tyre wear emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Tyre wear emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_tyre_wear()], sort=False).groupby(['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_brake_wear: - self.logger.write_log('\t\tCalculating Brake wear emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Brake wear emissions.', message_level=2) df_accum = pd.concat([df_accum, 
self.calculate_brake_wear()], sort=False).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_road_wear: - self.logger.write_log('\t\tCalculating Road wear emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Road wear emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_road_wear()], sort=False).groupby(['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_resuspension: - self.logger.write_log('\t\tCalculating Resuspension emissions.', message_level=2) + self.__logger.write_log('\t\tCalculating Resuspension emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_resuspension()], sort=False).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) @@ -1301,12 +1301,12 @@ class TrafficSector(Sector): if self.write_rline: self.write_rline_output(df_accum.copy()) - self.logger.write_log('\t\tRoad link emissions to grid.', message_level=2) + self.__logger.write_log('\t\tRoad link emissions to grid.', message_level=2) df_accum = self.links_to_grid(df_accum) libc.malloc_trim(0) - self.logger.write_log('\tRoad traffic emissions calculated', message_level=2) - self.logger.write_time_log('TrafficSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.__logger.write_log('\tRoad traffic emissions calculated', message_level=2) + self.__logger.write_time_log('TrafficSector', 'calculate_emissions', timeit.default_timer() - spent_time) return df_accum def links_to_grid(self, link_emissions): @@ -1348,14 +1348,14 @@ class TrafficSector(Sector): link_grid = pd.DataFrame({'Link_ID': link_id_list, 'FID': fid_list, 'length': length_list}) # Writing link to grid file - data = self.comm.gather(link_grid, root=0) - if self.comm.Get_rank() == 0: + data = self.__comm.gather(link_grid, root=0) + if self.__comm.Get_rank() == 0: if not os.path.exists(os.path.dirname(self.link_to_grid_csv)): os.makedirs(os.path.dirname(self.link_to_grid_csv)) data = pd.concat(data) 
data.to_csv(self.link_to_grid_csv) - self.comm.Barrier() + self.__comm.Barrier() else: link_grid = pd.read_csv(self.link_to_grid_csv) @@ -1376,7 +1376,7 @@ class TrafficSector(Sector): link_grid['layer'] = 0 link_grid = link_grid.groupby(['FID', 'layer', 'tstep']).sum() - self.logger.write_time_log('TrafficSector', 'links_to_grid', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'links_to_grid', timeit.default_timer() - spent_time) return link_grid @@ -1392,8 +1392,8 @@ class TrafficSector(Sector): emissions.reset_index(inplace=True) - emissions_list = self.comm.gather(emissions, root=0) - if self.comm.Get_rank() == 0: + emissions_list = self.__comm.gather(emissions, root=0) + if self.__comm.Get_rank() == 0: emissions = pd.concat(emissions_list) p_list = list(emissions.columns.values) p_list.remove('tstep') @@ -1413,16 +1413,16 @@ class TrafficSector(Sector): out_df.to_csv(os.path.join(self.output_dir, 'rline_{1}_{0}.csv'.format( p, self.date_array[0].strftime('%Y%m%d'))), index=False) - self.comm.Barrier() + self.__comm.Barrier() - self.logger.write_time_log('TrafficSector', 'write_rline_output', timeit.default_timer() - spent_time) + self.__logger.write_time_log('TrafficSector', 'write_rline_output', timeit.default_timer() - spent_time) return True def write_rline_roadlinks(self, df_in): spent_time = timeit.default_timer() - df_in_list = self.comm.gather(df_in, root=0) - if self.comm.Get_rank() == 0: + df_in_list = self.__comm.gather(df_in, root=0) + if self.__comm.Get_rank() == 0: df_in = pd.concat(df_in_list) df_out = pd.DataFrame( @@ -1482,7 +1482,7 @@ class TrafficSector(Sector): df_out.set_index('Link_ID', inplace=True) df_out.sort_index(inplace=True) df_out.to_csv(os.path.join(self.output_dir, 'roads.txt'), index=False, sep=' ') - self.comm.Barrier() - self.logger.write_log('\t\tTraffic emissions calculated', message_level=2) - self.logger.write_time_log('TrafficSector', 'write_rline_roadlinks', timeit.default_timer() - 
spent_time) + self.__comm.Barrier() + self.__logger.write_log('\t\tTraffic emissions calculated', message_level=2) + self.__logger.write_time_log('TrafficSector', 'write_rline_roadlinks', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/cmaq_writer.py b/hermesv3_bu/writer/cmaq_writer.py index 53c5bec..8d86acd 100755 --- a/hermesv3_bu/writer/cmaq_writer.py +++ b/hermesv3_bu/writer/cmaq_writer.py @@ -79,7 +79,7 @@ class CmaqWriter(Writer): self.global_attributes = self.create_global_attributes(global_attributes_path) self.pollutant_info = self.change_pollutant_attributes() - self.logger.write_time_log('CmaqWriter', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -96,7 +96,7 @@ class CmaqWriter(Writer): # From mol/h g/h to mol/s g/s emissions = emissions / 3600.0 - self.logger.write_time_log('CmaqWriter', 'unit_change', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', 'unit_change', timeit.default_timer() - spent_time) return emissions def change_pollutant_attributes(self): @@ -123,7 +123,7 @@ class CmaqWriter(Writer): new_pollutant_info.loc[i, 'long_name'] = "{:<16}".format(pollutant) new_pollutant_info.set_index('pollutant', inplace=True) - self.logger.write_time_log('CmaqWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) return new_pollutant_info def create_tflag(self): @@ -144,7 +144,7 @@ class CmaqWriter(Writer): t_flag[i_d, i_p, 0] = y_d t_flag[i_d, i_p, 1] = hms - self.logger.write_time_log('CmaqWriter', 'create_tflag', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', 'create_tflag', timeit.default_timer() - spent_time) return t_flag def str_var_list(self): @@ -160,7 +160,7 @@ class 
CmaqWriter(Writer): for var in list(self.pollutant_info.index): str_var_list += "{:<16}".format(var) - self.logger.write_time_log('CmaqWriter', 'str_var_list', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', 'str_var_list', timeit.default_timer() - spent_time) return str_var_list def read_global_attributes(self, global_attributes_path): @@ -199,13 +199,13 @@ class CmaqWriter(Writer): atts_dict[att] = np.array(df.loc[df['attribute'] == att, 'value'].item().split(), dtype=np.float32) except ValueError: - self.logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + + self.__logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + " Using default value '{0}'".format(atts_dict[att])) if self.comm_write.Get_rank() == 0: warn('WARNING: The global attribute {0} is not defined; Using default value {1}'.format( att, atts_dict[att])) - self.logger.write_time_log('CmaqWriter', 'read_global_attributes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', 'read_global_attributes', timeit.default_timer() - spent_time) return atts_dict def create_global_attributes(self, global_attributes_path): @@ -256,7 +256,7 @@ class CmaqWriter(Writer): global_attributes['XCELL'] = np.float(self.grid.attributes['inc_x']) global_attributes['YCELL'] = np.float(self.grid.attributes['inc_y']) - self.logger.write_time_log('CmaqWriter', 'create_global_attributes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('CmaqWriter', 'create_global_attributes', timeit.default_timer() - spent_time) return global_attributes def write_netcdf(self, emissions): @@ -276,7 +276,7 @@ class CmaqWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ===== DIMENSIONS ===== - self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) netcdf.createDimension('TSTEP', 
len(self.date_array)) netcdf.createDimension('DATE-TIME', 2) netcdf.createDimension('LAY', len(self.grid.vertical_desctiption)) @@ -285,7 +285,7 @@ class CmaqWriter(Writer): netcdf.createDimension('COL', self.grid.center_longitudes.shape[1]) # ========== VARIABLES ========== - self.logger.write_log('\tCreating NetCDF variables', message_level=2) + self.__logger.write_log('\tCreating NetCDF variables', message_level=2) tflag = netcdf.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME',)) tflag.setncatts({'units': "{:<16}".format(''), 'long_name': "{:<16}".format('TFLAG'), 'var_desc': "{:<80}".format('Timestep-valid flags: (1) YYYYDDD or (2) HHMMSS')}) @@ -294,7 +294,7 @@ class CmaqWriter(Writer): # ========== POLLUTANTS ========== for var_name in emissions.columns.values: - self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) if self.comm_write.Get_size() > 1: var = netcdf.createVariable(var_name, np.float64, ('TSTEP', 'LAY', 'ROW', 'COL',)) @@ -315,13 +315,13 @@ class CmaqWriter(Writer): var.var_desc = self.pollutant_info.loc[var_name, 'var_desc'] # ========== METADATA ========== - self.logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) for attribute in self.global_attributes_order: netcdf.setncattr(attribute, self.global_attributes[attribute]) netcdf.close() - self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.logger.write_time_log('CmaqWriter', 'write_netcdf', timeit.default_timer() - spent_time) + self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.__logger.write_time_log('CmaqWriter', 'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/default_writer.py b/hermesv3_bu/writer/default_writer.py index b25d1ae..2f1ebb6 100755 --- 
a/hermesv3_bu/writer/default_writer.py +++ b/hermesv3_bu/writer/default_writer.py @@ -62,7 +62,7 @@ class DefaultWriter(Writer): super(DefaultWriter, self).__init__(comm_world, comm_write, logger, netcdf_path, grid, date_array, pollutant_info, rank_distribution, emission_summary) - self.logger.write_time_log('DefaultWriter', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('DefaultWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -74,7 +74,7 @@ class DefaultWriter(Writer): :return: Same emissions as input :rtype: DataFrame """ - self.logger.write_time_log('DefaultWriter', 'unit_change', 0.0) + self.__logger.write_time_log('DefaultWriter', 'unit_change', 0.0) return emissions @@ -96,7 +96,7 @@ class DefaultWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ========== DIMENSIONS ========== - self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) if self.grid.grid_type == 'Regular Lat-Lon': netcdf.createDimension('lat', self.grid.center_latitudes.shape[0]) netcdf.createDimension('lon', self.grid.center_longitudes.shape[0]) @@ -124,8 +124,8 @@ class DefaultWriter(Writer): netcdf.createDimension('time', len(self.date_array)) # ========== VARIABLES ========== - self.logger.write_log('\tCreating NetCDF variables', message_level=2) - self.logger.write_log('\t\tCreating time variable', message_level=3) + self.__logger.write_log('\tCreating NetCDF variables', message_level=2) + self.__logger.write_log('\t\tCreating time variable', message_level=3) time_var = netcdf.createVariable('time', np.float64, ('time',)) time_var.units = 'hours since {0}'.format(self.date_array[0].strftime("%Y-%m-%d %H:%M:%S")) @@ -134,13 +134,13 @@ class DefaultWriter(Writer): time_var.long_name = "time" time_var[:] = date2num(self.date_array, time_var.units, calendar=time_var.calendar) - 
self.logger.write_log('\t\tCreating lev variable', message_level=3) + self.__logger.write_log('\t\tCreating lev variable', message_level=3) lev = netcdf.createVariable('lev', np.float64, ('lev',)) lev.units = Unit("m").symbol lev.positive = 'up' lev[:] = self.grid.vertical_desctiption - self.logger.write_log('\t\tCreating lat variable', message_level=3) + self.__logger.write_log('\t\tCreating lat variable', message_level=3) lats = netcdf.createVariable('lat', np.float64, lat_dim) lats.units = "degrees_north" lats.axis = "Y" @@ -151,7 +151,7 @@ class DefaultWriter(Writer): lat_bnds = netcdf.createVariable('lat_bnds', np.float64, lat_dim + ('nv',)) lat_bnds[:] = self.grid.boundary_latitudes - self.logger.write_log('\t\tCreating lon variable', message_level=3) + self.__logger.write_log('\t\tCreating lon variable', message_level=3) lons = netcdf.createVariable('lon', np.float64, lon_dim) lons.units = "degrees_east" lons.axis = "X" @@ -163,14 +163,14 @@ class DefaultWriter(Writer): lon_bnds[:] = self.grid.boundary_longitudes if self.grid.grid_type in ['Lambert Conformal Conic', 'Mercator']: - self.logger.write_log('\t\tCreating x variable', message_level=3) + self.__logger.write_log('\t\tCreating x variable', message_level=3) x_var = netcdf.createVariable('x', np.float64, ('x',)) x_var.units = Unit("km").symbol x_var.long_name = "x coordinate of projection" x_var.standard_name = "projection_x_coordinate" x_var[:] = self.grid.x - self.logger.write_log('\t\tCreating y variable', message_level=3) + self.__logger.write_log('\t\tCreating y variable', message_level=3) y_var = netcdf.createVariable('y', np.float64, ('y',)) y_var.units = Unit("km").symbol y_var.long_name = "y coordinate of projection" @@ -178,7 +178,7 @@ class DefaultWriter(Writer): y_var[:] = self.grid.y elif self.grid.grid_type == 'Rotated': - self.logger.write_log('\t\tCreating rlat variable', message_level=3) + self.__logger.write_log('\t\tCreating rlat variable', message_level=3) rlat = 
netcdf.createVariable('rlat', np.float64, ('rlat',)) rlat.long_name = "latitude in rotated pole grid" rlat.units = Unit("degrees").symbol @@ -186,7 +186,7 @@ class DefaultWriter(Writer): rlat[:] = self.grid.rlat # Rotated Longitude - self.logger.write_log('\t\tCreating rlon variable', message_level=3) + self.__logger.write_log('\t\tCreating rlon variable', message_level=3) rlon = netcdf.createVariable('rlon', np.float64, ('rlon',)) rlon.long_name = "longitude in rotated pole grid" rlon.units = Unit("degrees").symbol @@ -197,7 +197,7 @@ class DefaultWriter(Writer): # if 'Unnamed: 0' in emissions.columns.values: # emissions.drop(columns=['Unnamed: 0'], inplace=True) for var_name in emissions.columns.values: - self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) if self.comm_write.Get_size() > 1: if CHUNK: var = netcdf.createVariable(var_name, np.float64, ('time', 'lev',) + var_dim, @@ -230,9 +230,9 @@ class DefaultWriter(Writer): var.grid_mapping = 'mercator' # ========== METADATA ========== - self.logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) - self.logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) + self.__logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) if self.grid.grid_type == 'Regular Lat-Lon': mapping = netcdf.createVariable('Latitude_Longitude', 'i') @@ -262,7 +262,7 @@ class DefaultWriter(Writer): netcdf.setncattr('Conventions', 'CF-1.6') self.comm_write.Barrier() netcdf.close() - self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.logger.write_time_log('DefaultWriter', 'write_netcdf', timeit.default_timer() - spent_time) + self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.__logger.write_time_log('DefaultWriter', 
'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/monarch_writer.py b/hermesv3_bu/writer/monarch_writer.py index 3e43f0d..4b0d4be 100755 --- a/hermesv3_bu/writer/monarch_writer.py +++ b/hermesv3_bu/writer/monarch_writer.py @@ -71,7 +71,7 @@ class MonarchWriter(Writer): error_exit("'{0}' unit is not supported for CMAQ emission ".format(variable.get('units')) + "input file. Set mol.s-1.m-2 or kg.s-1.m-2 in the speciation_map file.") - self.logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -100,7 +100,7 @@ class MonarchWriter(Writer): if info.get('units') == "kg.s-1.m-2": # From g.s-1.m-2 to kg.s-1.m-2 emissions[[pollutant]] = emissions[[pollutant]].div(10**3) - self.logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) return emissions @@ -121,7 +121,7 @@ class MonarchWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ========== DIMENSIONS ========== - self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) netcdf.createDimension('rlat', len(self.grid.rlat)) netcdf.createDimension('rlon', len(self.grid.rlon)) @@ -134,8 +134,8 @@ class MonarchWriter(Writer): netcdf.createDimension('time', len(self.date_array)) # ========== VARIABLES ========== - self.logger.write_log('\tCreating NetCDF variables', message_level=2) - self.logger.write_log('\t\tCreating time variable', message_level=3) + self.__logger.write_log('\tCreating NetCDF variables', message_level=2) + self.__logger.write_log('\t\tCreating time variable', message_level=3) time = netcdf.createVariable('time', np.float64, ('time',)) time.units = 
'hours since {0}'.format(self.date_array[0].strftime("%Y-%m-%d %H:%M:%S")) @@ -144,13 +144,13 @@ class MonarchWriter(Writer): time.long_name = "time" time[:] = date2num(self.date_array, time.units, calendar=time.calendar) - self.logger.write_log('\t\tCreating lev variable', message_level=3) + self.__logger.write_log('\t\tCreating lev variable', message_level=3) lev = netcdf.createVariable('lev', np.float64, ('lev',)) lev.units = Unit("m").symbol lev.positive = 'up' lev[:] = self.grid.vertical_desctiption - self.logger.write_log('\t\tCreating lat variable', message_level=3) + self.__logger.write_log('\t\tCreating lat variable', message_level=3) lats = netcdf.createVariable('lat', np.float64, lat_dim) lats.units = "degrees_north" lats.axis = "Y" @@ -161,7 +161,7 @@ class MonarchWriter(Writer): lat_bnds = netcdf.createVariable('lat_bnds', np.float64, lat_dim + ('nv',)) lat_bnds[:] = self.grid.boundary_latitudes - self.logger.write_log('\t\tCreating lon variable', message_level=3) + self.__logger.write_log('\t\tCreating lon variable', message_level=3) lons = netcdf.createVariable('lon', np.float64, lon_dim) lons.units = "degrees_east" lons.axis = "X" @@ -172,7 +172,7 @@ class MonarchWriter(Writer): lon_bnds = netcdf.createVariable('lon_bnds', np.float64, lon_dim + ('nv',)) lon_bnds[:] = self.grid.boundary_longitudes - self.logger.write_log('\t\tCreating rlat variable', message_level=3) + self.__logger.write_log('\t\tCreating rlat variable', message_level=3) rlat = netcdf.createVariable('rlat', np.float64, ('rlat',)) rlat.long_name = "latitude in rotated pole grid" rlat.units = Unit("degrees").symbol @@ -180,7 +180,7 @@ class MonarchWriter(Writer): rlat[:] = self.grid.rlat # Rotated Longitude - self.logger.write_log('\t\tCreating rlon variable', message_level=3) + self.__logger.write_log('\t\tCreating rlon variable', message_level=3) rlon = netcdf.createVariable('rlon', np.float64, ('rlon',)) rlon.long_name = "longitude in rotated pole grid" rlon.units = 
Unit("degrees").symbol @@ -189,7 +189,7 @@ class MonarchWriter(Writer): # ========== POLLUTANTS ========== for var_name in emissions.columns.values: - self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) # var = netcdf.createVariable(var_name, np.float64, ('time', 'lev',) + var_dim, # chunksizes=self.rank_distribution[0]['shape']) @@ -215,9 +215,9 @@ class MonarchWriter(Writer): var.grid_mapping = 'rotated_pole' # ========== METADATA ========== - self.logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) - self.logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) + self.__logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' @@ -226,7 +226,7 @@ class MonarchWriter(Writer): netcdf.setncattr('Conventions', 'CF-1.6') netcdf.close() - self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.logger.write_time_log('MonarchWriter', 'write_netcdf', timeit.default_timer() - spent_time) + self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.__logger.write_time_log('MonarchWriter', 'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/wrfchem_writer.py b/hermesv3_bu/writer/wrfchem_writer.py index a1fd289..55d2e1c 100755 --- a/hermesv3_bu/writer/wrfchem_writer.py +++ b/hermesv3_bu/writer/wrfchem_writer.py @@ -88,7 +88,7 @@ class WrfChemWriter(Writer): self.global_attributes = self.create_global_attributes(global_attributes_path) self.pollutant_info = self.change_pollutant_attributes() - self.logger.write_time_log('WrfChemWriter', '__init__', timeit.default_timer() - spent_time) + 
self.__logger.write_time_log('WrfChemWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -122,7 +122,7 @@ class WrfChemWriter(Writer): # From mol/m2.h to mol/km2.h emissions[[pollutant]] = emissions[[pollutant]].mul(10**6) - self.logger.write_time_log('WrfChemWriter', 'unit_change', timeit.default_timer() - spent_time) + self.__logger.write_time_log('WrfChemWriter', 'unit_change', timeit.default_timer() - spent_time) return emissions def change_pollutant_attributes(self): @@ -156,7 +156,7 @@ class WrfChemWriter(Writer): new_pollutant_info.loc[i, 'coordinates'] = "XLONG XLAT" new_pollutant_info.set_index('pollutant', inplace=True) - self.logger.write_time_log('WrfChemWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('WrfChemWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) return new_pollutant_info def read_global_attributes(self, global_attributes_path): @@ -239,13 +239,13 @@ class WrfChemWriter(Writer): atts_dict[att] = str(df.loc[df['attribute'] == att, 'value'].item()) except ValueError: - self.logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + + self.__logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + " Using default value '{0}'".format(atts_dict[att])) if self.comm_write.Get_rank() == 0: warn('WARNING: The global attribute {0} is not defined; Using default value {1}'.format( att, atts_dict[att])) - self.logger.write_time_log('WrfChemWriter', 'read_global_attributes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('WrfChemWriter', 'read_global_attributes', timeit.default_timer() - spent_time) return atts_dict def create_global_attributes(self, global_attributes_path): @@ -299,7 +299,7 @@ class WrfChemWriter(Writer): global_attributes['MOAD_CEN_LAT'] = np.float32(self.grid.attributes['lat_ts']) global_attributes['STAND_LON'] = 
np.float32(self.grid.attributes['lon_0']) - self.logger.write_time_log('WrfChemWriter', 'create_global_attributes', timeit.default_timer() - spent_time) + self.__logger.write_time_log('WrfChemWriter', 'create_global_attributes', timeit.default_timer() - spent_time) return global_attributes def create_times_var(self): @@ -332,7 +332,7 @@ class WrfChemWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ===== DIMENSIONS ===== - self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) netcdf.createDimension('Time', len(self.date_array)) netcdf.createDimension('DateStrLen', 19) @@ -341,13 +341,13 @@ class WrfChemWriter(Writer): netcdf.createDimension('emissions_zdim', len(self.grid.vertical_desctiption)) # ========== VARIABLES ========== - self.logger.write_log('\tCreating NetCDF variables', message_level=2) + self.__logger.write_log('\tCreating NetCDF variables', message_level=2) times = netcdf.createVariable('Times', 'S1', ('Time', 'DateStrLen',)) times[:] = self.create_times_var() # ========== POLLUTANTS ========== for var_name in emissions.columns.values: - self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) if self.comm_write.Get_size() > 1: var = netcdf.createVariable(var_name, np.float64, @@ -373,13 +373,13 @@ class WrfChemWriter(Writer): var.coordinates = self.pollutant_info.loc[var_name, 'coordinates'] # ========== METADATA ========== - self.logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) for attribute in self.global_attributes_order: netcdf.setncattr(attribute, self.global_attributes[attribute]) netcdf.close() - self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.logger.write_time_log('WrfChemWriter', 
'write_netcdf', timeit.default_timer() - spent_time) + self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.__logger.write_time_log('WrfChemWriter', 'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/writer.py b/hermesv3_bu/writer/writer.py index 3d5d284..728bc62 100755 --- a/hermesv3_bu/writer/writer.py +++ b/hermesv3_bu/writer/writer.py @@ -273,7 +273,7 @@ class Writer(object): self.comm_world = comm_world self.comm_write = comm_write - self.logger = logger + self.__logger = logger self.netcdf_path = netcdf_path self.grid = grid self.date_array = date_array @@ -290,7 +290,7 @@ class Writer(object): else: self.emission_summary_paths = None - self.logger.write_time_log('Writer', '__init__', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Writer', '__init__', timeit.default_timer() - spent_time) def gather_emissions(self, emissions): """ @@ -307,27 +307,27 @@ class Writer(object): """ spent_time = timeit.default_timer() # Sending - self.logger.write_log('Sending emissions to the writing processors.', message_level=2) + self.__logger.write_log('Sending emissions to the writing processors.', message_level=2) requests = [] for w_rank, info in self.rank_distribution.items(): partial_emis = emissions.loc[(emissions.index.get_level_values(0) >= info['fid_min']) & (emissions.index.get_level_values(0) < info['fid_max'])] - self.logger.write_log('\tFrom {0} sending {1} to {2}'.format( + self.__logger.write_log('\tFrom {0} sending {1} to {2}'.format( self.comm_world.Get_rank(), sys.getsizeof(partial_emis), w_rank), message_level=3) # requests.append(self.comm_world.isend(sys.getsizeof(partial_emis), dest=w_rank, # tag=self.comm_world.Get_rank() + MPI_TAG_CONSTANT)) requests.append(self.comm_world.isend(partial_emis, dest=w_rank, tag=self.comm_world.Get_rank())) # Receiving - self.logger.write_log('Receiving emissions in the writing processors.', message_level=2) + 
self.__logger.write_log('Receiving emissions in the writing processors.', message_level=2) if self.comm_world.Get_rank() in self.rank_distribution.keys(): - self.logger.write_log("I'm a writing processor.", message_level=3) + self.__logger.write_log("I'm a writing processor.", message_level=3) data_list = [] - self.logger.write_log("Prepared to receive", message_level=3) + self.__logger.write_log("Prepared to receive", message_level=3) for i_rank in range(self.comm_world.Get_size()): - self.logger.write_log( + self.__logger.write_log( '\tFrom {0} to {1}'.format(i_rank, self.comm_world.Get_rank()), message_level=3) req = self.comm_world.irecv(2**27, source=i_rank, tag=i_rank) dataframe = req.wait() @@ -341,12 +341,12 @@ class Writer(object): else: new_emissions = None self.comm_world.Barrier() - self.logger.write_log('All emissions received.', message_level=2) + self.__logger.write_log('All emissions received.', message_level=2) if self.emission_summary and self.comm_world.Get_rank() in self.rank_distribution.keys(): self.make_summary(new_emissions) - self.logger.write_time_log('Writer', 'gather_emissions', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Writer', 'gather_emissions', timeit.default_timer() - spent_time) return new_emissions @@ -369,7 +369,7 @@ class Writer(object): for (layer, tstep), aux_df in dataframe.groupby(['layer', 'tstep']): data[tstep, layer, aux_df['FID']] = aux_df[var_name] - self.logger.write_time_log('Writer', 'dataframe_to_array', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Writer', 'dataframe_to_array', timeit.default_timer() - spent_time) return data.reshape(shape) @@ -390,7 +390,7 @@ class Writer(object): self.write_netcdf(emissions) self.comm_world.Barrier() - self.logger.write_time_log('Writer', 'write', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Writer', 'write', timeit.default_timer() - spent_time) return True @@ -438,4 +438,4 @@ class Writer(object): 
summary.groupby('tstep').sum().to_csv(self.emission_summary_paths['hourly_summary_path']) summary.drop(columns=['tstep'], inplace=True) pd.DataFrame(summary.sum()).to_csv(self.emission_summary_paths['total_summary_path']) - self.logger.write_time_log('Writer', 'make_summary', timeit.default_timer() - spent_time) + self.__logger.write_time_log('Writer', 'make_summary', timeit.default_timer() - spent_time) -- GitLab From 8214182efb524a0e20253b1ee82b84f4ce3f6ee9 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Wed, 23 Oct 2019 12:12:49 +0200 Subject: [PATCH 4/4] UNDO: comm & logger class variables as hidden variables --- conf/hermes.conf | 4 +- hermesv3_bu/clipping/clip.py | 4 +- hermesv3_bu/clipping/custom_clip.py | 6 +- hermesv3_bu/clipping/default_clip.py | 6 +- hermesv3_bu/clipping/shapefile_clip.py | 6 +- hermesv3_bu/grids/grid.py | 10 +- hermesv3_bu/grids/grid_latlon.py | 8 +- hermesv3_bu/grids/grid_lcc.py | 8 +- hermesv3_bu/grids/grid_mercator.py | 8 +- hermesv3_bu/grids/grid_rotated.py | 12 +- hermesv3_bu/hermes.py | 38 ++--- hermesv3_bu/io_server/io_raster.py | 20 +-- hermesv3_bu/io_server/io_server.py | 2 +- hermesv3_bu/io_server/io_shapefile.py | 44 +++--- .../agricultural_crop_fertilizers_sector.py | 138 +++++++++--------- .../agricultural_crop_operations_sector.py | 28 ++-- .../sectors/agricultural_machinery_sector.py | 40 ++--- hermesv3_bu/sectors/agricultural_sector.py | 66 ++++----- hermesv3_bu/sectors/aviation_sector.py | 106 +++++++------- hermesv3_bu/sectors/livestock_sector.py | 124 ++++++++-------- hermesv3_bu/sectors/point_source_sector.py | 96 ++++++------ .../sectors/recreational_boats_sector.py | 44 +++--- hermesv3_bu/sectors/residential_sector.py | 78 +++++----- hermesv3_bu/sectors/sector.py | 80 +++++----- hermesv3_bu/sectors/shipping_port_sector.py | 56 +++---- hermesv3_bu/sectors/solvents_sector.py | 106 +++++++------- hermesv3_bu/sectors/traffic_area_sector.py | 126 ++++++++-------- hermesv3_bu/sectors/traffic_sector.py | 132 
++++++++--------- hermesv3_bu/writer/cmaq_writer.py | 28 ++-- hermesv3_bu/writer/default_writer.py | 34 ++--- hermesv3_bu/writer/monarch_writer.py | 30 ++-- hermesv3_bu/writer/wrfchem_writer.py | 24 +-- hermesv3_bu/writer/writer.py | 26 ++-- 33 files changed, 769 insertions(+), 769 deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index a1fd305..ad9ea14 100755 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -10,8 +10,8 @@ start_date = 2016/11/29 00:00:00 # end_date = 2010/01/01 00:00:00 output_timestep_num = 24 auxiliary_files_path = /scratch/Earth/HERMESv3_BU_aux/__test -first_time = 1 -erase_auxiliary_files = 1 +first_time = 0 +erase_auxiliary_files = 0 [DOMAIN] diff --git a/hermesv3_bu/clipping/clip.py b/hermesv3_bu/clipping/clip.py index e1449dd..5b2ebb6 100755 --- a/hermesv3_bu/clipping/clip.py +++ b/hermesv3_bu/clipping/clip.py @@ -51,8 +51,8 @@ class Clip(object): def __init__(self, logger, auxiliary_path): spent_time = timeit.default_timer() - self.__logger = logger + self.logger = logger self.shapefile = None self.shapefile_path = os.path.join(auxiliary_path, 'clip', 'clip.shp') - self.__logger.write_time_log('Clip', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('Clip', '__init__', timeit.default_timer() - spent_time) diff --git a/hermesv3_bu/clipping/custom_clip.py b/hermesv3_bu/clipping/custom_clip.py index 45cfe12..f6f79b2 100755 --- a/hermesv3_bu/clipping/custom_clip.py +++ b/hermesv3_bu/clipping/custom_clip.py @@ -27,7 +27,7 @@ class CustomClip(Clip): super(CustomClip, self).__init__(logger, auxiliary_path) self.clip_type = 'Custom clip' self.shapefile = self.create_clip(points_str) - self.__logger.write_time_log('CustomClip', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('CustomClip', '__init__', timeit.default_timer() - spent_time) def create_clip(self, points_str): """ @@ -64,6 +64,6 @@ class CustomClip(Clip): clip.to_file(self.shapefile_path) else: clip = 
gpd.read_file(self.shapefile_path) - self.__logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) - self.__logger.write_time_log('CustomClip', 'create_clip', timeit.default_timer() - spent_time) + self.logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) + self.logger.write_time_log('CustomClip', 'create_clip', timeit.default_timer() - spent_time) return clip diff --git a/hermesv3_bu/clipping/default_clip.py b/hermesv3_bu/clipping/default_clip.py index 43f2238..f05bda8 100755 --- a/hermesv3_bu/clipping/default_clip.py +++ b/hermesv3_bu/clipping/default_clip.py @@ -27,7 +27,7 @@ class DefaultClip(Clip): super(DefaultClip, self).__init__(logger, auxiliary_path) self.clip_type = 'Default clip' self.shapefile = self.create_clip(grid) - self.__logger.write_time_log('DefaultClip', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('DefaultClip', '__init__', timeit.default_timer() - spent_time) def create_clip(self, grid): """ @@ -49,6 +49,6 @@ class DefaultClip(Clip): clip.to_file(self.shapefile_path) else: clip = gpd.read_file(self.shapefile_path) - self.__logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) - self.__logger.write_time_log('DefaultClip', 'create_clip', timeit.default_timer() - spent_time) + self.logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) + self.logger.write_time_log('DefaultClip', 'create_clip', timeit.default_timer() - spent_time) return clip diff --git a/hermesv3_bu/clipping/shapefile_clip.py b/hermesv3_bu/clipping/shapefile_clip.py index 07bc87b..88792ef 100755 --- a/hermesv3_bu/clipping/shapefile_clip.py +++ b/hermesv3_bu/clipping/shapefile_clip.py @@ -28,7 +28,7 @@ class ShapefileClip(Clip): super(ShapefileClip, self).__init__(logger, auxiliary_path) self.clip_type = 'Shapefile clip' self.shapefile = self.create_clip(clip_input_path) - self.__logger.write_time_log('ShapefileClip', '__init__', timeit.default_timer() - 
spent_time) + self.logger.write_time_log('ShapefileClip', '__init__', timeit.default_timer() - spent_time) def create_clip(self, clip_path): """ @@ -52,6 +52,6 @@ class ShapefileClip(Clip): error_exit(" Clip shapefile {0} not found.") else: clip = gpd.read_file(self.shapefile_path) - self.__logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) - self.__logger.write_time_log('ShapefileClip', 'create_clip', timeit.default_timer() - spent_time) + self.logger.write_log("\tClip created at '{0}'".format(self.shapefile_path), 3) + self.logger.write_time_log('ShapefileClip', 'create_clip', timeit.default_timer() - spent_time) return clip diff --git a/hermesv3_bu/grids/grid.py b/hermesv3_bu/grids/grid.py index b454d44..6dea082 100755 --- a/hermesv3_bu/grids/grid.py +++ b/hermesv3_bu/grids/grid.py @@ -83,8 +83,8 @@ class Grid(object): :type vertical_description_path: str """ spent_time = timeit.default_timer() - self.__logger = logger - self.__logger.write_log('\tGrid specifications: {0}'.format(attributes), 3) + self.logger = logger + self.logger.write_log('\tGrid specifications: {0}'.format(attributes), 3) self.attributes = attributes self.netcdf_path = os.path.join(auxiliary_path, 'grid', 'grid.nc') self.shapefile_path = os.path.join(auxiliary_path, 'grid', 'grid.shp') @@ -117,7 +117,7 @@ class Grid(object): df = pd.read_csv(path, sep=',') heights = df.height_magl.values - self.__logger.write_time_log('Grid', 'get_vertical_description', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('Grid', 'get_vertical_description', timeit.default_timer() - spent_time, 3) return heights def write_netcdf(self): @@ -171,7 +171,7 @@ class Grid(object): bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) else: error_exit('The number of vertices of the boundaries must be 2 or 4.') - self.__logger.write_time_log('Grid', 'create_bounds', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('Grid', 
'create_bounds', timeit.default_timer() - spent_time, 3) return bound_coords def create_shapefile(self): @@ -235,7 +235,7 @@ class Grid(object): gdf = gpd.read_file(self.shapefile_path) gdf.set_index('FID', inplace=True) - self.__logger.write_time_log('Grid', 'create_shapefile', timeit.default_timer() - spent_time, 2) + self.logger.write_time_log('Grid', 'create_shapefile', timeit.default_timer() - spent_time, 2) return gdf diff --git a/hermesv3_bu/grids/grid_latlon.py b/hermesv3_bu/grids/grid_latlon.py index 8e9b28b..d1c0513 100755 --- a/hermesv3_bu/grids/grid_latlon.py +++ b/hermesv3_bu/grids/grid_latlon.py @@ -56,7 +56,7 @@ class LatLonGrid(Grid): self.shape = (tstep_num, len(self.vertical_desctiption), n_lat, n_lon) - self.__logger.write_time_log('LatLonGrid', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('LatLonGrid', '__init__', timeit.default_timer() - spent_time) def create_coords(self): """ @@ -81,7 +81,7 @@ class LatLonGrid(Grid): self.boundary_latitudes = self.boundary_latitudes.reshape((1,) + self.boundary_latitudes.shape) self.boundary_longitudes = self.boundary_longitudes.reshape((1,) + self.boundary_longitudes.shape) - self.__logger.write_time_log('LatLonGrid', 'create_coords', timeit.default_timer() - spent_time, 2) + self.logger.write_time_log('LatLonGrid', 'create_coords', timeit.default_timer() - spent_time, 2) def write_netcdf(self): """ @@ -98,5 +98,5 @@ class LatLonGrid(Grid): boundary_longitudes=self.boundary_longitudes, regular_latlon=True) - self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.__logger.write_time_log('LatLonGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.logger.write_time_log('LatLonGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) diff --git a/hermesv3_bu/grids/grid_lcc.py b/hermesv3_bu/grids/grid_lcc.py index 33432e0..ee6fdee 100755 --- 
a/hermesv3_bu/grids/grid_lcc.py +++ b/hermesv3_bu/grids/grid_lcc.py @@ -79,7 +79,7 @@ class LccGrid(Grid): # Initialises with parent class super(LccGrid, self).__init__(logger, attributes, auxiliary_path, vertical_description_path) self.shape = (tstep_num, len(self.vertical_desctiption), ny, nx) - self.__logger.write_time_log('LccGrid', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('LccGrid', '__init__', timeit.default_timer() - spent_time) def write_netcdf(self): """ @@ -99,8 +99,8 @@ class LccGrid(Grid): lat_1_2="{0}, {1}".format(self.attributes['lat_1'], self.attributes['lat_2']), lon_0=self.attributes['lon_0'], lat_0=self.attributes['lat_0']) - self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.__logger.write_time_log('LccGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.logger.write_time_log('LccGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) return True def create_coords(self): @@ -143,5 +143,5 @@ class LccGrid(Grid): self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) - self.__logger.write_time_log('LccGrid', 'create_coords', timeit.default_timer() - spent_time, 2) + self.logger.write_time_log('LccGrid', 'create_coords', timeit.default_timer() - spent_time, 2) return True diff --git a/hermesv3_bu/grids/grid_mercator.py b/hermesv3_bu/grids/grid_mercator.py index 1f6fc54..24faf79 100755 --- a/hermesv3_bu/grids/grid_mercator.py +++ b/hermesv3_bu/grids/grid_mercator.py @@ -69,7 +69,7 @@ class MercatorGrid(Grid): super(MercatorGrid, self).__init__(logger, attributes, auxiliary_path, vertical_description_path) self.shape = (tstep_num, len(self.vertical_desctiption), ny, nx) - self.__logger.write_time_log('MercatorGrid', '__init__', timeit.default_timer() - spent_time, 3) + 
self.logger.write_time_log('MercatorGrid', '__init__', timeit.default_timer() - spent_time, 3) def write_netcdf(self): """ @@ -88,8 +88,8 @@ class MercatorGrid(Grid): boundary_longitudes=self.boundary_longitudes, mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.attributes['lon_0'], lat_ts=self.attributes['lat_ts']) - self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.__logger.write_time_log('MercatorGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.logger.write_time_log('MercatorGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) return True def create_coords(self): @@ -120,6 +120,6 @@ class MercatorGrid(Grid): self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) - self.__logger.write_time_log('MercatorGrid', 'create_coords', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('MercatorGrid', 'create_coords', timeit.default_timer() - spent_time, 3) return True diff --git a/hermesv3_bu/grids/grid_rotated.py b/hermesv3_bu/grids/grid_rotated.py index 8cf8bd2..2195707 100755 --- a/hermesv3_bu/grids/grid_rotated.py +++ b/hermesv3_bu/grids/grid_rotated.py @@ -44,7 +44,7 @@ class RotatedGrid(Grid): super(RotatedGrid, self).__init__(logger, attributes, auxiliary_path, vertical_description_path) self.shape = (tstep_num, len(self.vertical_desctiption), attributes['n_lat'], attributes['n_lon']) - self.__logger.write_time_log('RotatedGrid', '__init__', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('RotatedGrid', '__init__', timeit.default_timer() - spent_time, 3) def create_regular_rotated(self): """ @@ -66,7 +66,7 @@ class RotatedGrid(Grid): inverse=True) corner_longitudes = self.create_bounds(center_longitudes, self.attributes['inc_rlon'], number_vertices=4) - 
self.__logger.write_time_log('RotatedGrid', 'create_regular_rotated', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('RotatedGrid', 'create_regular_rotated', timeit.default_timer() - spent_time, 3) return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes def create_coords(self): @@ -90,7 +90,7 @@ class RotatedGrid(Grid): self.boundary_longitudes, self.boundary_latitudes = self.rotated2latlon(b_lons, b_lats) self.center_longitudes, self.center_latitudes = self.rotated2latlon(c_lons, c_lats) - self.__logger.write_time_log('RotatedGrid', 'create_coords', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('RotatedGrid', 'create_coords', timeit.default_timer() - spent_time, 3) return True def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180): @@ -153,7 +153,7 @@ class RotatedGrid(Grid): almd[almd > (lon_min + 360)] -= 360 almd[almd < lon_min] += 360 - self.__logger.write_time_log('RotatedGrid', 'rotated2latlon', timeit.default_timer() - spent_time, 3) + self.logger.write_time_log('RotatedGrid', 'rotated2latlon', timeit.default_timer() - spent_time, 3) return almd, aphd @@ -174,6 +174,6 @@ class RotatedGrid(Grid): rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, north_pole_lat=90 - self.attributes['new_pole_latitude_degrees'], north_pole_lon=self.attributes['new_pole_longitude_degrees']) - self.__logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) - self.__logger.write_time_log('RotatedGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) + self.logger.write_log("\tGrid created at '{0}'".format(self.netcdf_path), 3) + self.logger.write_time_log('RotatedGrid', 'write_netcdf', timeit.default_timer() - spent_time, 3) return True diff --git a/hermesv3_bu/hermes.py b/hermesv3_bu/hermes.py index 4eafb06..e355959 100755 --- a/hermesv3_bu/hermes.py +++ b/hermesv3_bu/hermes.py @@ -30,27 +30,27 @@ class Hermes(object): self.__initial_time = timeit.default_timer() if comm is 
None: comm = MPI.COMM_WORLD - self.__comm = comm + self.comm = comm self.arguments = config.arguments - self.__logger = Log(self.arguments) - self.__logger.write_log('====== Starting HERMESv3_BU simulation =====') - self.grid = select_grid(self.__comm, self.__logger, self.arguments) - self.clip = select_clip(self.__comm, self.__logger, self.arguments.auxiliary_files_path, self.arguments.clipping, + self.logger = Log(self.arguments) + self.logger.write_log('====== Starting HERMESv3_BU simulation =====') + self.grid = select_grid(self.comm, self.logger, self.arguments) + self.clip = select_clip(self.comm, self.logger, self.arguments.auxiliary_files_path, self.arguments.clipping, self.grid) self.date_array = [self.arguments.start_date + timedelta(hours=hour) for hour in range(self.arguments.output_timestep_num)] - self.__logger.write_log('Dates to simulate:', message_level=3) + self.logger.write_log('Dates to simulate:', message_level=3) for aux_date in self.date_array: - self.__logger.write_log('\t{0}'.format(aux_date.strftime("%Y/%m/%d, %H:%M:%S")), message_level=3) + self.logger.write_log('\t{0}'.format(aux_date.strftime("%Y/%m/%d, %H:%M:%S")), message_level=3) self.sector_manager = SectorManager( - self.__comm, self.__logger, self.grid, self.clip, self.date_array, self.arguments) + self.comm, self.logger, self.grid, self.clip, self.date_array, self.arguments) - self.writer = select_writer(self.__logger, self.arguments, self.grid, self.date_array) + self.writer = select_writer(self.logger, self.arguments, self.grid, self.date_array) - self.__logger.write_time_log('Hermes', '__init__', timeit.default_timer() - self.__initial_time) + self.logger.write_time_log('Hermes', '__init__', timeit.default_timer() - self.__initial_time) def main(self): """ @@ -58,21 +58,21 @@ class Hermes(object): """ from datetime import timedelta - if self.arguments.fist_time: - self.__logger.write_log('***** HERMESv3_BU First Time finished successfully *****') + if 
self.arguments.first_time: + self.logger.write_log('***** HERMESv3_BU First Time finished successfully *****') else: emis = self.sector_manager.run() waiting_time = timeit.default_timer() - self.__comm.Barrier() - self.__logger.write_log('All emissions calculated!') - self.__logger.write_time_log('Hermes', 'Waiting_to_write', timeit.default_timer() - waiting_time) + self.comm.Barrier() + self.logger.write_log('All emissions calculated!') + self.logger.write_time_log('Hermes', 'Waiting_to_write', timeit.default_timer() - waiting_time) self.writer.write(emis) - self.__comm.Barrier() + self.comm.Barrier() - self.__logger.write_log('***** HERMESv3_BU simulation finished successfully *****') - self.__logger.write_time_log('Hermes', 'TOTAL', timeit.default_timer() - self.__initial_time) - self.__logger.finish_logs() + self.logger.write_log('***** HERMESv3_BU simulation finished successfully *****') + self.logger.write_time_log('Hermes', 'TOTAL', timeit.default_timer() - self.__initial_time) + self.logger.finish_logs() if self.arguments.start_date < self.arguments.end_date: return self.arguments.start_date + timedelta(days=1) diff --git a/hermesv3_bu/io_server/io_raster.py b/hermesv3_bu/io_server/io_raster.py index f74b041..de05202 100755 --- a/hermesv3_bu/io_server/io_raster.py +++ b/hermesv3_bu/io_server/io_raster.py @@ -186,13 +186,13 @@ class IoRaster(IoServer): :return: """ - if self.__comm.Get_rank() == rank: + if self.comm.Get_rank() == rank: gdf = self.to_shapefile_serie(raster_path, out_path=out_path, write=write, crs=crs, nodata=nodata) else: gdf = None - if self.__comm.Get_size() > 1: - gdf = self.__comm.bcast(gdf, root=0) + if self.comm.Get_size() > 1: + gdf = self.comm.bcast(gdf, root=0) return gdf @@ -316,7 +316,7 @@ class IoRaster(IoServer): def to_shapefile_parallel(self, raster_path, gather=False, bcast=False, crs=None, nodata=0): spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: ds = 
rasterio.open(raster_path) grid_info = ds.transform @@ -357,11 +357,11 @@ class IoRaster(IoServer): gdf = None b_lons = None b_lats = None - self.__comm.Barrier() - gdf = IoShapefile(self.__comm).split_shapefile(gdf) + self.comm.Barrier() + gdf = IoShapefile(self.comm).split_shapefile(gdf) - b_lons = IoShapefile(self.__comm).split_shapefile(b_lons) - b_lats = IoShapefile(self.__comm).split_shapefile(b_lats) + b_lons = IoShapefile(self.comm).split_shapefile(b_lons) + b_lats = IoShapefile(self.comm).split_shapefile(b_lats) i = 0 for j, df_aux in gdf.iterrows(): @@ -379,7 +379,7 @@ class IoRaster(IoServer): gdf = gdf.to_crs(crs) if gather and not bcast: - gdf = IoShapefile(self.__comm).gather_shapefile(gdf) + gdf = IoShapefile(self.comm).gather_shapefile(gdf) elif gather and bcast: - gdf = IoShapefile(self.__comm).gather_bcast_shapefile(gdf) + gdf = IoShapefile(self.comm).gather_bcast_shapefile(gdf) return gdf diff --git a/hermesv3_bu/io_server/io_server.py b/hermesv3_bu/io_server/io_server.py index 77dae06..46bd918 100755 --- a/hermesv3_bu/io_server/io_server.py +++ b/hermesv3_bu/io_server/io_server.py @@ -9,4 +9,4 @@ class IoServer(object): :type comm: MPI.Comm """ def __init__(self, comm): - self.__comm = comm + self.comm = comm diff --git a/hermesv3_bu/io_server/io_shapefile.py b/hermesv3_bu/io_server/io_shapefile.py index b995773..59ece64 100755 --- a/hermesv3_bu/io_server/io_shapefile.py +++ b/hermesv3_bu/io_server/io_shapefile.py @@ -49,14 +49,14 @@ class IoShapefile(IoServer): :return: True when the writing is finished. 
:rtype: bool """ - data = self.__comm.gather(data, root=rank) - if self.__comm.Get_rank() == rank: + data = self.comm.gather(data, root=rank) + if self.comm.Get_rank() == rank: if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path)) data = pd.concat(data) data.to_file(path) - self.__comm.Barrier() + self.comm.Barrier() return True @@ -67,19 +67,19 @@ class IoShapefile(IoServer): return gdf def read_shapefile(self, path, rank=0): - if self.__comm.Get_rank() == rank: + if self.comm.Get_rank() == rank: check_files(path) gdf = gpd.read_file(path) - gdf = np.array_split(gdf, self.__comm.Get_size()) + gdf = np.array_split(gdf, self.comm.Get_size()) else: gdf = None - gdf = self.__comm.scatter(gdf, root=rank) + gdf = self.comm.scatter(gdf, root=rank) return gdf def read_shapefile_parallel(self, path, rank=0): - if self.__comm.Get_rank() == rank: + if self.comm.Get_rank() == rank: data = self.read_shapefile_serial(path) else: data = None @@ -97,38 +97,38 @@ class IoShapefile(IoServer): :rtype: GeoDataFrame """ - if self.__comm.Get_size() == 1: + if self.comm.Get_size() == 1: data = data else: - if self.__comm.Get_rank() == rank: - data = np.array_split(data, self.__comm.Get_size()) + if self.comm.Get_rank() == rank: + data = np.array_split(data, self.comm.Get_size()) else: data = None - data = self.__comm.scatter(data, root=rank) + data = self.comm.scatter(data, root=rank) return data def gather_bcast_shapefile(self, data, rank=0): - if self.__comm.Get_size() == 1: + if self.comm.Get_size() == 1: data = data else: - data = self.__comm.gather(data, root=rank) - if self.__comm.Get_rank() == rank: + data = self.comm.gather(data, root=rank) + if self.comm.Get_rank() == rank: data = pd.concat(data) else: data = None - data = self.__comm.bcast(data, root=rank) + data = self.comm.bcast(data, root=rank) return data def gather_shapefile(self, data, rank=0): - if self.__comm.Get_size() == 1: + if self.comm.Get_size() == 1: data = data else: - data = 
self.__comm.gather(data, root=rank) - if self.__comm.Get_rank() == rank: + data = self.comm.gather(data, root=rank) + if self.comm.Get_rank() == rank: data = pd.concat(data) else: data = None @@ -136,13 +136,13 @@ class IoShapefile(IoServer): def balance(self, data, rank=0): - data = self.__comm.gather(data, root=rank) - if self.__comm.Get_rank() == rank: + data = self.comm.gather(data, root=rank) + if self.comm.Get_rank() == rank: data = pd.concat(data) - data = np.array_split(data, self.__comm.Get_size()) + data = np.array_split(data, self.comm.Get_size()) else: data = None - data = self.__comm.scatter(data, root=rank) + data = self.comm.scatter(data, root=rank) return data diff --git a/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py b/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py index 10f3ed0..2b0be93 100755 --- a/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py +++ b/hermesv3_bu/sectors/agricultural_crop_fertilizers_sector.py @@ -53,7 +53,7 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): self.temperature_path = temperature_path self.wind_speed_path = wind_speed_path self.crop_growing_degree_day_path = crop_growing_degree_day_path - self.__logger.write_time_log('AgriculturalCropFertilizersSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', '__init__', timeit.default_timer() - spent_time) def get_ftype_fcrop_fmode_by_nut(self, crop, nut_list): spent_time = timeit.default_timer() @@ -71,8 +71,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): filtered_crop_f_parameter['f_crop'], filtered_crop_f_parameter['f_mode']], axis=1).reset_index() f_by_nut.rename(columns={0: 'f_type'}, inplace=True) - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ftype_fcrop_fmode_by_nut', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ftype_fcrop_fmode_by_nut', + 
timeit.default_timer() - spent_time) return f_by_nut @@ -114,8 +114,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): total_crop_df['EF_{0}'.format(crop)] = crop_ef['f_sum'] - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ef_by_crop', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_ef_by_crop', + timeit.default_timer() - spent_time) return total_crop_df def to_dst_resolution(self, src_shapefile, value): @@ -143,8 +143,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): # dst_shapefile.drop('involved_area', axis=1, inplace=True) dst_shapefile.dropna(inplace=True) - dst_shapefile = IoShapefile(self.__comm).gather_shapefile(dst_shapefile.reset_index()) - if self.__comm.Get_rank() == 0: + dst_shapefile = IoShapefile(self.comm).gather_shapefile(dst_shapefile.reset_index()) + if self.comm.Get_rank() == 0: # dst_shapefile['FID_involved_area'] = dst_shapefile.groupby('FID')['involved_area'].sum() # dst_shapefile['involved_area'] = dst_shapefile['involved_area'] / dst_shapefile['FID_involved_area'] # dst_shapefile[value] = dst_shapefile[value] * dst_shapefile['involved_area'] @@ -154,55 +154,55 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): dst_shapefile = dst_shapefile.groupby(['FID'])[value].mean() else: dst_shapefile = None - dst_shapefile = IoShapefile(self.__comm).split_shapefile(dst_shapefile) + dst_shapefile = IoShapefile(self.comm).split_shapefile(dst_shapefile) # print('Rank {0} -Z {1}: \n{2}\n'.format(self.comm.Get_rank(), value, dst_shapefile)) # sys.stdout.flush() - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'to_dst_resolution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'to_dst_resolution', + timeit.default_timer() - spent_time) return dst_shapefile def get_gridded_constants(self, ph_path, cec_path): spent_time = timeit.default_timer() - 
self.__logger.write_log('Getting gridded constants', message_level=2) + self.logger.write_log('Getting gridded constants', message_level=2) gridded_ph_cec_path = os.path.join(self.auxiliary_dir, 'fertilizers', 'gridded_constants') if not os.path.exists(gridded_ph_cec_path): - self.__logger.write_log('Getting PH from {0}'.format(ph_path), message_level=2) + self.logger.write_log('Getting PH from {0}'.format(ph_path), message_level=2) clipped_ph_path = os.path.join(self.auxiliary_dir, 'fertilizers', 'gridded_PH.tiff') - if self.__comm.Get_rank() == 0: - IoRaster(self.__comm).clip_raster_with_shapefile_poly(ph_path, self.clip.shapefile, clipped_ph_path, - nodata=255) - self.__logger.write_log('PH clipped done!', message_level=3) - ph_gridded = IoRaster(self.__comm).to_shapefile_parallel(clipped_ph_path, nodata=255) - self.__logger.write_log('PH to shapefile done!', message_level=3) + if self.comm.Get_rank() == 0: + IoRaster(self.comm).clip_raster_with_shapefile_poly(ph_path, self.clip.shapefile, clipped_ph_path, + nodata=255) + self.logger.write_log('PH clipped done!', message_level=3) + ph_gridded = IoRaster(self.comm).to_shapefile_parallel(clipped_ph_path, nodata=255) + self.logger.write_log('PH to shapefile done!', message_level=3) ph_gridded.set_index('CELL_ID', inplace=True) ph_gridded.rename(columns={'data': 'ph'}, inplace=True) - ph_gridded = IoShapefile(self.__comm).balance(ph_gridded) + ph_gridded = IoShapefile(self.comm).balance(ph_gridded) # To correct input data ph_gridded['ph'] = ph_gridded['ph'] / 10 - self.__logger.write_log('PH to destiny resolution ...', message_level=3) + self.logger.write_log('PH to destiny resolution ...', message_level=3) ph_gridded = self.to_dst_resolution(ph_gridded, value='ph') - self.__logger.write_log('PH to destiny resolution done!', message_level=3) + self.logger.write_log('PH to destiny resolution done!', message_level=3) - self.__logger.write_log('Getting CEC from {0}'.format(cec_path), message_level=2) + 
self.logger.write_log('Getting CEC from {0}'.format(cec_path), message_level=2) clipped_cec_path = os.path.join(self.auxiliary_dir, 'fertilizers', 'gridded_CEC.tiff') - if self.__comm.Get_rank() == 0: - IoRaster(self.__comm).clip_raster_with_shapefile_poly(cec_path, self.clip.shapefile, clipped_cec_path, - nodata=-32768) - self.__logger.write_log('CEC clipped done!', message_level=3) - cec_gridded = IoRaster(self.__comm).to_shapefile_parallel(clipped_cec_path, nodata=-32768) - self.__logger.write_log('CEC to shapefile done!', message_level=3) + if self.comm.Get_rank() == 0: + IoRaster(self.comm).clip_raster_with_shapefile_poly(cec_path, self.clip.shapefile, clipped_cec_path, + nodata=-32768) + self.logger.write_log('CEC clipped done!', message_level=3) + cec_gridded = IoRaster(self.comm).to_shapefile_parallel(clipped_cec_path, nodata=-32768) + self.logger.write_log('CEC to shapefile done!', message_level=3) cec_gridded.rename(columns={'data': 'cec'}, inplace=True) cec_gridded.set_index('CELL_ID', inplace=True) - cec_gridded = IoShapefile(self.__comm).balance(cec_gridded) - self.__logger.write_log('CEC to destiny resolution ...', message_level=3) + cec_gridded = IoShapefile(self.comm).balance(cec_gridded) + self.logger.write_log('CEC to destiny resolution ...', message_level=3) cec_gridded = self.to_dst_resolution(cec_gridded.reset_index(), value='cec') - self.__logger.write_log('CEC to destiny resolution done!', message_level=3) + self.logger.write_log('CEC to destiny resolution done!', message_level=3) - ph_gridded = IoShapefile(self.__comm).gather_shapefile(ph_gridded.reset_index()) - cec_gridded = IoShapefile(self.__comm).gather_shapefile(cec_gridded.reset_index()) - if self.__comm.Get_rank() == 0: + ph_gridded = IoShapefile(self.comm).gather_shapefile(ph_gridded.reset_index()) + cec_gridded = IoShapefile(self.comm).gather_shapefile(cec_gridded.reset_index()) + if self.comm.Get_rank() == 0: gridded_ph_cec = ph_gridded # gridded_ph_cec = 
ph_gridded.groupby('FID').mean() # cec_gridded = cec_gridded.groupby('FID').mean() @@ -216,7 +216,7 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): crs=self.grid.shapefile.crs) else: gridded_ph_cec = None - gridded_ph_cec = IoShapefile(self.__comm).split_shapefile(gridded_ph_cec) + gridded_ph_cec = IoShapefile(self.comm).split_shapefile(gridded_ph_cec) # print('Rank {0} -Z PH: \n{1}\n'.format(self.comm.Get_rank(), np.unique(gridded_ph_cec['ph']))) # print('Rank {0} -Z CEC: \n{1}\n'.format(self.comm.Get_rank(), np.unique(gridded_ph_cec['cec']))) # print('Rank {0} -Z FID: \n{1}\n'.format(self.comm.Get_rank(), np.unique(gridded_ph_cec.index))) @@ -226,19 +226,19 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): gridded_ph_cec = gridded_ph_cec[gridded_ph_cec['nut_code'] != -999] gridded_ph_cec.set_index('FID', inplace=True) - IoShapefile(self.__comm).write_shapefile_parallel(gridded_ph_cec.reset_index(), gridded_ph_cec_path) + IoShapefile(self.comm).write_shapefile_parallel(gridded_ph_cec.reset_index(), gridded_ph_cec_path) - gridded_ph_cec = IoShapefile(self.__comm).gather_bcast_shapefile(gridded_ph_cec) + gridded_ph_cec = IoShapefile(self.comm).gather_bcast_shapefile(gridded_ph_cec) else: - gridded_ph_cec = IoShapefile(self.__comm).read_shapefile_serial(gridded_ph_cec_path) + gridded_ph_cec = IoShapefile(self.comm).read_shapefile_serial(gridded_ph_cec_path) gridded_ph_cec.set_index('FID', inplace=True) # Selecting only PH and CEC cells that have also some crop. 
gridded_ph_cec = gridded_ph_cec.loc[self.crop_distribution.index, :] # gridded_ph_cec = gridded_ph_cec.loc[(gridded_ph_cec['ph'] > 0) & (gridded_ph_cec['cec'] > 0)] - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_gridded_constants', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_gridded_constants', + timeit.default_timer() - spent_time) return gridded_ph_cec def get_daily_inputs(self, yearly_emissions): @@ -254,34 +254,34 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): for day in self.day_dict.keys(): aux_df = yearly_emissions.copy().reset_index() - self.__logger.write_log('Getting temperature from {0}'.format( + self.logger.write_log('Getting temperature from {0}'.format( os.path.join(self.temperature_path, 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))))) - meteo_df = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo_df = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.temperature_path, 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'tas', 'daily', day, geometry_shp) meteo_df['tas'] = meteo_df['tas'] - 273.15 - self.__logger.write_log('Getting surface wind speed from {0}'.format( + self.logger.write_log('Getting surface wind speed from {0}'.format( os.path.join(self.wind_speed_path, 'sfcWind_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))))) - meteo_df['sfcWind'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo_df['sfcWind'] = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.wind_speed_path, 'sfcWind_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'sfcWind', 'daily', day, geometry_shp).loc[:, 'sfcWind'] for crop in self.crop_list: - self.__logger.write_log('Getting fertilizer denominator yearly factor from {0}'.format( + self.logger.write_log('Getting fertilizer denominator yearly factor from {0}'.format( self.fertilizer_denominator_yearly_factor_path.replace('', crop).replace( '', 
str(day.year)))) - meteo_df['d_{0}'.format(crop)] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo_df['d_{0}'.format(crop)] = IoNetcdf(self.comm).get_data_from_netcdf( self.fertilizer_denominator_yearly_factor_path.replace('', crop).replace( '', str(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - self.__logger.write_log('Getting growing degree day from {0}'.format( + self.logger.write_log('Getting growing degree day from {0}'.format( self.crop_growing_degree_day_path.replace('', 'winter').replace('', str(day.year)))) - meteo_df['winter'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo_df['winter'] = IoNetcdf(self.comm).get_data_from_netcdf( self.crop_growing_degree_day_path.replace('', 'winter').replace('', str(day.year)), 'Tsum', 'yearly', day, geometry_shp).loc[:, 'Tsum'].astype(np.int16) - self.__logger.write_log('Getting growing degree day from {0}'.format( + self.logger.write_log('Getting growing degree day from {0}'.format( self.crop_growing_degree_day_path.replace('', 'spring').replace('', str(day.year)))) - meteo_df['spring'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo_df['spring'] = IoNetcdf(self.comm).get_data_from_netcdf( self.crop_growing_degree_day_path.replace('', 'spring').replace('', str(day.year)), 'Tsum', 'yearly', day, geometry_shp).loc[:, 'Tsum'].astype(np.int16) @@ -297,14 +297,14 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): aux_df.set_index('FID', inplace=True) daily_inputs[day] = aux_df - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'get_daily_inputs', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'get_daily_inputs', + timeit.default_timer() - spent_time) return daily_inputs def calculate_yearly_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('Calculating yearly emissions') + self.logger.write_log('Calculating yearly emissions') self.crop_distribution = 
pd.merge(self.crop_distribution.reset_index(), self.ef_by_crop.loc[:, ['nut_code']].reset_index(), how='left', on='FID') @@ -317,8 +317,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): self.fertilizer_rate.loc[self.fertilizer_rate['code'] == x.name, crop].values[0])) self.crop_distribution[crop] = self.crop_distribution[crop] * self.ef_by_crop['EF_{0}'.format(crop)] - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_yearly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_yearly_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution def calculate_nh3_emissions(self, day, daily_inputs): @@ -364,8 +364,8 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): # From kg NH3-N to g NH3 daily_emissions['nh3'] = daily_emissions['nh3'].multiply((17. / 14.) * 1000.) - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_nh3_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_nh3_emissions', + timeit.default_timer() - spent_time) return daily_emissions def add_dates(self, df_by_day): @@ -383,23 +383,23 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): dataframe_by_day = self.to_timezone(dataframe_by_day) dataframe_by_day.set_index(['FID', 'tstep'], inplace=True) - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'add_dates', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'add_dates', + timeit.default_timer() - spent_time) return dataframe_by_day def calculate_daily_emissions(self, emissions): spent_time = timeit.default_timer() - self.__logger.write_log('Calculating daily emissions') + self.logger.write_log('Calculating daily emissions') df_by_day = self.get_daily_inputs(emissions) for day, daily_inputs in df_by_day.items(): 
df_by_day[day] = self.calculate_nh3_emissions(day, daily_inputs) - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_daily_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_daily_emissions', + timeit.default_timer() - spent_time) return df_by_day def calculate_hourly_emissions(self, emissions): spent_time = timeit.default_timer() - self.__logger.write_log('Calculating hourly emissions') + self.logger.write_log('Calculating hourly emissions') emissions['hour'] = emissions['date'].dt.hour emissions['nh3'] = emissions.groupby('hour')['nh3'].apply( lambda x: x.multiply(self.hourly_profiles.loc['nh3', x.name])) @@ -407,13 +407,13 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): emissions['date'] = emissions['date_utc'] emissions.drop(columns=['hour', 'date_utc'], axis=1, inplace=True) - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return emissions def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') emissions = self.calculate_yearly_emissions() @@ -426,7 +426,7 @@ class AgriculturalCropFertilizersSector(AgriculturalSector): emissions['layer'] = 0 emissions.set_index(['FID', 'layer', 'tstep'], inplace=True) - self.__logger.write_log('\t\tCrop fertilizers emissions calculated', message_level=2) - self.__logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.logger.write_log('\t\tCrop fertilizers emissions calculated', message_level=2) + self.logger.write_time_log('AgriculturalCropFertilizersSector', 'calculate_emissions', + timeit.default_timer() 
- spent_time) return emissions diff --git a/hermesv3_bu/sectors/agricultural_crop_operations_sector.py b/hermesv3_bu/sectors/agricultural_crop_operations_sector.py index 5457b92..9edb2a9 100755 --- a/hermesv3_bu/sectors/agricultural_crop_operations_sector.py +++ b/hermesv3_bu/sectors/agricultural_crop_operations_sector.py @@ -106,7 +106,7 @@ class AgriculturalCropOperationsSector(AgriculturalSector): self.months = self.get_date_array_by_month() - self.__logger.write_time_log('AgriculturalCropOperationsSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropOperationsSector', '__init__', timeit.default_timer() - spent_time) def read_monthly_profiles(self, path): """ @@ -126,8 +126,8 @@ class AgriculturalCropOperationsSector(AgriculturalSector): profiles.reset_index(inplace=True) profiles.set_index(['P_month', 'operation'], inplace=True) - self.__logger.write_time_log('AgriculturalCropOperationsSector', 'read_monthly_profiles', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropOperationsSector', 'read_monthly_profiles', + timeit.default_timer() - spent_time) return profiles def get_date_array_by_month(self): @@ -140,8 +140,8 @@ class AgriculturalCropOperationsSector(AgriculturalSector): for month in month_list: month_dict[month] = np.array(self.date_array)[month_array == month] - self.__logger.write_time_log('AgriculturalCropOperationsSector', 'get_date_array_by_month', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropOperationsSector', 'get_date_array_by_month', + timeit.default_timer() - spent_time) return month_dict @@ -168,8 +168,8 @@ class AgriculturalCropOperationsSector(AgriculturalSector): # From Kg to g factor *= 1000.0 month_distribution[pollutant] += self.crop_distribution[crop].multiply(factor) - self.__logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_distribution_by_month', - timeit.default_timer() - spent_time) + 
self.logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_distribution_by_month', + timeit.default_timer() - spent_time) return month_distribution @@ -187,7 +187,7 @@ class AgriculturalCropOperationsSector(AgriculturalSector): dataframe_by_day.set_index(['FID', 'tstep'], inplace=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.__logger.write_time_log('AgriculturalCropOperationsSector', 'add_dates', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropOperationsSector', 'add_dates', timeit.default_timer() - spent_time) return dataframe_by_day @@ -239,14 +239,14 @@ class AgriculturalCropOperationsSector(AgriculturalSector): self.crop_distribution.drop(columns=['month', 'weekday', 'hour', 'WF', 'HF', 'date_as_date'], inplace=True) - self.__logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') distribution_by_month = {} for month in self.months.keys(): @@ -259,7 +259,7 @@ class AgriculturalCropOperationsSector(AgriculturalSector): self.crop_distribution['layer'] = 0 - self.__logger.write_log('\t\tCrop operations emissions calculated', message_level=2) - self.__logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.logger.write_log('\t\tCrop operations emissions calculated', message_level=2) + self.logger.write_time_log('AgriculturalCropOperationsSector', 'calculate_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution diff --git a/hermesv3_bu/sectors/agricultural_machinery_sector.py 
b/hermesv3_bu/sectors/agricultural_machinery_sector.py index 8a82fed..6d62458 100755 --- a/hermesv3_bu/sectors/agricultural_machinery_sector.py +++ b/hermesv3_bu/sectors/agricultural_machinery_sector.py @@ -53,7 +53,7 @@ class AgriculturalMachinerySector(AgriculturalSector): self.vehicle_power = self.read_profiles(vehicle_power_path) self.emission_factors = self.read_profiles(ef_files_dir) - self.__logger.write_time_log('AgriculturalMachinerySector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', '__init__', timeit.default_timer() - spent_time) def get_crop_distribution_by_nut(self, crop_distribution, nut_shapefile, nut_code=None, write_crop_by_nut=False): spent_time = timeit.default_timer() @@ -92,12 +92,12 @@ class AgriculturalMachinerySector(AgriculturalSector): crop_distribution.drop(columns=self.crop_list, inplace=True) crop_distribution.rename(columns={nut_code: 'NUT_code'}, inplace=True) - IoShapefile(self.__comm).write_shapefile_parallel(crop_distribution, crop_distribution_nut_path) + IoShapefile(self.comm).write_shapefile_parallel(crop_distribution, crop_distribution_nut_path) else: - crop_distribution = IoShapefile(self.__comm).read_shapefile(crop_distribution_nut_path) + crop_distribution = IoShapefile(self.comm).read_shapefile(crop_distribution_nut_path) - self.__logger.write_time_log('AgriculturalMachinerySector', 'get_crop_distribution_by_nut', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 'get_crop_distribution_by_nut', + timeit.default_timer() - spent_time) return crop_distribution @@ -110,8 +110,8 @@ class AgriculturalMachinerySector(AgriculturalSector): for month in month_list: month_dict[month] = np.array(self.date_array)[month_array == month] - self.__logger.write_time_log('AgriculturalMachinerySector', 'get_date_array_by_month', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 
'get_date_array_by_month', + timeit.default_timer() - spent_time) return month_dict def calcualte_yearly_emissions_by_nut_vehicle(self): @@ -221,8 +221,8 @@ class AgriculturalMachinerySector(AgriculturalSector): database.drop(columns=['N', 'S', 'T', 'P', 'LF'], inplace=True) database = database.groupby(['NUT_code', 'vehicle']).sum() - self.__logger.write_time_log('AgriculturalMachinerySector', 'calcualte_yearly_emissions_by_nut_vehicle', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 'calcualte_yearly_emissions_by_nut_vehicle', + timeit.default_timer() - spent_time) return database def calculate_monthly_emissions_by_nut(self, month): @@ -242,8 +242,8 @@ class AgriculturalMachinerySector(AgriculturalSector): dataframe = dataframe.groupby('NUT_code').sum() - self.__logger.write_time_log('AgriculturalMachinerySector', 'calculate_monthly_emissions_by_nut', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 'calculate_monthly_emissions_by_nut', + timeit.default_timer() - spent_time) return dataframe def distribute(self, dataframe): @@ -263,8 +263,8 @@ class AgriculturalMachinerySector(AgriculturalSector): self.crop_distribution['timezone'] = timezones self.crop_distribution.reset_index(inplace=True) - self.__logger.write_time_log('AgriculturalMachinerySector', 'distribute', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 'distribute', + timeit.default_timer() - spent_time) return self.crop_distribution def add_dates(self, df_by_month): @@ -281,7 +281,7 @@ class AgriculturalMachinerySector(AgriculturalSector): dataframe_by_day = pd.concat(df_list, ignore_index=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.__logger.write_time_log('AgriculturalMachinerySector', 'add_dates', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 'add_dates', timeit.default_timer() 
- spent_time) return dataframe_by_day def calculate_hourly_emissions(self): @@ -331,13 +331,13 @@ class AgriculturalMachinerySector(AgriculturalSector): self.crop_distribution['HF'] * self.crop_distribution['WF'], axis=0) self.crop_distribution.drop(columns=['month', 'weekday', 'hour', 'WF', 'HF', 'date_as_date'], inplace=True) - self.__logger.write_time_log('AgriculturalMachinerySector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalMachinerySector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') distribution_by_month = {} for month in self.months.keys(): @@ -351,7 +351,7 @@ class AgriculturalMachinerySector(AgriculturalSector): self.crop_distribution = self.crop_distribution.groupby(['FID', 'layer', 'tstep']).sum() self.crop_distribution = self.speciate(self.crop_distribution) - self.__logger.write_log('\t\tAgricultural machinery emissions calculated', message_level=2) - self.__logger.write_time_log('AgriculturalMachinerySector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.logger.write_log('\t\tAgricultural machinery emissions calculated', message_level=2) + self.logger.write_time_log('AgriculturalMachinerySector', 'calculate_emissions', + timeit.default_timer() - spent_time) return self.crop_distribution diff --git a/hermesv3_bu/sectors/agricultural_sector.py b/hermesv3_bu/sectors/agricultural_sector.py index 5cc43e9..07a10e9 100755 --- a/hermesv3_bu/sectors/agricultural_sector.py +++ b/hermesv3_bu/sectors/agricultural_sector.py @@ -133,31 +133,31 @@ class AgriculturalSector(Sector): self.crop_distribution = self.get_crops_by_dst_cell( os.path.join(auxiliary_dir, 'agriculture', 'crops', 'crops.shp')) - self.__logger.write_time_log('AgriculturalSector', 
'__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', '__init__', timeit.default_timer() - spent_time) def involved_grid_cells(self, src_shp): spent_time = timeit.default_timer() - grid_shp = IoShapefile(self.__comm).split_shapefile(self.grid.shapefile) + grid_shp = IoShapefile(self.comm).split_shapefile(self.grid.shapefile) src_union = src_shp.to_crs(grid_shp.crs).geometry.unary_union grid_shp = grid_shp.loc[grid_shp.intersects(src_union), :] - grid_shp_list = self.__comm.gather(grid_shp, root=0) + grid_shp_list = self.comm.gather(grid_shp, root=0) animal_dist_list = [] - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: for small_grid in grid_shp_list: animal_dist_list.append(src_shp.loc[src_shp.intersects( small_grid.to_crs(src_shp.crs).geometry.unary_union), :]) grid_shp = pd.concat(grid_shp_list) - grid_shp = np.array_split(grid_shp, self.__comm.Get_size()) + grid_shp = np.array_split(grid_shp, self.comm.Get_size()) else: grid_shp = None animal_dist_list = None - grid_shp = self.__comm.scatter(grid_shp, root=0) + grid_shp = self.comm.scatter(grid_shp, root=0) - animal_dist = self.__comm.scatter(animal_dist_list, root=0) + animal_dist = self.comm.scatter(animal_dist_list, root=0) - self.__logger.write_time_log('AgriculturalSector', 'involved_grid_cells', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'involved_grid_cells', timeit.default_timer() - spent_time) return grid_shp, animal_dist @@ -170,7 +170,7 @@ class AgriculturalSector(Sector): day_dict = {} for key, value in zip(days, num_days): day_dict[key] = value - self.__logger.write_time_log('AgriculturalSector', 'calculate_num_days', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'calculate_num_days', timeit.default_timer() - spent_time) return day_dict def get_crop_from_land_uses(self, crop_from_land_use_path): @@ -207,7 +207,7 @@ class AgriculturalSector(Sector): 
weights = list(map(float, re.split(' , |, | ,|,| ', element.weight))) crop_dict[element.crop] = list(zip(land_uses, weights)) - self.__logger.write_time_log('AgriculturalSector', 'get_crop_from_land_uses', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_crop_from_land_uses', timeit.default_timer() - spent_time) return crop_dict def get_involved_land_uses(self): @@ -225,7 +225,7 @@ class AgriculturalSector(Sector): land_use = int(land_use_and_weight[0]) if land_use not in land_uses_list: land_uses_list.append(land_use) - self.__logger.write_time_log('AgriculturalSector', 'get_involved_land_uses', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_involved_land_uses', timeit.default_timer() - spent_time) return land_uses_list @@ -251,9 +251,9 @@ class AgriculturalSector(Sector): land_uses_clipped = IoRaster(self.comm_agr).clip_raster_with_shapefile_poly( self.land_uses_path, self.clip.shapefile, land_uses_clipped, values=land_uses) self.comm_agr.Barrier() - self.__logger.write_log('\t\tRaster {0} to_shapefile.'.format(land_uses_clipped), message_level=3) + self.logger.write_log('\t\tRaster {0} to_shapefile.'.format(land_uses_clipped), message_level=3) land_use_src_by_nut = IoRaster(self.comm_agr).to_shapefile_parallel(land_uses_clipped) - self.__logger.write_log('\t\tFiltering shapefile.'.format(land_uses_clipped), message_level=3) + self.logger.write_log('\t\tFiltering shapefile.'.format(land_uses_clipped), message_level=3) land_use_src_by_nut.rename(columns={'data': 'land_use'}, inplace=True) land_use_src_by_nut['land_use'] = land_use_src_by_nut['land_use'].astype(np.int16) @@ -264,14 +264,14 @@ class AgriculturalSector(Sector): land_use_src_by_nut.set_index('CELL_ID', inplace=True) if write: - self.__logger.write_log('\t\tWriting {0} file.'.format(land_use_src_by_nut_path), message_level=3) + self.logger.write_log('\t\tWriting {0} file.'.format(land_use_src_by_nut_path), 
message_level=3) IoShapefile(self.comm_agr).write_shapefile_parallel(land_use_src_by_nut.reset_index(), land_use_src_by_nut_path) else: land_use_src_by_nut = IoShapefile(self.comm_agr).read_shapefile_parallel(land_use_src_by_nut_path) land_use_src_by_nut.set_index('CELL_ID', inplace=True) - self.__logger.write_time_log('AgriculturalSector', 'get_land_use_src_by_nut', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_land_use_src_by_nut', timeit.default_timer() - spent_time) return land_use_src_by_nut @@ -293,7 +293,7 @@ class AgriculturalSector(Sector): df['nut_code'] = df['nut_code'].astype(np.int32) df.set_index(['nut_code', 'land_use'], inplace=True) - self.__logger.write_time_log('AgriculturalSector', 'get_tot_land_use_by_nut', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_tot_land_use_by_nut', timeit.default_timer() - spent_time) return df def get_land_use_by_nut_csv(self, land_use_distribution_src_nut, land_uses): @@ -319,7 +319,7 @@ class AgriculturalSector(Sector): land_use_by_nut['area'] += land_use_distribution_src_nut.groupby(['nut_code', 'land_use'])['area'].sum() land_use_by_nut.fillna(0.0, inplace=True) - self.__logger.write_time_log('AgriculturalSector', 'get_land_use_by_nut_csv', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_land_use_by_nut_csv', timeit.default_timer() - spent_time) return land_use_by_nut def land_use_to_crop_by_nut(self, land_use_by_nut, nuts=None): @@ -351,7 +351,7 @@ class AgriculturalSector(Sector): aux_df.drop(columns=['land_use'], inplace=True) aux_df.set_index('nut_code', inplace=True) new_df[crop] += aux_df['area'] * weight - self.__logger.write_time_log('AgriculturalSector', 'land_use_to_crop_by_nut', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'land_use_to_crop_by_nut', timeit.default_timer() - spent_time) return new_df @@ -374,7 +374,7 @@ class 
AgriculturalSector(Sector): for crop in crop_by_nut.columns: crop_share_by_nut[crop] = crop_by_nut[crop] / tot_crop_by_nut[crop] - self.__logger.write_time_log('AgriculturalSector', 'get_crop_shape_by_nut', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_crop_shape_by_nut', timeit.default_timer() - spent_time) return crop_share_by_nut @@ -401,7 +401,7 @@ class AgriculturalSector(Sector): crop_by_nut = crop_by_nut.loc[crop_share_by_nut.index, :] crop_area_by_nut = crop_share_by_nut * crop_by_nut - self.__logger.write_time_log('AgriculturalSector', 'get_crop_area_by_nut', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_crop_area_by_nut', timeit.default_timer() - spent_time) return crop_area_by_nut def calculate_crop_distribution_src(self, crop_area_by_nut, land_use_distribution_src_nut): @@ -436,8 +436,8 @@ class AgriculturalSector(Sector): crop_distribution_src.loc[crop_distribution_src['nut_code'] == nut, crop] *= \ crop_area_by_nut.loc[nut, crop] - self.__logger.write_time_log('AgriculturalSector', 'calculate_crop_distribution_src', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'calculate_crop_distribution_src', + timeit.default_timer() - spent_time) crop_distribution_src = IoShapefile(self.comm_agr).balance(crop_distribution_src) return crop_distribution_src @@ -473,8 +473,8 @@ class AgriculturalSector(Sector): crop_distribution.reset_index(inplace=True) crop_distribution.set_index('FID', inplace=True) - self.__logger.write_time_log('AgriculturalSector', 'get_crop_distribution_in_dst_cells', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_crop_distribution_in_dst_cells', + timeit.default_timer() - spent_time) return crop_distribution def get_crops_by_dst_cell(self, file_path): @@ -492,15 +492,15 @@ class AgriculturalSector(Sector): """ spent_time = timeit.default_timer() if not 
os.path.exists(file_path): - self.__logger.write_log('Creating the crop distribution shapefile.', message_level=2) + self.logger.write_log('Creating the crop distribution shapefile.', message_level=2) - self.__logger.write_log('\tCreating land use distribution on the source resolution.', message_level=3) + self.logger.write_log('\tCreating land use distribution on the source resolution.', message_level=3) involved_land_uses = self.get_involved_land_uses() land_use_distribution_src_nut = self.get_land_use_src_by_nut(involved_land_uses, write=False) land_use_by_nut = self.get_land_use_by_nut_csv(land_use_distribution_src_nut, involved_land_uses) - self.__logger.write_log('\tCreating the crop distribution on the source resolution.', message_level=3) + self.logger.write_log('\tCreating the crop distribution on the source resolution.', message_level=3) crop_by_nut = self.land_use_to_crop_by_nut(land_use_by_nut) tot_land_use_by_nut = self.get_tot_land_use_by_nut(involved_land_uses) tot_crop_by_nut = self.land_use_to_crop_by_nut( @@ -510,9 +510,9 @@ class AgriculturalSector(Sector): crop_distribution_src = self.calculate_crop_distribution_src( crop_area_by_nut, land_use_distribution_src_nut) - self.__logger.write_log('\tCreating the crop distribution on the grid resolution.', message_level=3) + self.logger.write_log('\tCreating the crop distribution on the grid resolution.', message_level=3) crop_distribution_dst = self.get_crop_distribution_in_dst_cells(crop_distribution_src) - self.__logger.write_log('\tCreating the crop distribution shapefile.', message_level=3) + self.logger.write_log('\tCreating the crop distribution shapefile.', message_level=3) crop_distribution_dst = IoShapefile(self.comm_agr).gather_shapefile(crop_distribution_dst.reset_index()) if self.comm_agr.Get_rank() == 0: crop_distribution_dst = crop_distribution_dst.groupby('FID').sum() @@ -524,19 +524,19 @@ class AgriculturalSector(Sector): else: crop_distribution_dst = None - 
self.__logger.write_log('\tAdding timezone to the shapefile.', message_level=3) + self.logger.write_log('\tAdding timezone to the shapefile.', message_level=3) crop_distribution_dst = IoShapefile(self.comm_agr).split_shapefile(crop_distribution_dst) crop_distribution_dst = self.add_timezone(crop_distribution_dst) - self.__logger.write_log('\tWriting the crop distribution shapefile.', message_level=3) + self.logger.write_log('\tWriting the crop distribution shapefile.', message_level=3) IoShapefile(self.comm_agr).write_shapefile_parallel(crop_distribution_dst, file_path) - crop_distribution_dst = IoShapefile(self.__comm).read_shapefile_parallel(file_path) + crop_distribution_dst = IoShapefile(self.comm).read_shapefile_parallel(file_path) crop_distribution_dst.set_index('FID', inplace=True, drop=True) # Filtering crops by used on the sub-sector (operations, fertilizers, machinery) crop_distribution_dst = crop_distribution_dst.loc[:, self.crop_list + ['timezone', 'geometry']] - self.__logger.write_time_log('AgriculturalSector', 'get_crops_by_dst_cell', timeit.default_timer() - spent_time) + self.logger.write_time_log('AgriculturalSector', 'get_crops_by_dst_cell', timeit.default_timer() - spent_time) return crop_distribution_dst @staticmethod diff --git a/hermesv3_bu/sectors/aviation_sector.py b/hermesv3_bu/sectors/aviation_sector.py index 98d1b1e..b5a6101 100755 --- a/hermesv3_bu/sectors/aviation_sector.py +++ b/hermesv3_bu/sectors/aviation_sector.py @@ -172,17 +172,17 @@ class AviationSector(Sector): self.trajectory_arrival_distribution = self.calculate_trajectories_distribution( airport_trajectories_shapefile, 'arrival') comm.Barrier() - self.__logger.write_time_log('AviationSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', '__init__', timeit.default_timer() - spent_time) def read_ef_files(self, ef_path): - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: ef_files = {} for phase in 
PHASE_TYPE.keys(): ef_files[phase] = pd.read_csv(os.path.join(ef_path, PHASE_EF_FILE[phase])) else: ef_files = None - ef_files = self.__comm.bcast(ef_files, root=0) + ef_files = self.comm.bcast(ef_files, root=0) return ef_files @@ -219,7 +219,7 @@ class AviationSector(Sector): trajectories.drop(columns=['arrival_f', 'departure_f'], inplace=True) trajectories.set_index(['runway_id', 'operation'], inplace=True) - self.__logger.write_time_log('AviationSector', 'read_trajectories_shapefile', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'read_trajectories_shapefile', timeit.default_timer() - spent_time) return trajectories @@ -234,7 +234,7 @@ class AviationSector(Sector): :rtype: GeoDataFrame, None """ spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: runway_shapefile = gpd.read_file(airport_runways_shapefile_path) runway_shapefile.set_index(['airport_id', 'runway_id'], inplace=True) runway_shapefile = runway_shapefile.loc[self.airport_list_full, :] @@ -243,7 +243,7 @@ class AviationSector(Sector): runway_shapefile.rename(columns={'approach_f': 'arrival_f', 'climbout_f': 'departure_f'}, inplace=True) else: runway_shapefile = None - self.__logger.write_time_log('AviationSector', 'read_runway_shapefile', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'read_runway_shapefile', timeit.default_timer() - spent_time) return runway_shapefile @@ -273,7 +273,7 @@ class AviationSector(Sector): profiles.rename(columns={-1: 'P_hour', -3: "operation", -2: "day_type"}, inplace=True) profiles.set_index(["P_hour", "operation", "day_type"], inplace=True) - self.__logger.write_time_log('AviationSector', 'read_hourly_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'read_hourly_profiles', timeit.default_timer() - spent_time) return profiles @@ -308,8 +308,8 @@ class AviationSector(Sector): 
operations.set_index(['airport_id', 'plane_id', 'operation'], inplace=True) operations.rename(columns={'1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, '10': 10, '11': 11, '12': 12}, inplace=True) - self.__logger.write_time_log('AviationSector', 'read_operations_update_plane_list', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'read_operations_update_plane_list', + timeit.default_timer() - spent_time) return operations @@ -329,7 +329,7 @@ class AviationSector(Sector): dataframe = dataframe.loc[dataframe['plane_id'].isin(self.plane_list)] dataframe.set_index('plane_id', inplace=True) - self.__logger.write_time_log('AviationSector', 'read_planes', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'read_planes', timeit.default_timer() - spent_time) return dataframe @@ -347,7 +347,7 @@ class AviationSector(Sector): dataframe = pd.read_csv(times_path) dataframe = dataframe.loc[dataframe['airport_id'].isin(self.airport_list)] dataframe.set_index(['airport_id', 'plane_type'], inplace=True) - self.__logger.write_time_log('AviationSector', 'read_times_info', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'read_times_info', timeit.default_timer() - spent_time) return dataframe @@ -369,7 +369,7 @@ class AviationSector(Sector): :rtype: list """ spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: airport_shapefile = airport_shapefile.reset_index() airport_shapefile = gpd.sjoin(airport_shapefile.to_crs(self.grid.shapefile.crs), self.clip.shapefile.to_crs(self.grid.shapefile.crs), how='inner', @@ -395,8 +395,8 @@ class AviationSector(Sector): # Only for master (rank == 0) self.airport_list_full = new_list - new_list = [new_list[i * len(new_list) // self.__comm.size: (i + 1) * len(new_list) // self.__comm.size] - for i in range(self.__comm.size)] + new_list = [new_list[i * len(new_list) // 
self.comm.size: (i + 1) * len(new_list) // self.comm.size] + for i in range(self.comm.size)] for sublist in new_list: if len(sublist) == 0: error_exit("The selected number of processors is to high. " + @@ -405,8 +405,8 @@ class AviationSector(Sector): else: new_list = None - new_list = self.__comm.scatter(new_list, root=0) - self.__logger.write_time_log('AviationSector', 'get_airport_list', timeit.default_timer() - spent_time) + new_list = self.comm.scatter(new_list, root=0) + self.logger.write_time_log('AviationSector', 'get_airport_list', timeit.default_timer() - spent_time) return new_list @@ -424,11 +424,11 @@ class AviationSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\t\tCalculating airport distribution', message_level=2) + self.logger.write_log('\t\tCalculating airport distribution', message_level=2) airport_distribution_path = os.path.join(self.auxiliary_dir, 'aviation', 'airport_distribution.csv') if not os.path.exists(airport_distribution_path): - if self.__comm.rank == 0: + if self.comm.rank == 0: airport_shapefile = airport_shapefile.loc[self.airport_list_full, :].copy() if not os.path.exists(os.path.dirname(airport_distribution_path)): os.makedirs(os.path.dirname(airport_distribution_path)) @@ -445,12 +445,12 @@ class AviationSector(Sector): airport_distribution.to_csv(airport_distribution_path) else: airport_distribution = None - airport_distribution = self.__comm.bcast(airport_distribution, root=0) + airport_distribution = self.comm.bcast(airport_distribution, root=0) else: airport_distribution = pd.read_csv(airport_distribution_path) airport_distribution.set_index(['airport_id', 'FID', 'layer'], inplace=True) - self.__logger.write_time_log('AviationSector', 'calculate_airport_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'calculate_airport_distribution', + timeit.default_timer() - spent_time) return airport_distribution @@ -482,13 +482,13 @@ 
class AviationSector(Sector): return df.loc[:, ['{0}_f'.format(phase_type)]] - self.__logger.write_log('\t\tCalculating runway distribution for {0}'.format(phase_type), message_level=2) + self.logger.write_log('\t\tCalculating runway distribution for {0}'.format(phase_type), message_level=2) runway_distribution_path = os.path.join( self.auxiliary_dir, 'aviation', 'runway_{0}_distribution.csv'.format(phase_type)) if not os.path.exists(runway_distribution_path): - if self.__comm.rank == 0: + if self.comm.rank == 0: runway_shapefile['{0}_f'.format(phase_type)] = runway_shapefile.groupby('airport_id').apply(normalize) runway_shapefile.to_crs(self.grid.shapefile.crs, inplace=True) @@ -516,12 +516,12 @@ class AviationSector(Sector): runway_shapefile.to_csv(runway_distribution_path) else: runway_shapefile = None - runway_shapefile = self.__comm.bcast(runway_shapefile, root=0) + runway_shapefile = self.comm.bcast(runway_shapefile, root=0) else: runway_shapefile = pd.read_csv(runway_distribution_path) runway_shapefile.set_index(['airport_id', 'FID', 'layer'], inplace=True) - self.__logger.write_time_log('AviationSector', 'calculate_runway_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'calculate_runway_distribution', + timeit.default_timer() - spent_time) return runway_shapefile @@ -566,12 +566,12 @@ class AviationSector(Sector): df['fraction'] = df['fraction'] / total_fraction return df.loc[:, ['fraction']] - self.__logger.write_log('\t\tCalculating trajectories distribution for {0}'.format(phase_type), message_level=2) + self.logger.write_log('\t\tCalculating trajectories distribution for {0}'.format(phase_type), message_level=2) trajectories_distribution_path = os.path.join( self.auxiliary_dir, 'aviation', 'trajectories_{0}_distribution.csv'.format(phase_type)) if not os.path.exists(trajectories_distribution_path): - if self.__comm.rank == 0: + if self.comm.rank == 0: if not 
os.path.exists(os.path.dirname(trajectories_distribution_path)): os.makedirs(os.path.dirname(trajectories_distribution_path)) # Filtering shapefile @@ -625,12 +625,12 @@ class AviationSector(Sector): trajectories_distr.to_csv(trajectories_distribution_path) else: trajectories_distr = None - trajectories_distr = self.__comm.bcast(trajectories_distr, root=0) + trajectories_distr = self.comm.bcast(trajectories_distr, root=0) else: trajectories_distr = pd.read_csv(trajectories_distribution_path) trajectories_distr.set_index(['airport_id', 'FID', 'layer'], inplace=True) - self.__logger.write_time_log('AviationSector', 'calculate_trajectories_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'calculate_trajectories_distribution', + timeit.default_timer() - spent_time) return trajectories_distr @@ -737,7 +737,7 @@ class AviationSector(Sector): dataframe.drop(columns=['f', 'plane_id', 'geometry'], inplace=True) dataframe = dataframe.groupby(['airport_id', 'tstep']).sum() - self.__logger.write_time_log('AviationSector', 'get_main_engine_emission', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'get_main_engine_emission', timeit.default_timer() - spent_time) return dataframe @@ -835,8 +835,8 @@ class AviationSector(Sector): dataframe.drop(columns=['f', 'plane_id', 'geometry'], inplace=True) dataframe = dataframe.groupby(['airport_id', 'tstep']).sum() - self.__logger.write_time_log('AviationSector', 'get_tyre_and_brake_wear_emission', - timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'get_tyre_and_brake_wear_emission', + timeit.default_timer() - spent_time) return dataframe @@ -938,8 +938,8 @@ class AviationSector(Sector): dataframe.drop(columns=['f', 'plane_id', 'geometry'], inplace=True) dataframe = dataframe.groupby(['airport_id', 'tstep']).sum() - self.__logger.write_time_log('AviationSector', 'get_auxiliary_power_unit_emission', - 
timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'get_auxiliary_power_unit_emission', + timeit.default_timer() - spent_time) return dataframe @@ -966,7 +966,7 @@ class AviationSector(Sector): dataframe[pollutants] = dataframe[pollutants].multiply(dataframe['fraction'], axis=0) dataframe.drop(columns=['airport_id', 'fraction'], inplace=True) dataframe = dataframe.groupby(['FID', 'layer', 'tstep']).sum() - self.__logger.write_time_log('AviationSector', 'distribute', timeit.default_timer() - spent_time) + self.logger.write_time_log('AviationSector', 'distribute', timeit.default_timer() - spent_time) return dataframe @@ -978,43 +978,43 @@ class AviationSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') taxi_out = self.get_main_engine_emission('taxi_out') - self.__logger.write_log('\t\tTaxi out emissions calculated.', message_level=2) + self.logger.write_log('\t\tTaxi out emissions calculated.', message_level=2) taxi_in = self.get_main_engine_emission('taxi_in') - self.__logger.write_log('\t\tTaxi in emissions calculated.', message_level=2) + self.logger.write_log('\t\tTaxi in emissions calculated.', message_level=2) takeoff = self.get_main_engine_emission('takeoff') - self.__logger.write_log('\t\tTake off emissions calculated.', message_level=2) + self.logger.write_log('\t\tTake off emissions calculated.', message_level=2) climbout = self.get_main_engine_emission('climbout') - self.__logger.write_log('\t\tClimb out emissions calculated.', message_level=2) + self.logger.write_log('\t\tClimb out emissions calculated.', message_level=2) approach = self.get_main_engine_emission('approach') - self.__logger.write_log('\t\tApproach emissions calculated.', message_level=2) + self.logger.write_log('\t\tApproach emissions calculated.', message_level=2) landing = self.get_main_engine_emission('landing') - 
self.__logger.write_log('\t\tLanding emissions calculated.', message_level=2) + self.logger.write_log('\t\tLanding emissions calculated.', message_level=2) landing_wear = self.get_tyre_and_brake_wear_emission('landing_wear') - self.__logger.write_log('\t\tLanding wear emissions calculated.', message_level=2) + self.logger.write_log('\t\tLanding wear emissions calculated.', message_level=2) post_taxi_in = self.get_auxiliary_power_unit_emission('post-taxi_in') - self.__logger.write_log('\t\tPost taxi in emissions calculated.', message_level=2) + self.logger.write_log('\t\tPost taxi in emissions calculated.', message_level=2) pre_taxi_out = self.get_auxiliary_power_unit_emission('pre-taxi_out') - self.__logger.write_log('\t\tPre taxi out emissions calculated.', message_level=2) + self.logger.write_log('\t\tPre taxi out emissions calculated.', message_level=2) airport_emissions = pd.concat([pre_taxi_out, taxi_out, taxi_in, post_taxi_in]) airport_emissions = airport_emissions.groupby(['airport_id', 'tstep']).sum() airport_emissions = self.distribute(airport_emissions, self.airport_distribution) - self.__logger.write_log('\t\tAirport emissions distributed (pre_taxi_out, taxi_out, taxi_in, post_taxi_in)', - message_level=2) + self.logger.write_log('\t\tAirport emissions distributed (pre_taxi_out, taxi_out, taxi_in, post_taxi_in)', + message_level=2) runway_departure_emissions = self.distribute(takeoff, self.runway_departure_distribution) runway_arrival_emissions = self.distribute(landing, self.runway_arrival_distribution) runway_arrival_emissions_wear = self.distribute(landing_wear, self.runway_arrival_distribution) - self.__logger.write_log('\t\tRunway emissions distributed (takeoff, landing, landing_wear)', message_level=2) + self.logger.write_log('\t\tRunway emissions distributed (takeoff, landing, landing_wear)', message_level=2) trajectory_arrival_emissions = self.distribute(approach, self.trajectory_arrival_distribution) trajectory_departure_emisions = 
self.distribute(climbout, self.trajectory_departure_distribution) - self.__logger.write_log('\t\tTrajectory emissions distributed (approach, climb out)', message_level=2) + self.logger.write_log('\t\tTrajectory emissions distributed (approach, climb out)', message_level=2) emissions = pd.concat([airport_emissions, runway_departure_emissions, trajectory_arrival_emissions, trajectory_departure_emisions, runway_arrival_emissions], sort=False) @@ -1036,6 +1036,6 @@ class AviationSector(Sector): # From kmol/h or kg/h to mol/h or g/h emissions = emissions * 1000 - self.__logger.write_log('\t\tAviation emissions calculated', message_level=2) - self.__logger.write_time_log('AviationSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\t\tAviation emissions calculated', message_level=2) + self.logger.write_time_log('AviationSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/livestock_sector.py b/hermesv3_bu/sectors/livestock_sector.py index 2e2fc21..51c4c69 100755 --- a/hermesv3_bu/sectors/livestock_sector.py +++ b/hermesv3_bu/sectors/livestock_sector.py @@ -191,7 +191,7 @@ class LivestockSector(Sector): # Creating dst resolution shapefile with the amount of animals self.animals_df = self.create_animals_distribution(gridded_livestock_path, nut_shapefile_path, correction_split_factors_path) - self.__logger.write_time_log('LivestockSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', '__init__', timeit.default_timer() - spent_time) def create_animals_distribution(self, gridded_livestock_path, nut_shapefile_path, correction_split_factors_path): """ @@ -224,15 +224,15 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\tCreating animal distribution', message_level=2) + self.logger.write_log('\tCreating animal distribution', message_level=2) 
animals_df = self.create_animals_shapefile(gridded_livestock_path) animals_df = self.animal_distribution_by_category(animals_df, nut_shapefile_path, correction_split_factors_path) - self.__logger.write_log('Animal distribution done', message_level=2) - self.__logger.write_time_log('LivestockSector', 'create_animals_distribution', - timeit.default_timer() - spent_time) + self.logger.write_log('Animal distribution done', message_level=2) + self.logger.write_time_log('LivestockSector', 'create_animals_distribution', + timeit.default_timer() - spent_time) return animals_df @@ -250,7 +250,7 @@ class LivestockSector(Sector): day_dict = {} for key, value in zip(days, num_days): day_dict[key] = value - self.__logger.write_time_log('LivestockSector', 'calculate_num_days', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'calculate_num_days', timeit.default_timer() - spent_time) return day_dict @@ -275,27 +275,27 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\t\tCreating animal shapefile into source resolution', message_level=3) + self.logger.write_log('\t\tCreating animal shapefile into source resolution', message_level=3) animal_distribution = None # For each one of the animals of the animal list for animal in self.animal_list: - self.__logger.write_log('\t\t\t {0}'.format(animal), message_level=3) + self.logger.write_log('\t\t\t {0}'.format(animal), message_level=3) # Each one of the animal distributions will be stored separately animal_distribution_path = os.path.join(self.auxiliary_dir, 'livestock', animal, '{0}.shp'.format(animal)) if not os.path.exists(animal_distribution_path): # Create clipped raster file clipped_raster_path = os.path.join( self.auxiliary_dir, 'livestock', animal, '{0}_clip.tif'.format(animal)) - if self.__comm.Get_rank() == 0: - clipped_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + if self.comm.Get_rank() == 0: + 
clipped_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( gridded_livestock_path.replace('', animal), self.clip.shapefile, clipped_raster_path) - animal_df = IoRaster(self.__comm).to_shapefile_parallel(clipped_raster_path) + animal_df = IoRaster(self.comm).to_shapefile_parallel(clipped_raster_path) animal_df.rename(columns={'data': animal}, inplace=True) animal_df.set_index('CELL_ID', inplace=True) - IoShapefile(self.__comm).write_shapefile_parallel(animal_df.reset_index(), animal_distribution_path) + IoShapefile(self.comm).write_shapefile_parallel(animal_df.reset_index(), animal_distribution_path) else: - animal_df = IoShapefile(self.__comm).read_shapefile_parallel(animal_distribution_path) + animal_df = IoShapefile(self.comm).read_shapefile_parallel(animal_distribution_path) animal_df.set_index('CELL_ID', inplace=True) # Creating full animal shapefile @@ -311,8 +311,8 @@ class LivestockSector(Sector): # Removing empty data animal_distribution = animal_distribution.loc[(animal_distribution[self.animal_list] != 0).any(axis=1), :] - self.__logger.write_time_log('LivestockSector', 'create_animals_shapefile_src_resolution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'create_animals_shapefile_src_resolution', + timeit.default_timer() - spent_time) return animal_distribution def animals_shapefile_to_dst_resolution(self, animal_distribution): @@ -326,10 +326,10 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\t\tCreating animal shapefile into destiny resolution', message_level=3) + self.logger.write_log('\t\tCreating animal shapefile into destiny resolution', message_level=3) self.grid.shapefile.reset_index(inplace=True) - animal_distribution = IoShapefile(self.__comm).balance(animal_distribution) + animal_distribution = IoShapefile(self.comm).balance(animal_distribution) # Changing coordinates system to the grid one 
animal_distribution.to_crs(self.grid.shapefile.crs, inplace=True) # Getting src area @@ -347,8 +347,8 @@ class LivestockSector(Sector): # Sum by destiny cell animal_distribution = animal_distribution.loc[:, self.animal_list + ['FID']].groupby('FID').sum() - animal_distribution = IoShapefile(self.__comm).gather_shapefile(animal_distribution.reset_index()) - if self.__comm.Get_rank() == 0: + animal_distribution = IoShapefile(self.comm).gather_shapefile(animal_distribution.reset_index()) + if self.comm.Get_rank() == 0: animal_distribution = animal_distribution.groupby('FID').sum() # Adding geometry and coordinates system from the destiny grid shapefile animal_distribution = gpd.GeoDataFrame( @@ -357,9 +357,9 @@ class LivestockSector(Sector): else: animal_distribution = None - animal_distribution = IoShapefile(self.__comm).split_shapefile(animal_distribution) - self.__logger.write_time_log('LivestockSector', 'animals_shapefile_to_dst_resolution', - timeit.default_timer() - spent_time) + animal_distribution = IoShapefile(self.comm).split_shapefile(animal_distribution) + self.logger.write_time_log('LivestockSector', 'animals_shapefile_to_dst_resolution', + timeit.default_timer() - spent_time) return animal_distribution @@ -384,11 +384,11 @@ class LivestockSector(Sector): if not os.path.exists(animal_distribution_path): dataframe = self.create_animals_shapefile_src_resolution(gridded_livestock_path) dataframe = self.animals_shapefile_to_dst_resolution(dataframe) - IoShapefile(self.__comm).write_shapefile_parallel(dataframe.reset_index(), animal_distribution_path) + IoShapefile(self.comm).write_shapefile_parallel(dataframe.reset_index(), animal_distribution_path) else: - dataframe = IoShapefile(self.__comm).read_shapefile_parallel(animal_distribution_path) + dataframe = IoShapefile(self.comm).read_shapefile_parallel(animal_distribution_path) dataframe.set_index('FID', inplace=True) - self.__logger.write_time_log('LivestockSector', 'create_animals_shapefile', 
timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'create_animals_shapefile', timeit.default_timer() - spent_time) return dataframe @@ -428,7 +428,7 @@ class LivestockSector(Sector): splitting_factors.reset_index(inplace=True) splitting_factors['nuts3_id'] = splitting_factors['nuts3_id'].astype(np.int16) - self.__logger.write_time_log('LivestockSector', 'get_splitting_factors', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'get_splitting_factors', timeit.default_timer() - spent_time) return splitting_factors @@ -467,7 +467,7 @@ class LivestockSector(Sector): nut_value='nuts3_id') animal_distribution.rename(columns={'nut_code': 'nuts3_id'}, inplace=True) animal_distribution = animal_distribution[animal_distribution['nuts3_id'] != -999] - animal_distribution = IoShapefile(self.__comm).balance(animal_distribution) + animal_distribution = IoShapefile(self.comm).balance(animal_distribution) animal_distribution.set_index('FID', inplace=True) splitting_factors = self.get_splitting_factors(correction_split_factors_path) @@ -487,12 +487,12 @@ class LivestockSector(Sector): animal_distribution = self.add_timezone(animal_distribution) animal_distribution.set_index('FID', inplace=True) - IoShapefile(self.__comm).write_shapefile_parallel(animal_distribution.reset_index(), animal_distribution_path) + IoShapefile(self.comm).write_shapefile_parallel(animal_distribution.reset_index(), animal_distribution_path) else: - animal_distribution = IoShapefile(self.__comm).read_shapefile_parallel(animal_distribution_path) + animal_distribution = IoShapefile(self.comm).read_shapefile_parallel(animal_distribution_path) animal_distribution.set_index('FID', inplace=True) - self.__logger.write_time_log('LivestockSector', 'animal_distribution_by_category', - timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'animal_distribution_by_category', + timeit.default_timer() - spent_time) return 
animal_distribution @@ -532,26 +532,26 @@ class LivestockSector(Sector): geometry_shp.drop(columns='geometry', inplace=True) # Extracting temperature - meteo = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.paths['temperature_dir'], 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'tas', 'daily', day, geometry_shp) meteo['tas'] = meteo['tas'] - 273.15 # From Celsius to Kelvin degrees # Extracting wind speed - meteo['sfcWind'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo['sfcWind'] = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.paths['wind_speed_dir'], 'sfcWind_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'sfcWind', 'daily', day, geometry_shp).loc[:, 'sfcWind'] # Extracting denominators already calculated for all the emission types - meteo['D_grassing'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo['D_grassing'] = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'grassing_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - meteo['D_housing_closed'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo['D_housing_closed'] = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'housing_closed_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - meteo['D_housing_open'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo['D_housing_open'] = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'housing_open_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] - meteo['D_storage'] = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo['D_storage'] = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.paths['denominator_dir'], 'storage_{0}.nc'.format(day.year)), 'FD', 'yearly', day, geometry_shp).loc[:, 'FD'] @@ -581,7 +581,7 @@ class LivestockSector(Sector): meteo.loc[:, 
'FD_grassing'].multiply((1 / (SIGMA * math.sqrt(2 * math.pi))) * math.exp( (float(int(day.strftime('%j')) - TAU) ** 2) / (-2 * (SIGMA ** 2)))) - self.__logger.write_time_log('LivestockSector', 'get_daily_factors', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'get_daily_factors', timeit.default_timer() - spent_time) return meteo.loc[:, ['REC', 'FD_housing_open', 'FD_housing_closed', 'FD_storage', 'FD_grassing', 'geometry']] @@ -624,7 +624,7 @@ class LivestockSector(Sector): new_df['EF_storage'] = new_df['Estorage_sd_l'] + new_df['Estorage_sl_l'] new_df.drop(['Estorage_sd_l', 'Estorage_sl_l'], axis=1, inplace=True) - self.__logger.write_time_log('LivestockSector', 'get_nh3_ef', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'get_nh3_ef', timeit.default_timer() - spent_time) return new_df @@ -661,7 +661,7 @@ class LivestockSector(Sector): new_df['EF_storage'] = new_df['Estorage_sd_l'] + new_df['Estorage_sl_l'] new_df.drop(['Estorage_sd_l', 'Estorage_sl_l'], axis=1, inplace=True) - self.__logger.write_time_log('LivestockSector', 'get_nox_no_ef', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'get_nox_no_ef', timeit.default_timer() - spent_time) return new_df @@ -696,8 +696,8 @@ class LivestockSector(Sector): animals_df.drop(columns=['centroid', 'REC', 'geometry_y'], axis=1, inplace=True) animals_df.rename(columns={'geometry_x': 'geometry'}, inplace=True) - self.__logger.write_time_log('LivestockSector', 'add_daily_factors_to_animal_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'add_daily_factors_to_animal_distribution', + timeit.default_timer() - spent_time) return animals_df @@ -732,7 +732,7 @@ class LivestockSector(Sector): # get_list out_pollutants from speciation map -> NH3 out_pollutants = self.get_output_pollutants('nh3') for out_p in out_pollutants: - self.__logger.write_log('\t\t\tCalculating 
{0} emissions'.format(out_p), message_level=3) + self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -771,7 +771,7 @@ class LivestockSector(Sector): # get_list out_pollutants from speciation map -> PAR, OLE, TOL ... (15 species) out_pollutants = self.get_output_pollutants('nmvoc') for out_p in out_pollutants: - self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -801,7 +801,7 @@ class LivestockSector(Sector): if 'pm10' in [x.lower() for x in self.source_pollutants]: out_pollutants = self.get_output_pollutants('pm10') for out_p in out_pollutants: - self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -853,7 +853,7 @@ class LivestockSector(Sector): if 'pm25' in [x.lower() for x in self.source_pollutants]: out_pollutants = self.get_output_pollutants('pm25') for out_p in out_pollutants: - self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -905,7 +905,7 @@ class LivestockSector(Sector): if 'nox_no' in [x.lower() for x in self.source_pollutants]: out_pollutants = self.get_output_pollutants('nox_no') for out_p in out_pollutants: - self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(out_p), message_level=3) + self.logger.write_log('\t\t\tCalculating {0} 
emissions'.format(out_p), message_level=3) out_df[out_p] = 0 if out_p not in self.output_pollutants: self.output_pollutants.append(out_p) @@ -924,7 +924,7 @@ class LivestockSector(Sector): # ===== PMC ===== if 'pmc' in [x.lower() for x in self.speciation_map.keys()]: pmc_name = 'PMC' - self.__logger.write_log('\t\t\tCalculating {0} emissions'.format(pmc_name), message_level=3) + self.logger.write_log('\t\t\tCalculating {0} emissions'.format(pmc_name), message_level=3) if all(x in [x.lower() for x in self.source_pollutants] for x in ['pm10', 'pm25']): if pmc_name not in self.output_pollutants: self.output_pollutants.append(pmc_name) @@ -937,9 +937,9 @@ class LivestockSector(Sector): not_pollutants = [poll for poll in self.source_pollutants if poll not in ['nh3', 'nox_no', 'nh3', 'nmvoc', 'pm10', 'pm25']] if len(not_pollutants) > 0: - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: warn('The pollutants {0} cannot be calculated on the Livestock sector'.format(not_pollutants)) - self.__logger.write_time_log('LivestockSector', 'calculate_day_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'calculate_day_emissions', timeit.default_timer() - spent_time) return out_df @@ -962,8 +962,8 @@ class LivestockSector(Sector): daily_emissions = {} for day in self.day_dict.keys(): daily_emissions[day] = self.calculate_day_emissions(animals_df, day) - self.__logger.write_time_log('LivestockSector', 'calculate_daily_emissions_dict', - timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'calculate_daily_emissions_dict', + timeit.default_timer() - spent_time) return daily_emissions @@ -988,7 +988,7 @@ class LivestockSector(Sector): dataframe_by_day = pd.concat(df_list, ignore_index=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.__logger.write_time_log('LivestockSector', 'add_dates', timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 
'add_dates', timeit.default_timer() - spent_time) return dataframe_by_day @@ -1020,8 +1020,8 @@ class LivestockSector(Sector): distribution['hour'] = distribution['date'].dt.hour for out_p in self.output_pollutants: - self.__logger.write_log('\t\t\tDistributing {0} emissions to hourly resolution'.format(out_p), - message_level=3) + self.logger.write_log('\t\t\tDistributing {0} emissions to hourly resolution'.format(out_p), + message_level=3) if out_p.lower() == 'pmc': in_p = 'pmc' else: @@ -1046,8 +1046,8 @@ class LivestockSector(Sector): distribution['date'] = distribution['date_utc'] distribution.drop(columns=['hour', 'date_utc'], axis=1, inplace=True) - self.__logger.write_time_log('LivestockSector', 'calculate_hourly_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('LivestockSector', 'calculate_hourly_distribution', + timeit.default_timer() - spent_time) return distribution @@ -1059,18 +1059,18 @@ class LivestockSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') - self.__logger.write_log('\t\tCalculating Daily emissions', message_level=2) + self.logger.write_log('\t\tCalculating Daily emissions', message_level=2) df_by_day = self.calculate_daily_emissions_dict(self.animals_df) - self.__logger.write_log('\t\tCalculating hourly emissions', message_level=2) + self.logger.write_log('\t\tCalculating hourly emissions', message_level=2) animals_df = self.calculate_hourly_distribution(df_by_day) animals_df.drop(columns=['geometry'], inplace=True) animals_df['layer'] = 0 animals_df = animals_df.groupby(['FID', 'layer', 'tstep']).sum() - self.__logger.write_log('\t\tLivestock emissions calculated', message_level=2) - self.__logger.write_time_log('LivestockSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\t\tLivestock emissions calculated', message_level=2) + 
self.logger.write_time_log('LivestockSector', 'calculate_emissions', timeit.default_timer() - spent_time) return animals_df diff --git a/hermesv3_bu/sectors/point_source_sector.py b/hermesv3_bu/sectors/point_source_sector.py index 6ee451a..2b7d1b0 100755 --- a/hermesv3_bu/sectors/point_source_sector.py +++ b/hermesv3_bu/sectors/point_source_sector.py @@ -69,7 +69,7 @@ class PointSourceSector(Sector): self.measured_path = measured_emission_path self.plume_rise_pahts = plume_rise_pahts - self.__logger.write_time_log('PointSourceSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', '__init__', timeit.default_timer() - spent_time) def check_catalog(self): # Checking monthly profiles IDs @@ -116,7 +116,7 @@ class PointSourceSector(Sector): """ spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: if self.plume_rise: columns = {"Code": np.str, "Cons": np.bool, "SNAP": np.str, "Lon": np.float64, "Lat": np.float64, "Height": np.float64, "Diameter": np.float64, @@ -151,9 +151,9 @@ class PointSourceSector(Sector): else: catalog_df = None - self.__comm.Barrier() - catalog_df = IoShapefile(self.__comm).split_shapefile(catalog_df) - self.__logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) + self.comm.Barrier() + catalog_df = IoShapefile(self.comm).split_shapefile(catalog_df) + self.logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) return catalog_df def read_catalog_shapefile(self, catalog_path, sector_list): @@ -171,7 +171,7 @@ class PointSourceSector(Sector): """ spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: if self.plume_rise: columns = {"Code": np.str, "Cons": np.bool, "SNAP": np.str, "Height": np.float64, "Diameter": np.float64, "Speed": np.float64, "Temp": np.float64, "AF": np.float64, @@ -208,9 +208,9 @@ class 
PointSourceSector(Sector): else: catalog_df = None - self.__comm.Barrier() - catalog_df = IoShapefile(self.__comm).split_shapefile(catalog_df) - self.__logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) + self.comm.Barrier() + catalog_df = IoShapefile(self.comm).split_shapefile(catalog_df) + self.logger.write_time_log('PointSourceSector', 'read_catalog', timeit.default_timer() - spent_time) return catalog_df def read_catalog_for_measured_emissions_csv(self, catalog_path, sector_list): @@ -252,8 +252,8 @@ class PointSourceSector(Sector): catalog_df = catalog_df.loc[catalog_df['SNAP'].str[:2].isin(sector_list)] catalog_df.drop('SNAP', axis=1, inplace=True) - self.__logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', + timeit.default_timer() - spent_time) return catalog_df def read_catalog_for_measured_emissions(self, catalog_path, sector_list): @@ -302,8 +302,8 @@ class PointSourceSector(Sector): catalog_df = catalog_df.loc[catalog_df['SNAP'].str[:2].isin(sector_list)] catalog_df.drop('SNAP', axis=1, inplace=True) - self.__logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'read_catalog_for_measured_emissions', + timeit.default_timer() - spent_time) return catalog_df def to_geodataframe(self, catalog): @@ -323,7 +323,7 @@ class PointSourceSector(Sector): catalog.drop(['Lon', 'Lat'], axis=1, inplace=True) crs = {'init': 'epsg:4326'} catalog = gpd.GeoDataFrame(catalog, crs=crs, geometry=geometry) - self.__logger.write_time_log('PointSourceSector', 'to_geodataframe', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'to_geodataframe', timeit.default_timer() - spent_time) return catalog def get_yearly_emissions(self, 
catalog): @@ -338,13 +338,13 @@ class PointSourceSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating yearly emissions', message_level=2) + self.logger.write_log('\tCalculating yearly emissions', message_level=2) for pollutant in self.source_pollutants: catalog.rename(columns={u'EF_{0}'.format(pollutant): pollutant}, inplace=True) catalog[pollutant] = catalog[pollutant] * catalog['AF'] catalog.drop('AF', axis=1, inplace=True) - self.__logger.write_time_log('PointSourceSector', 'get_yearly_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'get_yearly_emissions', timeit.default_timer() - spent_time) return catalog def get_temporal_factors(self, catalog): @@ -359,7 +359,7 @@ class PointSourceSector(Sector): :rtype: DataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating hourly emissions', message_level=2) + self.logger.write_log('\tCalculating hourly emissions', message_level=2) def get_mf(df): month_factor = self.monthly_profiles.loc[df.name[1], df.name[0]] @@ -396,7 +396,7 @@ class PointSourceSector(Sector): catalog.drop('temp_factor', axis=1, inplace=True) - self.__logger.write_time_log('PointSourceSector', 'get_temporal_factors', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'get_temporal_factors', timeit.default_timer() - spent_time) return catalog def calculate_hourly_emissions(self, catalog): @@ -415,8 +415,8 @@ class PointSourceSector(Sector): catalog = self.get_temporal_factors(catalog) catalog.set_index(['Code', 'tstep'], inplace=True) - self.__logger.write_time_log('PointSourceSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return catalog def get_meteo_xy(self, dataframe, netcdf_path): @@ -461,7 +461,7 @@ class 
PointSourceSector(Sector): dataframe['X'] = nc_dataframe.loc[dataframe['meteo_index'], 'X'].values dataframe['Y'] = nc_dataframe.loc[dataframe['meteo_index'], 'Y'].values - self.__logger.write_time_log('PointSourceSector', 'get_meteo_xy', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'get_meteo_xy', timeit.default_timer() - spent_time) return dataframe[['X', 'Y']] def get_plumerise_meteo(self, catalog): @@ -675,42 +675,42 @@ class PointSourceSector(Sector): # ===== 3D Meteo variables ===== # Adding stc_temp - self.__logger.write_log('\t\tGetting temperature from {0}'.format(self.plume_rise_pahts['temperature_sfc_dir']), - message_level=3) + self.logger.write_log('\t\tGetting temperature from {0}'.format(self.plume_rise_pahts['temperature_sfc_dir']), + message_level=3) catalog['temp_sfc'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['temperature_sfc_dir'], 't2')) - self.__logger.write_log('\t\tGetting friction velocity from {0}'.format( + self.logger.write_log('\t\tGetting friction velocity from {0}'.format( self.plume_rise_pahts['friction_velocity_dir']), message_level=3) catalog['friction_v'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['friction_velocity_dir'], 'ustar')) - self.__logger.write_log('\t\tGetting PBL height from {0}'.format( + self.logger.write_log('\t\tGetting PBL height from {0}'.format( self.plume_rise_pahts['pblh_dir']), message_level=3) catalog['pbl'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, self.plume_rise_pahts['pblh_dir'], 'mixed_layer_height')) - self.__logger.write_log('\t\tGetting obukhov length from {0}'.format( + self.logger.write_log('\t\tGetting obukhov length from {0}'.format( self.plume_rise_pahts['obukhov_length_dir']), message_level=3) catalog['obukhov_len'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_sfc_value(x, 
self.plume_rise_pahts['obukhov_length_dir'], 'rmol')) catalog['obukhov_len'] = 1. / catalog['obukhov_len'] - self.__logger.write_log('\t\tGetting layer thickness from {0}'.format( + self.logger.write_log('\t\tGetting layer thickness from {0}'.format( self.plume_rise_pahts['layer_thickness_dir']), message_level=3) catalog['layers'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_layers(x, self.plume_rise_pahts['layer_thickness_dir'], 'layer_thickness')) - self.__logger.write_log('\t\tGetting temperatue at the top from {0}'.format( + self.logger.write_log('\t\tGetting temperatue at the top from {0}'.format( self.plume_rise_pahts['temperature_4d_dir']), message_level=3) catalog['temp_top'] = catalog.groupby('date_utc')['X', 'Y', 'Height', 'layers', 'temp_sfc'].apply( lambda x: get_temp_top(x, self.plume_rise_pahts['temperature_4d_dir'], 't')) - self.__logger.write_log('\t\tGetting wind speed at 10 m', message_level=3) + self.logger.write_log('\t\tGetting wind speed at 10 m', message_level=3) catalog['wSpeed_10'] = catalog.groupby('date_utc')['X', 'Y'].apply( lambda x: get_wind_speed_10m(x, self.plume_rise_pahts['u10_wind_speed_dir'], self.plume_rise_pahts['v10_wind_speed_dir'], 'u10', 'v10')) - self.__logger.write_log('\t\tGetting wind speed at the top', message_level=3) + self.logger.write_log('\t\tGetting wind speed at the top', message_level=3) catalog['wSpeed_top'] = catalog.groupby('date_utc')['X', 'Y', 'Height', 'layers', 'wSpeed_10'].apply( lambda x: get_wind_speed_top(x, self.plume_rise_pahts['u_wind_speed_4d_dir'], self.plume_rise_pahts['v_wind_speed_4d_dir'], 'u', 'v')) catalog.drop(columns=['wSpeed_10', 'layers', 'X', 'Y'], inplace=True) - self.__logger.write_time_log('PointSourceSector', 'get_plumerise_meteo', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'get_plumerise_meteo', timeit.default_timer() - spent_time) return catalog def get_plume_rise_top_bot(self, catalog): @@ -755,7 +755,7 @@ class 
PointSourceSector(Sector): catalog.drop(columns=['Height', 'Diameter', 'Speed', 'Temp', 'date_utc', 'temp_sfc', 'friction_v', 'pbl', 'obukhov_len', 'temp_top', 'wSpeed_top', 'Fb', 'S', 'Ah'], inplace=True) - self.__logger.write_time_log('PointSourceSector', 'get_plume_rise_top_bot', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'get_plume_rise_top_bot', timeit.default_timer() - spent_time) return catalog def set_layer(self, catalog): @@ -809,7 +809,7 @@ class PointSourceSector(Sector): new_catalog = catalog_by_layer[~catalog_by_layer.index.duplicated(keep='first')] new_catalog[self.source_pollutants] = catalog_by_layer.groupby(['Code', 'tstep', 'layer'])[ self.source_pollutants].sum() - self.__logger.write_time_log('PointSourceSector', 'set_layer', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'set_layer', timeit.default_timer() - spent_time) return new_catalog def calculate_vertical_distribution(self, catalog): @@ -838,8 +838,8 @@ class PointSourceSector(Sector): catalog.reset_index(inplace=True) catalog.set_index(['Code', 'tstep', 'layer'], inplace=True) - self.__logger.write_time_log('PointSourceSector', 'calculate_vertical_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'calculate_vertical_distribution', + timeit.default_timer() - spent_time) return catalog def add_measured_emissions(self, catalog): @@ -871,7 +871,7 @@ class PointSourceSector(Sector): for pollutant in self.source_pollutants: catalog[pollutant] = catalog.groupby('Code')['date'].apply(lambda x: func(x, pollutant)) - self.__logger.write_time_log('PointSourceSector', 'add_measured_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'add_measured_emissions', timeit.default_timer() - spent_time) return catalog def calculate_measured_emissions(self, catalog): @@ -885,8 +885,8 @@ class PointSourceSector(Sector): catalog 
= self.add_measured_emissions(catalog) catalog.set_index(['Code', 'tstep'], inplace=True) - self.__logger.write_time_log('PointSourceSector', 'calculate_measured_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'calculate_measured_emissions', + timeit.default_timer() - spent_time) return catalog def merge_catalogs(self, catalog_list): @@ -894,19 +894,19 @@ class PointSourceSector(Sector): catalog = pd.concat(catalog_list).reset_index() catalog.set_index(['Code', 'tstep'], inplace=True) - self.__logger.write_time_log('PointSourceSector', 'merge_catalogs', timeit.default_timer() - spent_time) + self.logger.write_time_log('PointSourceSector', 'merge_catalogs', timeit.default_timer() - spent_time) return catalog def speciate(self, dataframe, code='default'): spent_time = timeit.default_timer() - self.__logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) + self.logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) new_dataframe = gpd.GeoDataFrame(index=dataframe.index, data=None, crs=dataframe.crs, geometry=dataframe.geometry) for out_pollutant in self.output_pollutants: input_pollutant = self.speciation_map[out_pollutant] if input_pollutant == 'nmvoc' and input_pollutant in dataframe.columns.values: - self.__logger.write_log("\t\t\t{0} = {4}*({1}/{2})*{3}".format( + self.logger.write_log("\t\t\t{0} = {4}*({1}/{2})*{3}".format( out_pollutant, input_pollutant, self.molecular_weights[input_pollutant], self.speciation_profile.loc[code, out_pollutant], self.speciation_profile.loc[code, 'VOCtoTOG']), message_level=3) @@ -916,7 +916,7 @@ class PointSourceSector(Sector): self.molecular_weights[input_pollutant]) * self.speciation_profile.loc[code, out_pollutant] else: if out_pollutant != 'PMC': - self.__logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( + self.logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( out_pollutant, input_pollutant, 
self.molecular_weights[input_pollutant], self.speciation_profile.loc[code, out_pollutant]), message_level=3) @@ -925,7 +925,7 @@ class PointSourceSector(Sector): self.molecular_weights[input_pollutant]) * \ self.speciation_profile.loc[code, out_pollutant] else: - self.__logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( + self.logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( out_pollutant, 'pm10', self.molecular_weights['pm10'], self.speciation_profile.loc[code, out_pollutant], 'pm25', self.molecular_weights['pm25']), message_level=3) @@ -934,7 +934,7 @@ class PointSourceSector(Sector): ((dataframe['pm10'] / self.molecular_weights['pm10']) - (dataframe['pm25'] / self.molecular_weights['pm25'])) * \ self.speciation_profile.loc[code, out_pollutant] - self.__logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) return new_dataframe def point_source_to_fid(self, catalog): @@ -957,12 +957,12 @@ class PointSourceSector(Sector): def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') emissions = self.add_dates(self.catalog, drop_utc=False) emissions = self.calculate_hourly_emissions(emissions) - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: emissions_measured = self.calculate_measured_emissions(self.catalog_measured) else: emissions_measured = None @@ -980,7 +980,7 @@ class PointSourceSector(Sector): # From kmol/h or kg/h to mol/h or g/h emissions = emissions.mul(1000.0) - self.__logger.write_log('\t\tPoint sources emissions calculated', message_level=2) - self.__logger.write_time_log('PointSourceSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\t\tPoint sources emissions calculated', message_level=2) + self.logger.write_time_log('PointSourceSector', 
'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/recreational_boats_sector.py b/hermesv3_bu/sectors/recreational_boats_sector.py index 205cd11..4b7c5c7 100755 --- a/hermesv3_bu/sectors/recreational_boats_sector.py +++ b/hermesv3_bu/sectors/recreational_boats_sector.py @@ -35,14 +35,14 @@ class RecreationalBoatsSector(Sector): self.boats_data_path = boats_data_path self.ef_file_path = ef_file_path - self.__logger.write_time_log('RecreationalBoatsSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', '__init__', timeit.default_timer() - spent_time) def create_density_map(self, density_map_path): spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: density_map_auxpath = os.path.join(self.auxiliary_dir, 'recreational_boats', 'density_map.shp') if not os.path.exists(density_map_auxpath): - src_density_map = IoRaster(self.__comm).to_shapefile_serie(density_map_path, nodata=0) + src_density_map = IoRaster(self.comm).to_shapefile_serie(density_map_path, nodata=0) src_density_map = src_density_map.loc[src_density_map['data'] > 0] src_density_map['data'] = src_density_map['data'] / src_density_map['data'].sum() src_density_map.to_crs(self.grid.shapefile.crs, inplace=True) @@ -59,15 +59,15 @@ class RecreationalBoatsSector(Sector): geometry=self.grid.shapefile.loc[src_density_map.index, 'geometry']) src_density_map.reset_index(inplace=True) - IoShapefile(self.__comm).write_shapefile_serial(src_density_map, density_map_auxpath) + IoShapefile(self.comm).write_shapefile_serial(src_density_map, density_map_auxpath) else: - src_density_map = IoShapefile(self.__comm).read_shapefile_serial(density_map_auxpath) + src_density_map = IoShapefile(self.comm).read_shapefile_serial(density_map_auxpath) else: src_density_map = None - src_density_map = IoShapefile(self.__comm).split_shapefile(src_density_map) + src_density_map 
= IoShapefile(self.comm).split_shapefile(src_density_map) src_density_map.set_index('FID', inplace=True) - self.__logger.write_time_log('RecreationalBoatsSector', 'create_density_map', timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', 'create_density_map', timeit.default_timer() - spent_time) return src_density_map def speciate_dict(self, annual_emissions_dict): @@ -76,7 +76,7 @@ class RecreationalBoatsSector(Sector): speciated_emissions = {} for out_pollutant in self.output_pollutants: if out_pollutant != 'PMC': - self.__logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( + self.logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( out_pollutant, self.speciation_map[out_pollutant], self.molecular_weights[self.speciation_map[out_pollutant]], self.speciation_profile.loc['default', out_pollutant]), message_level=3) @@ -85,14 +85,14 @@ class RecreationalBoatsSector(Sector): self.molecular_weights[self.speciation_map[out_pollutant]] ) * self.speciation_profile.loc['default', out_pollutant] else: - self.__logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( + self.logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( out_pollutant, 'pm10', self.molecular_weights['pm10'], self.speciation_profile.loc['default', out_pollutant], 'pm25', self.molecular_weights['pm25']), message_level=3) speciated_emissions[out_pollutant] = ((annual_emissions_dict['pm10'] / self.molecular_weights['pm10']) - (annual_emissions_dict['pm25'] / self.molecular_weights['pm25']) ) * self.speciation_profile.loc['default', out_pollutant] - self.__logger.write_time_log('RecreationalBoatsSector', 'speciate_dict', timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', 'speciate_dict', timeit.default_timer() - spent_time) return speciated_emissions def get_annual_emissions(self): @@ -108,8 +108,8 @@ class RecreationalBoatsSector(Sector): for in_p in self.source_pollutants: emissions_dict[in_p] = 
dataframe['AF'].multiply(dataframe['EF_{0}'.format(in_p)]).sum() - self.__logger.write_time_log('RecreationalBoatsSector', 'get_annual_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', 'get_annual_emissions', + timeit.default_timer() - spent_time) return emissions_dict def calculate_yearly_emissions(self, annual_emissions): @@ -121,8 +121,8 @@ class RecreationalBoatsSector(Sector): for pollutant, annual_value in annual_emissions.items(): new_dataframe[pollutant] = self.density_map['data'] * annual_value - self.__logger.write_time_log('RecreationalBoatsSector', 'calculate_yearly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', 'calculate_yearly_emissions', + timeit.default_timer() - spent_time) return new_dataframe def dates_to_month_weekday_hour(self, dataframe): @@ -131,8 +131,8 @@ class RecreationalBoatsSector(Sector): dataframe['weekday'] = dataframe['date'].dt.weekday dataframe['hour'] = dataframe['date'].dt.hour - self.__logger.write_time_log('RecreationalBoatsSector', 'dates_to_month_weekday_hour', - timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', 'dates_to_month_weekday_hour', + timeit.default_timer() - spent_time) return dataframe def calculate_hourly_emissions(self, annual_distribution): @@ -174,13 +174,13 @@ class RecreationalBoatsSector(Sector): dataframe[self.output_pollutants] = dataframe[self.output_pollutants].mul(dataframe['HF'], axis=0) dataframe.drop(columns=['hour', 'HF'], inplace=True) - self.__logger.write_time_log('RecreationalBoatsSector', 'calculate_hourly_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('RecreationalBoatsSector', 'calculate_hourly_emissions', + timeit.default_timer() - spent_time) return dataframe def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + 
self.logger.write_log('\tCalculating emissions') annual_emissions = self.get_annual_emissions() annual_emissions = self.speciate_dict(annual_emissions) @@ -190,7 +190,7 @@ class RecreationalBoatsSector(Sector): distribution.drop(columns=['geometry'], inplace=True) distribution['layer'] = 0 distribution.set_index(['FID', 'layer', 'tstep'], inplace=True) - self.__logger.write_log('\t\tRecreational boats emissions calculated', message_level=2) - self.__logger.write_time_log('RecreationalBoatsSector', 'calculate_emissions', - timeit.default_timer() - spent_time) + self.logger.write_log('\t\tRecreational boats emissions calculated', message_level=2) + self.logger.write_time_log('RecreationalBoatsSector', 'calculate_emissions', + timeit.default_timer() - spent_time) return distribution diff --git a/hermesv3_bu/sectors/residential_sector.py b/hermesv3_bu/sectors/residential_sector.py index ce8c0c1..4f34981 100755 --- a/hermesv3_bu/sectors/residential_sector.py +++ b/hermesv3_bu/sectors/residential_sector.py @@ -55,7 +55,7 @@ class ResidentialSector(Sector): prov_shapefile, ccaa_shapefile, population_density_map, population_type_map, create_pop_csv=False) self.heating_degree_day_path = heating_degree_day_path self.temperature_path = temperature_path - self.__logger.write_time_log('ResidentialSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', '__init__', timeit.default_timer() - spent_time) def read_ef_file(self, path): """ @@ -73,7 +73,7 @@ class ResidentialSector(Sector): df_ef = pd.read_csv(path) df_ef = df_ef.loc[df_ef['fuel_type'].isin(self.fuel_list), ['fuel_type'] + self.source_pollutants] - self.__logger.write_time_log('ResidentialSector', 'read_ef_file', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'read_ef_file', timeit.default_timer() - spent_time) return df_ef def calculate_num_days(self): @@ -86,7 +86,7 @@ class ResidentialSector(Sector): for key, value in 
zip(days, num_days): day_dict[key] = value - self.__logger.write_time_log('ResidentialSector', 'calculate_num_days', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'calculate_num_days', timeit.default_timer() - spent_time) return day_dict def read_residential_spatial_proxies(self, path): @@ -95,8 +95,8 @@ class ResidentialSector(Sector): spatial_proxies = pd.read_csv(path) spatial_proxies = spatial_proxies.loc[spatial_proxies['fuel_type'].isin(self.fuel_list), :] - self.__logger.write_time_log('ResidentialSector', 'read_residential_spatial_proxies', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'read_residential_spatial_proxies', + timeit.default_timer() - spent_time) return spatial_proxies def get_spatial_proxy(self, fuel_type): @@ -119,7 +119,7 @@ class ResidentialSector(Sector): else: proxy_type = proxy[1] - self.__logger.write_time_log('ResidentialSector', 'get_spatial_proxy', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'get_spatial_proxy', timeit.default_timer() - spent_time) return {'nut_level': nut_level, 'proxy_type': proxy_type} def to_dst_resolution(self, src_distribution): @@ -144,29 +144,29 @@ class ResidentialSector(Sector): geometry=self.grid.shapefile.loc[src_distribution.index, 'geometry']) src_distribution.reset_index(inplace=True) - self.__logger.write_time_log('ResidentialSector', 'to_dst_resolution', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'to_dst_resolution', timeit.default_timer() - spent_time) return src_distribution def get_fuel_distribution(self, prov_shapefile, ccaa_shapefile, population_density_map, population_type_map, create_pop_csv=False): spent_time = timeit.default_timer() - self.__logger.write_log('Calculating fuel distribution', message_level=2) + self.logger.write_log('Calculating fuel distribution', message_level=2) fuel_distribution_path = 
os.path.join(self.auxiliary_dir, 'residential', 'fuel_distribution.shp') if not os.path.exists(fuel_distribution_path): population_density = os.path.join(self.auxiliary_dir, 'residential', 'population_density.tif') - if self.__comm.Get_rank() == 0: - population_density = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + if self.comm.Get_rank() == 0: + population_density = IoRaster(self.comm).clip_raster_with_shapefile_poly( population_density_map, self.clip.shapefile, population_density) - population_density = IoRaster(self.__comm).to_shapefile_parallel(population_density) + population_density = IoRaster(self.comm).to_shapefile_parallel(population_density) population_density.rename(columns={'data': 'pop'}, inplace=True) population_type = os.path.join(self.auxiliary_dir, 'residential', 'population_type.tif') - if self.__comm.Get_rank() == 0: - population_type = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + if self.comm.Get_rank() == 0: + population_type = IoRaster(self.comm).clip_raster_with_shapefile_poly( population_type_map, self.clip.shapefile, population_type) - population_type = IoRaster(self.__comm).to_shapefile_parallel(population_type) + population_type = IoRaster(self.comm).to_shapefile_parallel(population_type) population_type.rename(columns={'data': 'type'}, inplace=True) population_type['type'] = population_type['type'].astype(np.int16) @@ -180,7 +180,7 @@ class ResidentialSector(Sector): population_density = self.add_nut_code(population_density, ccaa_shapefile, nut_value='nuts2_id') population_density.rename(columns={'nut_code': 'ccaa'}, inplace=True) population_density = population_density.loc[population_density['ccaa'] != -999, :] - population_density = IoShapefile(self.__comm).balance(population_density) + population_density = IoShapefile(self.comm).balance(population_density) if create_pop_csv: population_density.loc[:, ['prov', 'pop', 'type']].groupby(['prov', 'type']).sum().reset_index().to_csv( @@ -263,18 +263,18 @@ class 
ResidentialSector(Sector): fuel] = population_density['pop'].multiply( energy_consumption / total_pop) fuel_distribution = self.to_dst_resolution(fuel_distribution) - fuel_distribution = IoShapefile(self.__comm).gather_shapefile(fuel_distribution, rank=0) - if self.__comm.Get_rank() == 0: + fuel_distribution = IoShapefile(self.comm).gather_shapefile(fuel_distribution, rank=0) + if self.comm.Get_rank() == 0: fuel_distribution.groupby('FID').sum() - IoShapefile(self.__comm).write_shapefile_serial(fuel_distribution, fuel_distribution_path) + IoShapefile(self.comm).write_shapefile_serial(fuel_distribution, fuel_distribution_path) else: fuel_distribution = None - fuel_distribution = IoShapefile(self.__comm).split_shapefile(fuel_distribution) + fuel_distribution = IoShapefile(self.comm).split_shapefile(fuel_distribution) else: - fuel_distribution = IoShapefile(self.__comm).read_shapefile_parallel(fuel_distribution_path) + fuel_distribution = IoShapefile(self.comm).read_shapefile_parallel(fuel_distribution_path) fuel_distribution.set_index('FID', inplace=True) - self.__logger.write_time_log('ResidentialSector', 'get_fuel_distribution', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'get_fuel_distribution', timeit.default_timer() - spent_time) return fuel_distribution def calculate_daily_distribution(self, day): @@ -292,7 +292,7 @@ class ResidentialSector(Sector): geometry_shp['centroid'] = geometry_shp.centroid geometry_shp.drop(columns='geometry', inplace=True) - meteo = IoNetcdf(self.__comm).get_data_from_netcdf( + meteo = IoNetcdf(self.comm).get_data_from_netcdf( os.path.join(self.temperature_path, 'tas_{0}{1}.nc'.format(day.year, str(day.month).zfill(2))), 'tas', 'daily', day, geometry_shp) # From K to Celsius degrees @@ -302,7 +302,7 @@ class ResidentialSector(Sector): meteo['hdd'] = np.maximum(self.hdd_base_temperature - meteo['tas'], 1) meteo.drop('tas', axis=1, inplace=True) - meteo['hdd_mean'] = 
IoNetcdf(self.__comm).get_data_from_netcdf(self.heating_degree_day_path.replace( + meteo['hdd_mean'] = IoNetcdf(self.comm).get_data_from_netcdf(self.heating_degree_day_path.replace( '', str(day.year)), 'HDD', 'yearly', day, geometry_shp).loc[:, 'HDD'] daily_distribution = self.fuel_distribution.copy() @@ -332,8 +332,8 @@ class ResidentialSector(Sector): daily_distribution.drop(['hdd', 'hdd_mean'], axis=1, inplace=True) - self.__logger.write_time_log('ResidentialSector', 'calculate_daily_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'calculate_daily_distribution', + timeit.default_timer() - spent_time) return daily_distribution def get_fuel_distribution_by_day(self): @@ -343,8 +343,8 @@ class ResidentialSector(Sector): for day in self.day_dict.keys(): daily_distribution[day] = self.calculate_daily_distribution(day) - self.__logger.write_time_log('ResidentialSector', 'get_fuel_distribution_by_day', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'get_fuel_distribution_by_day', + timeit.default_timer() - spent_time) return daily_distribution def calculate_hourly_distribution(self, fuel_distribution): @@ -362,8 +362,8 @@ class ResidentialSector(Sector): ) fuel_distribution.drop('hour', axis=1, inplace=True) - self.__logger.write_time_log('ResidentialSector', 'calculate_hourly_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'calculate_hourly_distribution', + timeit.default_timer() - spent_time) return fuel_distribution def add_dates(self, df_by_day): @@ -380,7 +380,7 @@ class ResidentialSector(Sector): dataframe_by_day = pd.concat(df_list, ignore_index=True) dataframe_by_day = self.to_timezone(dataframe_by_day) - self.__logger.write_time_log('ResidentialSector', 'add_dates', timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'add_dates', timeit.default_timer() - spent_time) return 
dataframe_by_day @@ -395,8 +395,8 @@ class ResidentialSector(Sector): fuel_distribution = self.calculate_hourly_distribution(fuel_distribution_by_day) - self.__logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', + timeit.default_timer() - spent_time) return fuel_distribution def calculate_emissions_from_fuel_distribution(self, fuel_distribution): @@ -407,8 +407,8 @@ class ResidentialSector(Sector): emissions[in_p] = 0 for i, fuel_type_ef in self.ef_profiles.iterrows(): emissions[in_p] += fuel_distribution.loc[:, fuel_type_ef['fuel_type']].multiply(fuel_type_ef[in_p]) - self.__logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'calculate_fuel_distribution_by_hour', + timeit.default_timer() - spent_time) return emissions @@ -467,13 +467,13 @@ class ResidentialSector(Sector): fuel_type_ef[in_p] * speciation_factor) emissions[out_p] = in_df.divide(self.molecular_weights[in_p]) - self.__logger.write_time_log('ResidentialSector', 'calculate_output_emissions_from_fuel_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ResidentialSector', 'calculate_output_emissions_from_fuel_distribution', + timeit.default_timer() - spent_time) return emissions def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') fuel_distribution_by_hour = self.calculate_fuel_distribution_by_hour() emissions = self.calculate_output_emissions_from_fuel_distribution(fuel_distribution_by_hour) @@ -481,6 +481,6 @@ class ResidentialSector(Sector): emissions['layer'] = 0 emissions.set_index(['FID', 'layer', 'tstep'], inplace=True) - self.__logger.write_log('\t\tResidential emissions 
calculated', message_level=2) - self.__logger.write_time_log('ResidentialSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\t\tResidential emissions calculated', message_level=2) + self.logger.write_time_log('ResidentialSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/sector.py b/hermesv3_bu/sectors/sector.py index 46bc5c3..bac090a 100755 --- a/hermesv3_bu/sectors/sector.py +++ b/hermesv3_bu/sectors/sector.py @@ -76,8 +76,8 @@ class Sector(object): """ spent_time = timeit.default_timer() - self.__comm = comm - self.__logger = logger + self.comm = comm + self.logger = logger self.auxiliary_dir = auxiliary_dir self.grid = grid self.clip = clip @@ -98,7 +98,7 @@ class Sector(object): self.output_pollutants = list(self.speciation_map.keys()) - self.__logger.write_time_log('Sector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', '__init__', timeit.default_timer() - spent_time) def read_speciation_profiles(self, path): """ @@ -124,7 +124,7 @@ class Sector(object): dataframe = pd.read_csv(path) dataframe.set_index('ID', inplace=True) - self.__logger.write_time_log('Sector', 'read_speciation_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_speciation_profiles', timeit.default_timer() - spent_time) return dataframe def read_speciation_map(self, path): @@ -161,7 +161,7 @@ class Sector(object): dataframe = dataframe.loc[dataframe['src'].isin(self.source_pollutants), :] dataframe = dict(zip(dataframe['dst'], dataframe['src'])) - self.__logger.write_time_log('Sector', 'read_speciation_map', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_speciation_map', timeit.default_timer() - spent_time) return dataframe @@ -189,7 +189,7 @@ class Sector(object): # dataframe = dataframe.loc[dataframe['Specie'].isin(self.source_pollutants)] mol_wei = 
dict(zip(dataframe['Specie'], dataframe['MW'])) - self.__logger.write_time_log('Sector', 'read_molecular_weights', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_molecular_weights', timeit.default_timer() - spent_time) return mol_wei @@ -208,7 +208,7 @@ class Sector(object): """ spent_time = timeit.default_timer() dataframe = pd.read_csv(path, sep=sep) - self.__logger.write_time_log('Sector', 'read_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_profiles', timeit.default_timer() - spent_time) return dataframe @@ -234,7 +234,7 @@ class Sector(object): inplace=True) profiles.set_index('P_month', inplace=True) - self.__logger.write_time_log('Sector', 'read_monthly_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_monthly_profiles', timeit.default_timer() - spent_time) return profiles def read_weekly_profiles(self, path): @@ -258,7 +258,7 @@ class Sector(object): columns={'Monday': 0, 'Tuesday': 1, 'Wednesday': 2, 'Thursday': 3, 'Friday': 4, 'Saturday': 5, 'Sunday': 6, }, inplace=True) profiles.set_index('P_week', inplace=True) - self.__logger.write_time_log('Sector', 'read_weekly_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_weekly_profiles', timeit.default_timer() - spent_time) return profiles def read_hourly_profiles(self, path): @@ -283,7 +283,7 @@ class Sector(object): profiles.columns = profiles.columns.astype(int) profiles.rename(columns={-1: 'P_hour'}, inplace=True) profiles.set_index('P_hour', inplace=True) - self.__logger.write_time_log('Sector', 'read_hourly_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'read_hourly_profiles', timeit.default_timer() - spent_time) return profiles def calculate_rebalanced_weekly_profile(self, profile, date): @@ -306,7 +306,7 @@ class Sector(object): weekdays = self.calculate_weekdays(date) rebalanced_profile = 
self.calculate_weekday_factor_full_month(profile, weekdays) - self.__logger.write_time_log('Sector', 'calculate_rebalanced_weekly_profile', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'calculate_rebalanced_weekly_profile', timeit.default_timer() - spent_time) return rebalanced_profile @@ -332,7 +332,7 @@ class Sector(object): increment = float(num_days - weekdays_factors) / num_days for day in range(7): profile[day] = (increment + profile[day]) / num_days - self.__logger.write_time_log('Sector', 'calculate_weekday_factor_full_month', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'calculate_weekday_factor_full_month', timeit.default_timer() - spent_time) return profile @@ -354,7 +354,7 @@ class Sector(object): weekdays_dict = {} for i, day in enumerate(weekdays): weekdays_dict[i] = days.count(day) - self.__logger.write_time_log('Sector', 'calculate_weekdays', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'calculate_weekdays', timeit.default_timer() - spent_time) return weekdays_dict def add_dates(self, dataframe, drop_utc=True): @@ -385,7 +385,7 @@ class Sector(object): dataframe = self.to_timezone(dataframe) if drop_utc: dataframe.drop('date_utc', axis=1, inplace=True) - self.__logger.write_time_log('Sector', 'add_dates', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'add_dates', timeit.default_timer() - spent_time) return dataframe @@ -404,7 +404,7 @@ class Sector(object): tzfinder = TimezoneFinder() dataframe['timezone'] = dataframe.centroid.apply(lambda x: tzfinder.timezone_at(lng=x.x, lat=x.y)) dataframe.reset_index(inplace=True) - self.__logger.write_time_log('Sector', 'add_timezone', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'add_timezone', timeit.default_timer() - spent_time) return dataframe def to_timezone(self, dataframe): @@ -422,7 +422,7 @@ class Sector(object): lambda x: 
x.dt.tz_convert(x.name).dt.tz_localize(None)) dataframe.drop('timezone', axis=1, inplace=True) - self.__logger.write_time_log('Sector', 'to_timezone', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'to_timezone', timeit.default_timer() - spent_time) return dataframe @@ -456,7 +456,7 @@ class Sector(object): shapefile.rename(columns={nut_value: 'nut_code'}, inplace=True) shapefile.loc[shapefile['nut_code'].isna(), 'nut_code'] = -999 shapefile['nut_code'] = shapefile['nut_code'].astype(np.int64) - self.__logger.write_time_log('Sector', 'add_nut_code', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'add_nut_code', timeit.default_timer() - spent_time) return shapefile @@ -516,7 +516,7 @@ class Sector(object): df1 = df1.loc[~df1.geometry.is_empty].copy() df1.drop(['bbox', 'histreg', 'new_g'], axis=1, inplace=True) return_value = df1 - self.__logger.write_time_log('Sector', 'spatial_overlays', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'spatial_overlays', timeit.default_timer() - spent_time) return return_value @@ -530,18 +530,18 @@ class Sector(object): nearest = df2[geom2_col] == nearest_points(row[geom1_col], geom_union)[1] # Get the corresponding value from df2 (matching is based on the geometry) value = df2[nearest][src_column].get_values()[0] - self.__logger.write_time_log('Sector', 'nearest', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'nearest', timeit.default_timer() - spent_time) return value def speciate(self, dataframe, code='default'): spent_time = timeit.default_timer() - self.__logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) + self.logger.write_log('\t\tSpeciating {0} emissions'.format(code), message_level=2) new_dataframe = pd.DataFrame(index=dataframe.index, data=None) for out_pollutant in self.output_pollutants: if out_pollutant != 'PMC': - self.__logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( + 
self.logger.write_log("\t\t\t{0} = ({1}/{2})*{3}".format( out_pollutant, self.speciation_map[out_pollutant], self.molecular_weights[self.speciation_map[out_pollutant]], self.speciation_profile.loc[code, out_pollutant]), message_level=3) @@ -550,7 +550,7 @@ class Sector(object): self.molecular_weights[self.speciation_map[out_pollutant]]) * \ self.speciation_profile.loc[code, out_pollutant] else: - self.__logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( + self.logger.write_log("\t\t\t{0} = ({1}/{2} - {4}/{5})*{3}".format( out_pollutant, 'pm10', self.molecular_weights['pm10'], self.speciation_profile.loc[code, out_pollutant], 'pm25', self.molecular_weights['pm25']), message_level=3) @@ -559,33 +559,33 @@ class Sector(object): ((dataframe['pm10'] / self.molecular_weights['pm10']) - (dataframe['pm25'] / self.molecular_weights['pm25'])) * \ self.speciation_profile.loc[code, out_pollutant] - self.__logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'speciate', timeit.default_timer() - spent_time) return new_dataframe def get_output_pollutants(self, input_pollutant): spent_time = timeit.default_timer() return_value = [outs for outs, ints in self.speciation_map.items() if ints == input_pollutant] - self.__logger.write_time_log('Sector', 'get_output_pollutants', timeit.default_timer() - spent_time) + self.logger.write_time_log('Sector', 'get_output_pollutants', timeit.default_timer() - spent_time) return return_value def calculate_land_use_by_nut(self, land_use_raster_path, nut_shapefile_path, out_land_use_by_nut_path): # 1st Clip the raster lu_raster_path = os.path.join(self.auxiliary_dir, 'clipped_land_use.tif') - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: if not os.path.exists(lu_raster_path): - lu_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + lu_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( land_use_raster_path, 
self.clip.shapefile, lu_raster_path) # 2nd Raster to shapefile - land_use_shp = IoRaster(self.__comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) + land_use_shp = IoRaster(self.comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) # 3rd Add NUT code land_use_shp.drop(columns='CELL_ID', inplace=True) land_use_shp.rename(columns={'data': 'land_use'}, inplace=True) land_use_shp = self.add_nut_code(land_use_shp, nut_shapefile_path, nut_value='nuts2_id') land_use_shp = land_use_shp[land_use_shp['nut_code'] != -999] - land_use_shp = IoShapefile(self.__comm).balance(land_use_shp) + land_use_shp = IoShapefile(self.comm).balance(land_use_shp) # 4th Calculate land_use percent land_use_shp['area'] = land_use_shp.geometry.area @@ -599,31 +599,31 @@ class Sector(object): (land_use_shp['land_use'] == land_use) & (land_use_shp['nut_code'] == nut_code), 'area'].sum() land_use_by_nut.reset_index(inplace=True) - land_use_by_nut = IoShapefile(self.__comm).gather_shapefile(land_use_by_nut, rank=0) + land_use_by_nut = IoShapefile(self.comm).gather_shapefile(land_use_by_nut, rank=0) - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: land_use_by_nut = land_use_by_nut.groupby(['nuts2_id', 'land_use']).sum() land_use_by_nut.to_csv(out_land_use_by_nut_path) print('DONE -> {0}'.format(out_land_use_by_nut_path)) - self.__comm.Barrier() + self.comm.Barrier() def create_population_by_nut(self, population_raster_path, nut_shapefile_path, output_path, nut_column='nuts3_id'): # 1st Clip the raster - self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.__comm.Get_rank() == 0: - clipped_population_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + self.logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.comm.Get_rank() == 0: + clipped_population_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( population_raster_path, self.clip.shapefile, 
os.path.join(self.auxiliary_dir, 'traffic_area', 'pop.tif')) else: clipped_population_path = None # 2nd Raster to shapefile - self.__logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.__comm).to_shapefile_parallel( + self.logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.comm).to_shapefile_parallel( clipped_population_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) @@ -631,10 +631,10 @@ class Sector(object): pop_shp = pop_shp[pop_shp['nut_code'] != -999] pop_shp.rename(columns={'nut_code': nut_column}, inplace=True) - pop_shp = IoShapefile(self.__comm).gather_shapefile(pop_shp) - if self.__comm.Get_rank() == 0: + pop_shp = IoShapefile(self.comm).gather_shapefile(pop_shp) + if self.comm.Get_rank() == 0: popu_dist = pop_shp.groupby(nut_column).sum() popu_dist.to_csv(output_path) - self.__comm.Barrier() + self.comm.Barrier() return True diff --git a/hermesv3_bu/sectors/shipping_port_sector.py b/hermesv3_bu/sectors/shipping_port_sector.py index dc5cd28..88759a2 100755 --- a/hermesv3_bu/sectors/shipping_port_sector.py +++ b/hermesv3_bu/sectors/shipping_port_sector.py @@ -108,24 +108,24 @@ class ShippingPortSector(Sector): self.tonnage.set_index('code', inplace=True) self.load_factor = self.read_profiles(load_factor_path) self.power_values = self.read_profiles(power_path) - self.__logger.write_time_log('ShippingPortSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', '__init__', timeit.default_timer() - spent_time) def get_port_list(self): - if self.__comm.Get_rank() == 0: - port_shp = 
IoShapefile(self.__comm).read_shapefile_serial(self.maneuvering_shapefile_path) + if self.comm.Get_rank() == 0: + port_shp = IoShapefile(self.comm).read_shapefile_serial(self.maneuvering_shapefile_path) port_shp.drop(columns=['Name', 'Weight'], inplace=True) port_shp = gpd.sjoin(port_shp, self.clip.shapefile.to_crs(port_shp.crs), how='inner', op='intersects') port_list = np.unique(port_shp['code'].values) - if len(port_list) < self.__comm.Get_size(): + if len(port_list) < self.comm.Get_size(): error_exit("The chosen number of processors {0} exceeds the number of involved ports {1}.".format( - self.__comm.Get_size(), len(port_list)) + " Set {0} at shipping_port_processors value.".format( + self.comm.Get_size(), len(port_list)) + " Set {0} at shipping_port_processors value.".format( len(port_list))) - port_list = np.array_split(port_list, self.__comm.Get_size()) + port_list = np.array_split(port_list, self.comm.Get_size()) else: port_list = None - port_list = self.__comm.scatter(port_list, root=0) + port_list = self.comm.scatter(port_list, root=0) return list(port_list) @@ -151,7 +151,7 @@ class ShippingPortSector(Sector): columns={'January': 1, 'February': 2, 'March': 3, 'April': 4, 'May': 5, 'June': 6, 'July': 7, 'August': 8, 'September': 9, 'October': 10, 'November': 11, 'December': 12}, inplace=True) - self.__logger.write_time_log('ShippingPortSector', 'read_monthly_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'read_monthly_profiles', timeit.default_timer() - spent_time) return profiles @@ -182,7 +182,7 @@ class ShippingPortSector(Sector): dataframe.reset_index(inplace=True) dataframe = pd.merge(dataframe, shapefile.loc[:, ['code', 'timezone']], on='code') dataframe.set_index(['code', 'vessel'], inplace=True) - self.__logger.write_time_log('ShippingPortSector', 'add_timezone', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'add_timezone', timeit.default_timer() - 
spent_time) return dataframe @@ -218,7 +218,7 @@ class ShippingPortSector(Sector): dataframe.drop('date_utc', axis=1, inplace=True) dataframe.set_index(['code', 'vessel', 'tstep'], inplace=True) # del dataframe['date_utc'] - self.__logger.write_time_log('ShippingPortSector', 'add_dates', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'add_dates', timeit.default_timer() - spent_time) return dataframe @@ -379,7 +379,7 @@ class ShippingPortSector(Sector): lambda x: get_ef(x, 'main', pollutant)) dataframe['EF_a_{0}'.format(pollutant)] = dataframe.groupby('vessel').apply( lambda x: get_ef(x, 'aux', pollutant)) - self.__logger.write_time_log('ShippingPortSector', 'get_constants', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'get_constants', timeit.default_timer() - spent_time) return dataframe @@ -407,8 +407,8 @@ class ShippingPortSector(Sector): hoteling['{0}'.format(pollutant)] += \ constants['P'] * constants['Rae'] * constants['N'] * constants['LF_ha'] * constants['T_h'] * \ constants['EF_a_{0}'.format(pollutant)] - self.__logger.write_time_log('ShippingPortSector', 'calculate_yearly_emissions_by_port_vessel', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'calculate_yearly_emissions_by_port_vessel', + timeit.default_timer() - spent_time) return manoeuvring, hoteling @@ -426,8 +426,8 @@ class ShippingPortSector(Sector): dataframe['month'] = dataframe['date'].dt.month dataframe['weekday'] = dataframe['date'].dt.weekday dataframe['hour'] = dataframe['date'].dt.hour - self.__logger.write_time_log('ShippingPortSector', 'dates_to_month_weekday_hour', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'dates_to_month_weekday_hour', + timeit.default_timer() - spent_time) return dataframe @@ -472,8 +472,8 @@ class ShippingPortSector(Sector): operations['hour'] = 'max' operations['date'] = 'max' dataframe = 
dataframe.groupby(level=['code', 'tstep']).agg(operations) - self.__logger.write_time_log('ShippingPortSector', 'calculate_monthly_emissions_by_port', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'calculate_monthly_emissions_by_port', + timeit.default_timer() - spent_time) return dataframe @@ -529,8 +529,8 @@ class ShippingPortSector(Sector): dataframe['HF'] = dataframe.groupby('hour').apply(get_hf) dataframe[self.source_pollutants] = dataframe[self.source_pollutants].multiply(dataframe['HF'], axis=0) dataframe.drop(columns=['hour', 'HF'], inplace=True) - self.__logger.write_time_log('ShippingPortSector', 'calculate_hourly_emissions_by_port', - timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'calculate_hourly_emissions_by_port', + timeit.default_timer() - spent_time) return dataframe @@ -565,7 +565,7 @@ class ShippingPortSector(Sector): dataframe[self.source_pollutants] = dataframe[self.source_pollutants].multiply(dataframe['Weight'], axis=0) dataframe.drop(columns=['Weight'], inplace=True) - self.__logger.write_time_log('ShippingPortSector', 'to_port_geometry', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'to_port_geometry', timeit.default_timer() - spent_time) return dataframe @@ -597,7 +597,7 @@ class ShippingPortSector(Sector): dataframe['layer'] = 0 dataframe = dataframe.loc[:, ~dataframe.columns.duplicated()] dataframe = dataframe.groupby(['FID', 'layer', 'tstep']).sum() - self.__logger.write_time_log('ShippingPortSector', 'to_grid_geometry', timeit.default_timer() - spent_time) + self.logger.write_time_log('ShippingPortSector', 'to_grid_geometry', timeit.default_timer() - spent_time) return dataframe @@ -609,9 +609,9 @@ class ShippingPortSector(Sector): :rtype: padas.DataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') - 
self.__logger.write_log('\t\tCalculating yearly emissions', message_level=2) + self.logger.write_log('\t\tCalculating yearly emissions', message_level=2) manoeuvring, hoteling = self.calculate_yearly_emissions_by_port_vessel() manoeuvring = self.add_timezone(manoeuvring, self.maneuvering_shapefile_path) @@ -623,18 +623,18 @@ class ShippingPortSector(Sector): manoeuvring = self.dates_to_month_weekday_hour(manoeuvring) hoteling = self.dates_to_month_weekday_hour(hoteling) - self.__logger.write_log('\t\tCalculating monthly emissions', message_level=2) + self.logger.write_log('\t\tCalculating monthly emissions', message_level=2) manoeuvring = self.calculate_monthly_emissions_by_port(manoeuvring) hoteling = self.calculate_monthly_emissions_by_port(hoteling) - self.__logger.write_log('\t\tCalculating hourly emissions', message_level=2) + self.logger.write_log('\t\tCalculating hourly emissions', message_level=2) manoeuvring = self.calculate_hourly_emissions_by_port(manoeuvring) hoteling = self.calculate_hourly_emissions_by_port(hoteling) # TODO pre-calculate distribution during initialization. 
- self.__logger.write_log('\t\tDistributing emissions', message_level=2) + self.logger.write_log('\t\tDistributing emissions', message_level=2) manoeuvring = self.to_port_geometry(manoeuvring, self.maneuvering_shapefile_path) hoteling = self.to_port_geometry(hoteling, self.hoteling_shapefile_path) @@ -647,6 +647,6 @@ class ShippingPortSector(Sector): dataframe = self.speciate(dataframe, 'default') - self.__logger.write_log('\t\tShipping port emissions calculated', message_level=2) - self.__logger.write_time_log('ShippingPortSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\t\tShipping port emissions calculated', message_level=2) + self.logger.write_time_log('ShippingPortSector', 'calculate_emissions', timeit.default_timer() - spent_time) return dataframe diff --git a/hermesv3_bu/sectors/solvents_sector.py b/hermesv3_bu/sectors/solvents_sector.py index 20b0fd6..8468fb7 100755 --- a/hermesv3_bu/sectors/solvents_sector.py +++ b/hermesv3_bu/sectors/solvents_sector.py @@ -151,7 +151,7 @@ class SolventsSector(Sector): nut2_shapefile_path, point_sources_shapefile_path, point_sources_weight_by_nut2_path) self.yearly_emissions_path = yearly_emissions_by_nut2_path - self.__logger.write_time_log('SolventsSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', '__init__', timeit.default_timer() - spent_time) def read_proxies(self, path): """ @@ -179,7 +179,7 @@ class SolventsSector(Sector): proxies_df.loc[proxies_df['spatial_proxy'] == 'shapefile', 'proxy_name'] = \ proxies_df['industry_code'].map(PROXY_NAMES) - self.__logger.write_time_log('SolventsSector', 'read_proxies', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'read_proxies', timeit.default_timer() - spent_time) return proxies_df def check_profiles(self): @@ -227,7 +227,7 @@ class SolventsSector(Sector): error_exit("The following speciation profile IDs reported in the solvent proxies CSV 
file do not appear " + "in the speciation profiles file. {0}".format(spec_res)) - self.__logger.write_time_log('SolventsSector', 'check_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'check_profiles', timeit.default_timer() - spent_time) return True def read_yearly_emissions(self, path, nut_list): @@ -256,7 +256,7 @@ class SolventsSector(Sector): year_emis.set_index(['nuts2_id', 'snap'], inplace=True) year_emis.drop(columns=['gnfr_description', 'gnfr', 'snap_description', 'nuts2_na'], inplace=True) - self.__logger.write_time_log('SolventsSector', 'read_yearly_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'read_yearly_emissions', timeit.default_timer() - spent_time) return year_emis def get_population_by_nut2(self, path): @@ -275,7 +275,7 @@ class SolventsSector(Sector): pop_by_nut2.set_index('nuts2_id', inplace=True) pop_by_nut2 = pop_by_nut2.to_dict()['pop'] - self.__logger.write_time_log('SolventsSector', 'get_pop_by_nut2', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_pop_by_nut2', timeit.default_timer() - spent_time) return pop_by_nut2 def get_point_sources_weights_by_nut2(self, path, proxy_name): @@ -300,8 +300,8 @@ class SolventsSector(Sector): weights_by_nut2.set_index("nuts2_id", inplace=True) weights_by_nut2 = weights_by_nut2.to_dict()['weight'] - self.__logger.write_time_log('SolventsSector', 'get_point_sources_weights_by_nut2', - timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_point_sources_weights_by_nut2', + timeit.default_timer() - spent_time) return weights_by_nut2 def get_land_use_by_nut2(self, path, land_uses, nut_codes): @@ -327,7 +327,7 @@ class SolventsSector(Sector): land_use_by_nut2 = land_use_by_nut2[land_use_by_nut2['land_use'].isin(land_uses)] land_use_by_nut2.set_index(['nuts2_id', 'land_use'], inplace=True) - self.__logger.write_time_log('SolventsSector', 
'get_land_use_by_nut2', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_land_use_by_nut2', timeit.default_timer() - spent_time) return land_use_by_nut2 def get_population_proxy(self, pop_raster_path, pop_by_nut2_path, nut2_shapefile_path): @@ -349,35 +349,35 @@ class SolventsSector(Sector): spent_time = timeit.default_timer() # 1st Clip the raster - self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.__comm.Get_rank() == 0: - pop_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + self.logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.comm.Get_rank() == 0: + pop_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( pop_raster_path, self.clip.shapefile, os.path.join(self.auxiliary_dir, 'solvents', 'pop.tif')) # 2nd Raster to shapefile - self.__logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.__comm).to_shapefile_parallel( + self.logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.comm).to_shapefile_parallel( pop_raster_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) pop_shp = self.add_nut_code(pop_shp, nut2_shapefile_path, nut_value='nuts2_id') pop_shp = pop_shp[pop_shp['nut_code'] != -999] - pop_shp = IoShapefile(self.__comm).balance(pop_shp) + pop_shp = IoShapefile(self.comm).balance(pop_shp) # pop_shp = IoShapefile(self.comm).split_shapefile(pop_shp) # 4th Calculate population percent - self.__logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) + 
self.logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) pop_by_nut2 = self.get_population_by_nut2(pop_by_nut2_path) pop_shp['tot_pop'] = pop_shp['nut_code'].map(pop_by_nut2) pop_shp['pop_percent'] = pop_shp['population'] / pop_shp['tot_pop'] pop_shp.drop(columns=['tot_pop', 'population'], inplace=True) # 5th Calculate percent by destiny cell - self.__logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) + self.logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) pop_shp.to_crs(self.grid.shapefile.crs, inplace=True) pop_shp['src_inter_fraction'] = pop_shp.geometry.area pop_shp = self.spatial_overlays(pop_shp.reset_index(), self.grid.shapefile.reset_index()) @@ -389,7 +389,7 @@ class SolventsSector(Sector): popu_dist = pop_shp.groupby(['FID', 'nut_code']).sum() popu_dist.rename(columns={'pop_percent': 'population'}, inplace=True) - self.__logger.write_time_log('SolventsSector', 'get_population_proxie', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_population_proxie', timeit.default_timer() - spent_time) return popu_dist def get_land_use_proxy(self, land_use_raster, land_use_by_nut2_path, land_uses, nut2_shapefile_path): @@ -413,31 +413,31 @@ class SolventsSector(Sector): """ spent_time = timeit.default_timer() # 1st Clip the raster - self.__logger.write_log("\t\tCreating clipped land use raster", message_level=3) + self.logger.write_log("\t\tCreating clipped land use raster", message_level=3) lu_raster_path = os.path.join(self.auxiliary_dir, 'solvents', 'lu_{0}.tif'.format( '_'.join([str(x) for x in land_uses]))) - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: if not os.path.exists(lu_raster_path): - lu_raster_path = IoRaster(self.__comm).clip_raster_with_shapefile_poly( + lu_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( land_use_raster, 
self.clip.shapefile, lu_raster_path, values=land_uses) # 2nd Raster to shapefile - self.__logger.write_log("\t\tRaster to shapefile", message_level=3) - land_use_shp = IoRaster(self.__comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) + self.logger.write_log("\t\tRaster to shapefile", message_level=3) + land_use_shp = IoRaster(self.comm).to_shapefile_parallel(lu_raster_path, gather=False, bcast=False) # 3rd Add NUT code - self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: land_use_shp.drop(columns='CELL_ID', inplace=True) land_use_shp.rename(columns={'data': 'land_use'}, inplace=True) land_use_shp = self.add_nut_code(land_use_shp, nut2_shapefile_path, nut_value='nuts2_id') land_use_shp = land_use_shp[land_use_shp['nut_code'] != -999] - land_use_shp = IoShapefile(self.__comm).balance(land_use_shp) + land_use_shp = IoShapefile(self.comm).balance(land_use_shp) # land_use_shp = IoShapefile(self.comm).split_shapefile(land_use_shp) # 4th Calculate land_use percent - self.__logger.write_log("\t\tCalculating land use percentage on source resolution", message_level=3) + self.logger.write_log("\t\tCalculating land use percentage on source resolution", message_level=3) land_use_shp['area'] = land_use_shp.geometry.area land_use_by_nut2 = self.get_land_use_by_nut2( @@ -449,7 +449,7 @@ class SolventsSector(Sector): land_use_shp.drop(columns='area', inplace=True) # 5th Calculate percent by dest_cell - self.__logger.write_log("\t\tCalculating land use percentage on destiny resolution", message_level=3) + self.logger.write_log("\t\tCalculating land use percentage on destiny resolution", message_level=3) land_use_shp.to_crs(self.grid.shapefile.crs, inplace=True) land_use_shp['src_inter_fraction'] = land_use_shp.geometry.area @@ -462,7 +462,7 @@ class SolventsSector(Sector): land_use_dist = land_use_shp.groupby(['FID', 
'nut_code']).sum() land_use_dist.rename(columns={'fraction': 'lu_{0}'.format('_'.join([str(x) for x in land_uses]))}, inplace=True) - self.__logger.write_time_log('SolventsSector', 'get_land_use_proxy', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_land_use_proxy', timeit.default_timer() - spent_time) return land_use_dist def get_point_shapefile_proxy(self, proxy_name, point_shapefile_path, point_sources_weight_by_nut2_path, @@ -488,17 +488,17 @@ class SolventsSector(Sector): """ spent_time = timeit.default_timer() - point_shapefile = IoShapefile(self.__comm).read_shapefile_parallel(point_shapefile_path) + point_shapefile = IoShapefile(self.comm).read_shapefile_parallel(point_shapefile_path) point_shapefile.drop(columns=['Empresa', 'Empleados', 'Ingresos', 'Consumos', 'LON', 'LAT'], inplace=True) point_shapefile = point_shapefile[point_shapefile['industry_c'] == [key for key, value in PROXY_NAMES.items() if value == proxy_name][0]] - point_shapefile = IoShapefile(self.__comm).balance(point_shapefile) + point_shapefile = IoShapefile(self.comm).balance(point_shapefile) point_shapefile.drop(columns=['industry_c'], inplace=True) point_shapefile = self.add_nut_code(point_shapefile, nut2_shapefile_path, nut_value='nuts2_id') point_shapefile = point_shapefile[point_shapefile['nut_code'] != -999] - point_shapefile = IoShapefile(self.__comm).gather_shapefile(point_shapefile, rank=0) - if self.__comm.Get_rank() == 0: + point_shapefile = IoShapefile(self.comm).gather_shapefile(point_shapefile, rank=0) + if self.comm.Get_rank() == 0: weight_by_nut2 = self.get_point_sources_weights_by_nut2( point_sources_weight_by_nut2_path, [key for key, value in PROXY_NAMES.items() if value == proxy_name][0]) @@ -507,12 +507,12 @@ class SolventsSector(Sector): point_shapefile.drop(columns=['weight'], inplace=True) # print(point_shapefile.groupby('nut_code')['weight'].sum()) - point_shapefile = IoShapefile(self.__comm).split_shapefile(point_shapefile) 
+ point_shapefile = IoShapefile(self.comm).split_shapefile(point_shapefile) point_shapefile = gpd.sjoin(point_shapefile.to_crs(self.grid.shapefile.crs), self.grid.shapefile.reset_index()) point_shapefile.drop(columns=['geometry', 'index_right'], inplace=True) point_shapefile = point_shapefile.groupby(['FID', 'nut_code']).sum() - self.__logger.write_time_log('SolventsSector', 'get_point_shapefile_proxy', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_point_shapefile_proxy', timeit.default_timer() - spent_time) return point_shapefile def get_proxy_shapefile(self, population_raster_path, population_nuts2_path, land_uses_raster_path, @@ -551,13 +551,13 @@ class SolventsSector(Sector): """ spent_time = timeit.default_timer() - self.__logger.write_log("Getting proxies shapefile", message_level=1) + self.logger.write_log("Getting proxies shapefile", message_level=1) proxy_names_list = np.unique(self.proxies_map['proxy_name']) proxy_path = os.path.join(self.auxiliary_dir, 'solvents', 'proxy_distributions.shp') if not os.path.exists(proxy_path): proxy_list = [] for proxy_name in proxy_names_list: - self.__logger.write_log("\tGetting proxy for {0}".format(proxy_name), message_level=2) + self.logger.write_log("\tGetting proxy for {0}".format(proxy_name), message_level=2) if proxy_name == 'population': proxy = self.get_population_proxy(population_raster_path, population_nuts2_path, nut2_shapefile_path) @@ -569,10 +569,10 @@ class SolventsSector(Sector): else: proxy = self.get_point_shapefile_proxy(proxy_name, point_sources_shapefile_path, point_sources_weight_by_nut2_path, nut2_shapefile_path) - proxy = IoShapefile(self.__comm).gather_shapefile(proxy.reset_index()) - if self.__comm.Get_rank() == 0: + proxy = IoShapefile(self.comm).gather_shapefile(proxy.reset_index()) + if self.comm.Get_rank() == 0: proxy_list.append(proxy) - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: proxies = pd.concat(proxy_list, sort=False) 
proxies['FID'] = proxies['FID'].astype(int) proxies['nut_code'] = proxies['nut_code'].astype(int) @@ -583,18 +583,18 @@ class SolventsSector(Sector): proxies = GeoDataFrame( proxies, geometry=self.grid.shapefile.loc[proxies.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - IoShapefile(self.__comm).write_shapefile_serial(proxies.reset_index(), proxy_path) + IoShapefile(self.comm).write_shapefile_serial(proxies.reset_index(), proxy_path) else: proxies = None else: - if self.__comm.Get_rank() == 0: - proxies = IoShapefile(self.__comm).read_shapefile_serial(proxy_path) + if self.comm.Get_rank() == 0: + proxies = IoShapefile(self.comm).read_shapefile_serial(proxy_path) proxies.set_index(['FID', 'nut_code'], inplace=True) else: proxies = None - proxies = IoShapefile(self.__comm).split_shapefile(proxies) + proxies = IoShapefile(self.comm).split_shapefile(proxies) - self.__logger.write_time_log('SolventsSector', 'get_proxy_shapefile', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'get_proxy_shapefile', timeit.default_timer() - spent_time) return proxies def calculate_hourly_emissions(self, yearly_emissions): @@ -628,7 +628,7 @@ class SolventsSector(Sector): spent_time = timeit.default_timer() - self.__logger.write_log('\tHourly disaggregation', message_level=2) + self.logger.write_log('\tHourly disaggregation', message_level=2) emissions = self.add_dates(yearly_emissions, drop_utc=True) emissions['month'] = emissions['date'].dt.month @@ -647,7 +647,7 @@ class SolventsSector(Sector): emissions.drop(columns=['temp_factor'], inplace=True) emissions.set_index(['FID', 'snap', 'tstep'], inplace=True) - self.__logger.write_time_log('SolventsSector', 'calculate_hourly_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'calculate_hourly_emissions', timeit.default_timer() - spent_time) return emissions def distribute_yearly_emissions(self): @@ -658,7 +658,7 @@ class 
SolventsSector(Sector): :rtype: GeoDataFrame """ spent_time = timeit.default_timer() - self.__logger.write_log('\t\tYearly distribution', message_level=2) + self.logger.write_log('\t\tYearly distribution', message_level=2) yearly_emis = self.read_yearly_emissions( self.yearly_emissions_path, np.unique(self.proxy.index.get_level_values('nut_code'))) @@ -686,7 +686,7 @@ class SolventsSector(Sector): emis = pd.concat(emis_list).sort_index() emis = emis[emis['nmvoc'] > 0] - self.__logger.write_time_log('SolventsSector', 'distribute_yearly_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'distribute_yearly_emissions', timeit.default_timer() - spent_time) return emis def speciate(self, dataframe, code='default'): @@ -709,12 +709,12 @@ class SolventsSector(Sector): return x[[out_p]] spent_time = timeit.default_timer() - self.__logger.write_log('\tSpeciation emissions', message_level=2) + self.logger.write_log('\tSpeciation emissions', message_level=2) new_dataframe = gpd.GeoDataFrame(index=dataframe.index, data=None, crs=dataframe.crs, geometry=dataframe.geometry) for out_pollutant in self.output_pollutants: - self.__logger.write_log('\t\tSpeciating {0}'.format(out_pollutant), message_level=3) + self.logger.write_log('\t\tSpeciating {0}'.format(out_pollutant), message_level=3) new_dataframe[out_pollutant] = dataframe.groupby('P_spec').apply( lambda x: calculate_new_pollutant(x, out_pollutant)) new_dataframe.reset_index(inplace=True) @@ -722,7 +722,7 @@ class SolventsSector(Sector): new_dataframe.drop(columns=['snap', 'geometry'], inplace=True) new_dataframe.set_index(['FID', 'tstep'], inplace=True) - self.__logger.write_time_log('SolventsSector', 'speciate', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'speciate', timeit.default_timer() - spent_time) return new_dataframe def calculate_emissions(self): @@ -733,7 +733,7 @@ class SolventsSector(Sector): :rtype: DataFrame """ spent_time = 
timeit.default_timer() - self.__logger.write_log('\tCalculating emissions') + self.logger.write_log('\tCalculating emissions') emissions = self.distribute_yearly_emissions() emissions = self.calculate_hourly_emissions(emissions) @@ -743,5 +743,5 @@ class SolventsSector(Sector): emissions['layer'] = 0 emissions = emissions.groupby(['FID', 'layer', 'tstep']).sum() - self.__logger.write_time_log('SolventsSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('SolventsSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/traffic_area_sector.py b/hermesv3_bu/sectors/traffic_area_sector.py index 5a01651..44e9ae0 100755 --- a/hermesv3_bu/sectors/traffic_area_sector.py +++ b/hermesv3_bu/sectors/traffic_area_sector.py @@ -67,7 +67,7 @@ class TrafficAreaSector(Sector): else: self.small_cities = None - self.__logger.write_time_log('TrafficAreaSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', '__init__', timeit.default_timer() - spent_time) def get_population_by_nut2(self, path): """ @@ -85,7 +85,7 @@ class TrafficAreaSector(Sector): pop_by_nut3.set_index('nuts3_id', inplace=True) pop_by_nut3 = pop_by_nut3.to_dict()['population'] - self.__logger.write_time_log('TrafficAreaSector', 'get_pop_by_nut3', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'get_pop_by_nut3', timeit.default_timer() - spent_time) return pop_by_nut3 def get_population_percent(self, pop_raster_path, pop_by_nut_path, nut_shapefile_path): @@ -109,34 +109,34 @@ class TrafficAreaSector(Sector): pop_percent_path = os.path.join(self.auxiliary_dir, 'traffic_area', 'population_percent') if not os.path.exists(pop_percent_path): # 1st Clip the raster - self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.__comm.Get_rank() == 0: - pop_raster_path = 
IoRaster(self.__comm).clip_raster_with_shapefile_poly( + self.logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.comm.Get_rank() == 0: + pop_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( pop_raster_path, self.clip.shapefile, os.path.join(self.auxiliary_dir, 'traffic_area', 'pop.tif')) # 2nd Raster to shapefile - self.__logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.__comm).to_shapefile_parallel( + self.logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.comm).to_shapefile_parallel( pop_raster_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) pop_shp = self.add_nut_code(pop_shp, nut_shapefile_path, nut_value='nuts3_id') pop_shp = pop_shp[pop_shp['nut_code'] != -999] - pop_shp = IoShapefile(self.__comm).balance(pop_shp) + pop_shp = IoShapefile(self.comm).balance(pop_shp) # 4th Calculate population percent - self.__logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) + self.logger.write_log("\t\tCalculating population percentage on source resolution", message_level=3) pop_by_nut2 = self.get_population_by_nut2(pop_by_nut_path) pop_shp['tot_pop'] = pop_shp['nut_code'].map(pop_by_nut2) pop_shp['pop_percent'] = pop_shp['population'] / pop_shp['tot_pop'] pop_shp.drop(columns=['tot_pop', 'population'], inplace=True) # 5th Calculate percent by destiny cell - self.__logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) + self.logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) 
pop_shp.to_crs(self.grid.shapefile.crs, inplace=True) pop_shp['src_inter_fraction'] = pop_shp.geometry.area pop_shp = self.spatial_overlays(pop_shp.reset_index(), self.grid.shapefile.reset_index()) @@ -145,22 +145,22 @@ class TrafficAreaSector(Sector): pop_shp['pop_percent'] = pop_shp['pop_percent'] * pop_shp['src_inter_fraction'] pop_shp.drop(columns=['src_inter_fraction'], inplace=True) - pop_shp = IoShapefile(self.__comm).gather_shapefile(pop_shp) - if self.__comm.Get_rank() == 0: + pop_shp = IoShapefile(self.comm).gather_shapefile(pop_shp) + if self.comm.Get_rank() == 0: popu_dist = pop_shp.groupby(['FID', 'nut_code']).sum() popu_dist = GeoDataFrame( popu_dist, geometry=self.grid.shapefile.loc[popu_dist.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - IoShapefile(self.__comm).write_shapefile_serial(popu_dist.reset_index(), pop_percent_path) + IoShapefile(self.comm).write_shapefile_serial(popu_dist.reset_index(), pop_percent_path) else: popu_dist = None - popu_dist = IoShapefile(self.__comm).split_shapefile(popu_dist) + popu_dist = IoShapefile(self.comm).split_shapefile(popu_dist) else: - popu_dist = IoShapefile(self.__comm).read_shapefile_parallel(pop_percent_path) + popu_dist = IoShapefile(self.comm).read_shapefile_parallel(pop_percent_path) popu_dist.set_index(['FID', 'nut_code'], inplace=True) - self.__logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) return popu_dist def get_population(self, pop_raster_path, nut_shapefile_path): @@ -181,28 +181,28 @@ class TrafficAreaSector(Sector): pop_path = os.path.join(self.auxiliary_dir, 'traffic_area', 'population_small') if not os.path.exists(pop_path): # 1st Clip the raster - self.__logger.write_log("\t\tCreating clipped population raster", message_level=3) - if self.__comm.Get_rank() == 0: - pop_raster_path = 
IoRaster(self.__comm).clip_raster_with_shapefile_poly( + self.logger.write_log("\t\tCreating clipped population raster", message_level=3) + if self.comm.Get_rank() == 0: + pop_raster_path = IoRaster(self.comm).clip_raster_with_shapefile_poly( pop_raster_path, self.clip.shapefile, os.path.join(self.auxiliary_dir, 'traffic_area', 'pop.tif')) # 2nd Raster to shapefile - self.__logger.write_log("\t\tRaster to shapefile", message_level=3) - pop_shp = IoRaster(self.__comm).to_shapefile_parallel( + self.logger.write_log("\t\tRaster to shapefile", message_level=3) + pop_shp = IoRaster(self.comm).to_shapefile_parallel( pop_raster_path, gather=False, bcast=False, crs={'init': 'epsg:4326'}) # 3rd Add NUT code - self.__logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) + self.logger.write_log("\t\tAdding nut codes to the shapefile", message_level=3) # if self.comm.Get_rank() == 0: pop_shp.drop(columns='CELL_ID', inplace=True) pop_shp.rename(columns={'data': 'population'}, inplace=True) pop_shp = self.add_nut_code(pop_shp, nut_shapefile_path, nut_value='ORDER08') pop_shp = pop_shp[pop_shp['nut_code'] != -999] - pop_shp = IoShapefile(self.__comm).balance(pop_shp) + pop_shp = IoShapefile(self.comm).balance(pop_shp) # 4th Calculate percent by destiny cell - self.__logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) + self.logger.write_log("\t\tCalculating population percentage on destiny resolution", message_level=3) pop_shp.to_crs(self.grid.shapefile.crs, inplace=True) pop_shp['src_inter_fraction'] = pop_shp.geometry.area pop_shp = self.spatial_overlays(pop_shp.reset_index(), self.grid.shapefile.reset_index()) @@ -211,22 +211,22 @@ class TrafficAreaSector(Sector): pop_shp['population'] = pop_shp['population'] * pop_shp['src_inter_fraction'] pop_shp.drop(columns=['src_inter_fraction', 'nut_code'], inplace=True) - pop_shp = IoShapefile(self.__comm).gather_shapefile(pop_shp) - if self.__comm.Get_rank() == 0: + 
pop_shp = IoShapefile(self.comm).gather_shapefile(pop_shp) + if self.comm.Get_rank() == 0: popu_dist = pop_shp.groupby(['FID']).sum() popu_dist = GeoDataFrame( popu_dist, geometry=self.grid.shapefile.loc[popu_dist.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - IoShapefile(self.__comm).write_shapefile_serial(popu_dist.reset_index(), pop_path) + IoShapefile(self.comm).write_shapefile_serial(popu_dist.reset_index(), pop_path) else: popu_dist = None - popu_dist = IoShapefile(self.__comm).split_shapefile(popu_dist) + popu_dist = IoShapefile(self.comm).split_shapefile(popu_dist) else: - popu_dist = IoShapefile(self.__comm).read_shapefile_parallel(pop_path) + popu_dist = IoShapefile(self.comm).read_shapefile_parallel(pop_path) popu_dist.set_index(['FID'], inplace=True) - self.__logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'get_population_percent', timeit.default_timer() - spent_time) return popu_dist def init_evaporative(self, gasoline_path): @@ -243,20 +243,20 @@ class TrafficAreaSector(Sector): veh_cell_path = os.path.join(self.auxiliary_dir, 'traffic_area', 'vehicle_by_cell') if not os.path.exists(veh_cell_path): veh_cell = self.make_vehicles_by_cell(gasoline_path) - IoShapefile(self.__comm).write_shapefile_parallel(veh_cell.reset_index(), veh_cell_path) + IoShapefile(self.comm).write_shapefile_parallel(veh_cell.reset_index(), veh_cell_path) else: - self.__logger.write_log('\t\tReading vehicle shapefile by cell.', message_level=3) - veh_cell = IoShapefile(self.__comm).read_shapefile_parallel(veh_cell_path) + self.logger.write_log('\t\tReading vehicle shapefile by cell.', message_level=3) + veh_cell = IoShapefile(self.comm).read_shapefile_parallel(veh_cell_path) veh_cell.set_index('FID', inplace=True) - self.__logger.write_time_log('TrafficAreaSector', 'init_evaporative', timeit.default_timer() - spent_time) + 
self.logger.write_time_log('TrafficAreaSector', 'init_evaporative', timeit.default_timer() - spent_time) return veh_cell def init_small_cities(self, global_path, small_cities_shapefile): spent_time = timeit.default_timer() pop = self.get_population(global_path, small_cities_shapefile) - self.__logger.write_time_log('TrafficAreaSector', 'init_small_cities', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'init_small_cities', timeit.default_timer() - spent_time) return pop def read_vehicles_by_nut(self, path): @@ -268,7 +268,7 @@ class TrafficAreaSector(Sector): vehicles_by_nut = pd.DataFrame(vehicles_by_nut.values.T, index=nut_list, columns=vehicle_list) vehicles_by_nut.index.name = 'nuts3_id' - self.__logger.write_time_log('TrafficAreaSector', 'read_vehicles_by_nut', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'read_vehicles_by_nut', timeit.default_timer() - spent_time) return vehicles_by_nut def make_vehicles_by_cell(self, gasoline_path): @@ -282,19 +282,19 @@ class TrafficAreaSector(Sector): vehicle_by_cell[vehicle_list] = vehicle_by_cell[vehicle_list].multiply( vehicle_by_cell['pop_percent'], axis='index') vehicle_by_cell.drop(columns=['pop_percent'], inplace=True) - vehicle_by_cell = IoShapefile(self.__comm).gather_shapefile(vehicle_by_cell, rank=0) - if self.__comm.Get_rank() == 0: + vehicle_by_cell = IoShapefile(self.comm).gather_shapefile(vehicle_by_cell, rank=0) + if self.comm.Get_rank() == 0: vehicle_by_cell = vehicle_by_cell.groupby('FID').sum() else: vehicle_by_cell = None - vehicle_by_cell = IoShapefile(self.__comm).split_shapefile(vehicle_by_cell) + vehicle_by_cell = IoShapefile(self.comm).split_shapefile(vehicle_by_cell) vehicle_by_cell = GeoDataFrame( vehicle_by_cell, geometry=self.grid.shapefile.loc[vehicle_by_cell.index.get_level_values('FID'), 'geometry'].values, crs=self.grid.shapefile.crs) - self.__logger.write_time_log('TrafficAreaSector', 'make_vehicles_by_cell', 
timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'make_vehicles_by_cell', timeit.default_timer() - spent_time) return vehicle_by_cell def get_profiles_from_temperature(self, temperature, default=False): @@ -324,8 +324,8 @@ class TrafficAreaSector(Sector): temperature.loc[:, temp_list] = temperature[temp_list].add(second_min, axis=0) temperature.loc[:, temp_list] = temperature[temp_list].div(temperature[temp_list].sum(axis=1), axis=0) - self.__logger.write_time_log('TrafficAreaSector', 'get_profiles_from_temperature', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'get_profiles_from_temperature', + timeit.default_timer() - spent_time) return temperature def calculate_evaporative_emissions(self): @@ -347,7 +347,7 @@ class TrafficAreaSector(Sector): self.evaporative['c_lon'] = aux_df.centroid.x self.evaporative['centroid'] = aux_df.centroid - temperature = IoNetcdf(self.__comm).get_hourly_data_from_netcdf( + temperature = IoNetcdf(self.comm).get_hourly_data_from_netcdf( self.evaporative['c_lon'].min(), self.evaporative['c_lon'].max(), self.evaporative['c_lat'].min(), self.evaporative['c_lat'].max(), self.temperature_dir, 'tas', self.date_array) temperature.rename(columns={x: 't_{0}'.format(x) for x in range(len(self.date_array))}, inplace=True) @@ -365,7 +365,7 @@ class TrafficAreaSector(Sector): df1=self.evaporative, df2=temperature_mean, geom1_col='centroid', src_column='REC', axis=1) del self.evaporative['c_lat'], self.evaporative['c_lon'], self.evaporative['centroid'] - IoShapefile(self.__comm).write_shapefile_parallel( + IoShapefile(self.comm).write_shapefile_parallel( self.evaporative, os.path.join(self.auxiliary_dir, 'traffic_area', 'vehicle_by_cell')) else: del self.evaporative['c_lat'], self.evaporative['c_lon'], self.evaporative['centroid'] @@ -397,8 +397,8 @@ class TrafficAreaSector(Sector): self.evaporative.set_index(['FID', 'tstep'], inplace=True) - 
self.__logger.write_time_log('TrafficAreaSector', 'calculate_evaporative_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'calculate_evaporative_emissions', + timeit.default_timer() - spent_time) return self.evaporative def evaporative_temporal_distribution(self, temporal_profiles): @@ -416,8 +416,8 @@ class TrafficAreaSector(Sector): temporal_df_list.append(aux_temporal) df = pd.concat(temporal_df_list) - self.__logger.write_time_log('TrafficAreaSector', 'evaporative_temporal_distribution', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'evaporative_temporal_distribution', + timeit.default_timer() - spent_time) return df def speciate_evaporative(self): @@ -430,7 +430,7 @@ class TrafficAreaSector(Sector): # From g/day to mol/day speciated_df[p] = self.evaporative['nmvoc'] * self.speciation_profiles_evaporative.loc['default', p] - self.__logger.write_time_log('TrafficAreaSector', 'speciate_evaporative', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'speciate_evaporative', timeit.default_timer() - spent_time) return speciated_df def small_cities_emissions_by_population(self, pop_by_cell): @@ -442,8 +442,8 @@ class TrafficAreaSector(Sector): pop_by_cell[pollutant] = pop_by_cell['population'] * ef_df[pollutant].iloc[0] pop_by_cell.drop(columns=['population'], inplace=True) - self.__logger.write_time_log('TrafficAreaSector', 'small_cities_emissions_by_population', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'small_cities_emissions_by_population', + timeit.default_timer() - spent_time) return pop_by_cell def add_timezones(self, grid, default=False): @@ -464,7 +464,7 @@ class TrafficAreaSector(Sector): lambda x: tz.closest_timezone_at(lng=x['lons'], lat=x['lats'], delta_degree=inc), axis=1) inc += 1 - self.__logger.write_time_log('TrafficAreaSector', 'add_timezones', timeit.default_timer() - 
spent_time) + self.logger.write_time_log('TrafficAreaSector', 'add_timezones', timeit.default_timer() - spent_time) return grid def temporal_distribution_small(self, small_cities): @@ -509,8 +509,8 @@ class TrafficAreaSector(Sector): small_cities['date'] = small_cities['date'] + pd.to_timedelta(1, unit='h') df = pd.concat(df_list) - self.__logger.write_time_log('TrafficAreaSector', 'temporal_distribution_small', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'temporal_distribution_small', + timeit.default_timer() - spent_time) return df def calculate_small_cities_emissions(self): @@ -530,8 +530,8 @@ class TrafficAreaSector(Sector): # default=True) self.small_cities = self.temporal_distribution_small(self.small_cities) - self.__logger.write_time_log('TrafficAreaSector', 'calculate_small_cities_emissions', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'calculate_small_cities_emissions', + timeit.default_timer() - spent_time) return True @@ -550,22 +550,22 @@ class TrafficAreaSector(Sector): dataset['layer'] = 0 dataset = dataset.groupby(['FID', 'layer', 'tstep']).sum() - self.__logger.write_time_log('TrafficAreaSector', 'to_grid', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficAreaSector', 'to_grid', timeit.default_timer() - spent_time) return dataset def calculate_emissions(self): spent_time = timeit.default_timer() - self.__logger.write_log('\tCalculating traffic area.', message_level=2) + self.logger.write_log('\tCalculating traffic area.', message_level=2) if self.do_evaporative: - self.__logger.write_log('\tCalculating evaporative emissions.', message_level=2) + self.logger.write_log('\tCalculating evaporative emissions.', message_level=2) self.calculate_evaporative_emissions() if self.do_small_cities: - self.__logger.write_log('\tCalculating small cities emissions.', message_level=2) + self.logger.write_log('\tCalculating small cities emissions.', 
message_level=2) self.calculate_small_cities_emissions() emissions = self.to_grid() - self.__logger.write_log('\t\tTraffic area emissions calculated', message_level=2) - self.__logger.write_time_log('TrafficAreaSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\t\tTraffic area emissions calculated', message_level=2) + self.logger.write_time_log('TrafficAreaSector', 'calculate_emissions', timeit.default_timer() - spent_time) return emissions diff --git a/hermesv3_bu/sectors/traffic_sector.py b/hermesv3_bu/sectors/traffic_sector.py index 4375fa4..6ff9e65 100755 --- a/hermesv3_bu/sectors/traffic_sector.py +++ b/hermesv3_bu/sectors/traffic_sector.py @@ -105,10 +105,10 @@ class TrafficSector(Sector): self.output_dir = output_dir self.link_to_grid_csv = os.path.join(auxiliary_dir, 'traffic', 'link_grid.csv') - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: if not os.path.exists(os.path.dirname(self.link_to_grid_csv)): os.makedirs(os.path.dirname(self.link_to_grid_csv)) - self.__comm.Barrier() + self.comm.Barrier() self.crs = None # crs is the projection of the road links and it is set on the read_road_links function. self.write_rline = write_rline self.road_links = self.read_road_links(road_link_path) @@ -142,7 +142,7 @@ class TrafficSector(Sector): self.do_road_wear = do_road_wear self.do_resuspension = do_resuspension - self.__logger.write_time_log('TrafficSector', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', '__init__', timeit.default_timer() - spent_time) def check_profiles(self): spent_time = timeit.default_timer() @@ -193,7 +193,7 @@ class TrafficSector(Sector): error_exit("The following hourly profile IDs reported in the road links shapefile do not appear " + "in the hourly profiles file. 
{0}".format(hour_res)) - self.__logger.write_time_log('TrafficSector', 'check_profiles', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'check_profiles', timeit.default_timer() - spent_time) def read_all_hourly_profiles(self, hourly_mean_profiles_path, hourly_weekday_profiles_path, hourly_saturday_profiles_path, hourly_sunday_profiles_path): @@ -249,7 +249,7 @@ class TrafficSector(Sector): # dataframe['PM10'] = 'pm10' # if 'pm' in self.source_pollutants and 'PM25' in speciation_map[['dst']].values: # dataframe['PM25'] = 'pm25' - self.__logger.write_time_log('TrafficSector', 'read_speciation_map', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'read_speciation_map', timeit.default_timer() - spent_time) return dataframe @@ -271,7 +271,7 @@ class TrafficSector(Sector): self.road_links.drop(columns=['utc', 'timezone'], inplace=True) libc.malloc_trim(0) - self.__logger.write_time_log('TrafficSector', 'add_local_date', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'add_local_date', timeit.default_timer() - spent_time) return True def add_timezones(self): @@ -283,7 +283,7 @@ class TrafficSector(Sector): self.road_links['timezone'] = 'Europe/Madrid' - self.__logger.write_time_log('TrafficSector', 'add_timezones', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'add_timezones', timeit.default_timer() - spent_time) return True def read_speed_hourly(self, path): @@ -303,7 +303,7 @@ class TrafficSector(Sector): df['P_speed'] = df['P_speed'].astype(int) df.set_index('P_speed', inplace=True) - self.__logger.write_time_log('TrafficSector', 'read_speed_hourly', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'read_speed_hourly', timeit.default_timer() - spent_time) return df def read_fleet_compo(self, path, vehicle_list): @@ -311,7 +311,7 @@ class TrafficSector(Sector): df = pd.read_csv(path, sep=',') if 
vehicle_list is not None: df = df.loc[df['Code'].isin(vehicle_list), :] - self.__logger.write_time_log('TrafficSector', 'read_fleet_compo', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'read_fleet_compo', timeit.default_timer() - spent_time) return df def read_road_links(self, path): @@ -342,7 +342,7 @@ class TrafficSector(Sector): spent_time = timeit.default_timer() - if self.__comm.Get_rank() == 0: + if self.comm.Get_rank() == 0: df = gpd.read_file(path) try: df.drop(columns=['Adminis', 'CCAA', 'NETWORK_ID', 'Province', 'Road_name', 'aadt_m_sat', 'aadt_m_sun', @@ -374,12 +374,12 @@ class TrafficSector(Sector): df.set_index('Link_ID', inplace=True) libc.malloc_trim(0) - chunks = chunk_road_links(df, self.__comm.Get_size()) + chunks = chunk_road_links(df, self.comm.Get_size()) else: chunks = None - self.__comm.Barrier() + self.comm.Barrier() - df = self.__comm.scatter(chunks, root=0) + df = self.comm.scatter(chunks, root=0) del chunks libc.malloc_trim(0) @@ -409,7 +409,7 @@ class TrafficSector(Sector): if self.write_rline: self.write_rline_roadlinks(df) - self.__logger.write_time_log('TrafficSector', 'read_road_links', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'read_road_links', timeit.default_timer() - spent_time) libc.malloc_trim(0) return df @@ -487,7 +487,7 @@ class TrafficSector(Sector): return df - self.__logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) return None def read_mcorr_file(self, pollutant_name): @@ -499,11 +499,11 @@ class TrafficSector(Sector): if 'Copert_V_name' in list(df.columns.values): df.drop(columns=['Copert_V_name'], inplace=True) except IOError: - self.__logger.write_log('WARNING! No mileage correction applied to {0}'.format(pollutant_name)) + self.logger.write_log('WARNING! 
No mileage correction applied to {0}'.format(pollutant_name)) warnings.warn('No mileage correction applied to {0}'.format(pollutant_name)) df = None - self.__logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'read_ef', timeit.default_timer() - spent_time) return df def calculate_precipitation_factor(self, lon_min, lon_max, lat_min, lat_max, precipitation_dir): @@ -511,7 +511,7 @@ class TrafficSector(Sector): dates_to_extract = [self.date_array[0] + timedelta(hours=x - 47) for x in range(47)] + self.date_array - precipitation = IoNetcdf(self.__comm).get_hourly_data_from_netcdf( + precipitation = IoNetcdf(self.comm).get_hourly_data_from_netcdf( lon_min, lon_max, lat_min, lat_max, precipitation_dir, 'prlr', dates_to_extract) precipitation.set_index('REC', inplace=True, drop=True) @@ -537,8 +537,8 @@ class TrafficSector(Sector): df.loc[:, 'REC'] = df.index - self.__logger.write_time_log('TrafficSector', 'calculate_precipitation_factor', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_precipitation_factor', + timeit.default_timer() - spent_time) return df def update_fleet_value(self, df): @@ -562,8 +562,8 @@ class TrafficSector(Sector): aadt = round(aux_df['aadt'].min(), 1) fleet_value = round(aux_df['Fleet_value'].sum(), 1) if aadt != fleet_value: - self.__logger.write_log('link_ID: {0} aadt: {1} sum_fleet: {2}'.format(link_id, aadt, fleet_value), - message_level=2) + self.logger.write_log('link_ID: {0} aadt: {1} sum_fleet: {2}'.format(link_id, aadt, fleet_value), + message_level=2) # Drop 0 values df = df[df['Fleet_value'] > 0] @@ -575,7 +575,7 @@ class TrafficSector(Sector): error_exit(str(e).replace('axis', 'the road links shapefile')) libc.malloc_trim(0) - self.__logger.write_time_log('TrafficSector', 'update_fleet_value', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'update_fleet_value', 
timeit.default_timer() - spent_time) return df def calculate_time_dependent_values(self, df): @@ -675,8 +675,8 @@ class TrafficSector(Sector): error_exit(str(e).replace('axis', 'the road links shapefile')) libc.malloc_trim(0) - self.__logger.write_time_log('TrafficSector', 'calculate_time_dependent_values', - timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_time_dependent_values', + timeit.default_timer() - spent_time) return df def expand_road_links(self): @@ -703,7 +703,7 @@ class TrafficSector(Sector): df = self.update_fleet_value(df) df = self.calculate_time_dependent_values(df) - self.__logger.write_time_log('TrafficSector', 'expand_road_links', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'expand_road_links', timeit.default_timer() - spent_time) return df @@ -720,7 +720,7 @@ class TrafficSector(Sector): fleet['fleet_comp'] = zone - self.__logger.write_time_log('TrafficSector', 'find_fleet', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'find_fleet', timeit.default_timer() - spent_time) return fleet @@ -762,7 +762,7 @@ class TrafficSector(Sector): resta_2 = [item for item in calculated_ef_profiles if item not in original_ef_profile] # Error if len(resta_1) > 0: - self.__logger.write_log('WARNING! Exists some fleet codes that not appear on the EF file: {0}'.format( + self.logger.write_log('WARNING! 
Exists some fleet codes that not appear on the EF file: {0}'.format( resta_1)) warnings.warn('Exists some fleet codes that not appear on the EF file: {0}'.format(resta_1), Warning) if len(resta_2) > 0: @@ -832,7 +832,7 @@ class TrafficSector(Sector): libc.malloc_trim(0) - self.__logger.write_time_log('TrafficSector', 'calculate_hot', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_hot', timeit.default_timer() - spent_time) return expanded_aux @@ -850,7 +850,7 @@ class TrafficSector(Sector): link_lons = cold_links['geometry'].centroid.x link_lats = cold_links['geometry'].centroid.y - temperature = IoNetcdf(self.__comm).get_hourly_data_from_netcdf( + temperature = IoNetcdf(self.comm).get_hourly_data_from_netcdf( link_lons.min(), link_lons.max(), link_lats.min(), link_lats.max(), self.temp_common_path, 'tas', self.date_array) temperature.rename(columns={x: 't_{0}'.format(x) for x in range(len(self.date_array))}, inplace=True) @@ -949,14 +949,14 @@ class TrafficSector(Sector): cold_df.drop(columns=['voc_{0}'.format(tstep)], inplace=True) libc.malloc_trim(0) else: - self.__logger.write_log("WARNING! nmvoc emissions cannot be estimated because voc or ch4 are not " + + self.logger.write_log("WARNING! 
nmvoc emissions cannot be estimated because voc or ch4 are not " + "selected in the pollutant list.") warnings.warn("nmvoc emissions cannot be estimated because voc or ch4 are not selected in the " + "pollutant list.") cold_df = self.speciate_traffic(cold_df, self.hot_cold_speciation) libc.malloc_trim(0) - self.__logger.write_time_log('TrafficSector', 'calculate_cold', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_cold', timeit.default_timer() - spent_time) return cold_df def compact_hot_expanded(self, expanded): @@ -978,14 +978,14 @@ class TrafficSector(Sector): expanded.loc[expanded['nmvoc_{0}'.format(tstep)] < 0, 'nmvoc_{0}'.format(tstep)] = 0 expanded.drop(columns=['voc_{0}'.format(tstep)], inplace=True) else: - self.__logger.write_log("nmvoc emissions cannot be estimated because voc or ch4 are not selected in " + + self.logger.write_log("nmvoc emissions cannot be estimated because voc or ch4 are not selected in " + "the pollutant list.") warnings.warn( "nmvoc emissions cannot be estimated because voc or ch4 are not selected in the pollutant list.") compacted = self.speciate_traffic(expanded, self.hot_cold_speciation) - self.__logger.write_time_log('TrafficSector', 'compact_hot_expanded', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'compact_hot_expanded', timeit.default_timer() - spent_time) return compacted def calculate_tyre_wear(self): @@ -1019,7 +1019,7 @@ class TrafficSector(Sector): df.drop(columns=columns_to_delete, inplace=True) df = self.speciate_traffic(df, self.tyre_speciation) - self.__logger.write_time_log('TrafficSector', 'calculate_tyre_wear', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_tyre_wear', timeit.default_timer() - spent_time) return df def calculate_brake_wear(self): @@ -1054,7 +1054,7 @@ class TrafficSector(Sector): df = self.speciate_traffic(df, self.brake_speciation) - 
self.__logger.write_time_log('TrafficSector', 'calculate_brake_wear', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_brake_wear', timeit.default_timer() - spent_time) return df def calculate_road_wear(self): @@ -1084,7 +1084,7 @@ class TrafficSector(Sector): df = self.speciate_traffic(df, self.road_speciation) - self.__logger.write_time_log('TrafficSector', 'calculate_road_wear', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_road_wear', timeit.default_timer() - spent_time) return df def calculate_resuspension(self): @@ -1142,7 +1142,7 @@ class TrafficSector(Sector): df = self.speciate_traffic(df, self.resuspension_speciation) - self.__logger.write_time_log('TrafficSector', 'calculate_resuspension', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'calculate_resuspension', timeit.default_timer() - spent_time) return df def transform_df(self, df): @@ -1164,7 +1164,7 @@ class TrafficSector(Sector): df.drop(columns=pollutants_renamed, inplace=True) df = pd.concat(df_list, ignore_index=True) - self.__logger.write_time_log('TrafficSector', 'transform_df', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'transform_df', timeit.default_timer() - spent_time) return df def speciate_traffic(self, df, speciation): @@ -1238,36 +1238,36 @@ class TrafficSector(Sector): df_out = pd.concat(df_out_list, axis=1) - self.__logger.write_time_log('TrafficSector', 'speciate_traffic', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'speciate_traffic', timeit.default_timer() - spent_time) return df_out def calculate_emissions(self): spent_time = timeit.default_timer() version = 1 - self.__logger.write_log('\tCalculating Road traffic emissions', message_level=1) + self.logger.write_log('\tCalculating Road traffic emissions', message_level=1) df_accum = pd.DataFrame() if version == 2: if 
self.do_hot: - self.__logger.write_log('\t\tCalculating Hot emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Hot emissions.', message_level=2) df_accum = pd.concat([df_accum, self.compact_hot_expanded(self.calculate_hot())]).groupby( ['tstep', 'Link_ID']).sum() if self.do_cold: - self.__logger.write_log('\t\tCalculating Cold emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Cold emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_cold(self.calculate_hot())]).groupby( ['tstep', 'Link_ID']).sum() else: if self.do_hot or self.do_cold: - self.__logger.write_log('\t\tCalculating Hot emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Hot emissions.', message_level=2) hot_emis = self.calculate_hot() if self.do_hot: - self.__logger.write_log('\t\tCompacting Hot emissions.', message_level=2) + self.logger.write_log('\t\tCompacting Hot emissions.', message_level=2) df_accum = pd.concat([df_accum, self.compact_hot_expanded(hot_emis.copy())]).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_cold: - self.__logger.write_log('\t\tCalculating Cold emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Cold emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_cold(hot_emis)]).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) @@ -1276,20 +1276,20 @@ class TrafficSector(Sector): libc.malloc_trim(0) if self.do_tyre_wear: - self.__logger.write_log('\t\tCalculating Tyre wear emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Tyre wear emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_tyre_wear()], sort=False).groupby(['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_brake_wear: - self.__logger.write_log('\t\tCalculating Brake wear emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Brake wear emissions.', message_level=2) df_accum = pd.concat([df_accum, 
self.calculate_brake_wear()], sort=False).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_road_wear: - self.__logger.write_log('\t\tCalculating Road wear emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Road wear emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_road_wear()], sort=False).groupby(['tstep', 'Link_ID']).sum() libc.malloc_trim(0) if self.do_resuspension: - self.__logger.write_log('\t\tCalculating Resuspension emissions.', message_level=2) + self.logger.write_log('\t\tCalculating Resuspension emissions.', message_level=2) df_accum = pd.concat([df_accum, self.calculate_resuspension()], sort=False).groupby( ['tstep', 'Link_ID']).sum() libc.malloc_trim(0) @@ -1301,12 +1301,12 @@ class TrafficSector(Sector): if self.write_rline: self.write_rline_output(df_accum.copy()) - self.__logger.write_log('\t\tRoad link emissions to grid.', message_level=2) + self.logger.write_log('\t\tRoad link emissions to grid.', message_level=2) df_accum = self.links_to_grid(df_accum) libc.malloc_trim(0) - self.__logger.write_log('\tRoad traffic emissions calculated', message_level=2) - self.__logger.write_time_log('TrafficSector', 'calculate_emissions', timeit.default_timer() - spent_time) + self.logger.write_log('\tRoad traffic emissions calculated', message_level=2) + self.logger.write_time_log('TrafficSector', 'calculate_emissions', timeit.default_timer() - spent_time) return df_accum def links_to_grid(self, link_emissions): @@ -1348,14 +1348,14 @@ class TrafficSector(Sector): link_grid = pd.DataFrame({'Link_ID': link_id_list, 'FID': fid_list, 'length': length_list}) # Writing link to grid file - data = self.__comm.gather(link_grid, root=0) - if self.__comm.Get_rank() == 0: + data = self.comm.gather(link_grid, root=0) + if self.comm.Get_rank() == 0: if not os.path.exists(os.path.dirname(self.link_to_grid_csv)): os.makedirs(os.path.dirname(self.link_to_grid_csv)) data = pd.concat(data) 
data.to_csv(self.link_to_grid_csv) - self.__comm.Barrier() + self.comm.Barrier() else: link_grid = pd.read_csv(self.link_to_grid_csv) @@ -1376,7 +1376,7 @@ class TrafficSector(Sector): link_grid['layer'] = 0 link_grid = link_grid.groupby(['FID', 'layer', 'tstep']).sum() - self.__logger.write_time_log('TrafficSector', 'links_to_grid', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'links_to_grid', timeit.default_timer() - spent_time) return link_grid @@ -1392,8 +1392,8 @@ class TrafficSector(Sector): emissions.reset_index(inplace=True) - emissions_list = self.__comm.gather(emissions, root=0) - if self.__comm.Get_rank() == 0: + emissions_list = self.comm.gather(emissions, root=0) + if self.comm.Get_rank() == 0: emissions = pd.concat(emissions_list) p_list = list(emissions.columns.values) p_list.remove('tstep') @@ -1413,16 +1413,16 @@ class TrafficSector(Sector): out_df.to_csv(os.path.join(self.output_dir, 'rline_{1}_{0}.csv'.format( p, self.date_array[0].strftime('%Y%m%d'))), index=False) - self.__comm.Barrier() + self.comm.Barrier() - self.__logger.write_time_log('TrafficSector', 'write_rline_output', timeit.default_timer() - spent_time) + self.logger.write_time_log('TrafficSector', 'write_rline_output', timeit.default_timer() - spent_time) return True def write_rline_roadlinks(self, df_in): spent_time = timeit.default_timer() - df_in_list = self.__comm.gather(df_in, root=0) - if self.__comm.Get_rank() == 0: + df_in_list = self.comm.gather(df_in, root=0) + if self.comm.Get_rank() == 0: df_in = pd.concat(df_in_list) df_out = pd.DataFrame( @@ -1482,7 +1482,7 @@ class TrafficSector(Sector): df_out.set_index('Link_ID', inplace=True) df_out.sort_index(inplace=True) df_out.to_csv(os.path.join(self.output_dir, 'roads.txt'), index=False, sep=' ') - self.__comm.Barrier() - self.__logger.write_log('\t\tTraffic emissions calculated', message_level=2) - self.__logger.write_time_log('TrafficSector', 'write_rline_roadlinks', 
timeit.default_timer() - spent_time) + self.comm.Barrier() + self.logger.write_log('\t\tTraffic emissions calculated', message_level=2) + self.logger.write_time_log('TrafficSector', 'write_rline_roadlinks', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/cmaq_writer.py b/hermesv3_bu/writer/cmaq_writer.py index 8d86acd..53c5bec 100755 --- a/hermesv3_bu/writer/cmaq_writer.py +++ b/hermesv3_bu/writer/cmaq_writer.py @@ -79,7 +79,7 @@ class CmaqWriter(Writer): self.global_attributes = self.create_global_attributes(global_attributes_path) self.pollutant_info = self.change_pollutant_attributes() - self.__logger.write_time_log('CmaqWriter', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -96,7 +96,7 @@ class CmaqWriter(Writer): # From mol/h g/h to mol/s g/s emissions = emissions / 3600.0 - self.__logger.write_time_log('CmaqWriter', 'unit_change', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', 'unit_change', timeit.default_timer() - spent_time) return emissions def change_pollutant_attributes(self): @@ -123,7 +123,7 @@ class CmaqWriter(Writer): new_pollutant_info.loc[i, 'long_name'] = "{:<16}".format(pollutant) new_pollutant_info.set_index('pollutant', inplace=True) - self.__logger.write_time_log('CmaqWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) return new_pollutant_info def create_tflag(self): @@ -144,7 +144,7 @@ class CmaqWriter(Writer): t_flag[i_d, i_p, 0] = y_d t_flag[i_d, i_p, 1] = hms - self.__logger.write_time_log('CmaqWriter', 'create_tflag', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', 'create_tflag', timeit.default_timer() - spent_time) return t_flag def str_var_list(self): @@ -160,7 +160,7 @@ 
class CmaqWriter(Writer): for var in list(self.pollutant_info.index): str_var_list += "{:<16}".format(var) - self.__logger.write_time_log('CmaqWriter', 'str_var_list', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', 'str_var_list', timeit.default_timer() - spent_time) return str_var_list def read_global_attributes(self, global_attributes_path): @@ -199,13 +199,13 @@ class CmaqWriter(Writer): atts_dict[att] = np.array(df.loc[df['attribute'] == att, 'value'].item().split(), dtype=np.float32) except ValueError: - self.__logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + + self.logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + " Using default value '{0}'".format(atts_dict[att])) if self.comm_write.Get_rank() == 0: warn('WARNING: The global attribute {0} is not defined; Using default value {1}'.format( att, atts_dict[att])) - self.__logger.write_time_log('CmaqWriter', 'read_global_attributes', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', 'read_global_attributes', timeit.default_timer() - spent_time) return atts_dict def create_global_attributes(self, global_attributes_path): @@ -256,7 +256,7 @@ class CmaqWriter(Writer): global_attributes['XCELL'] = np.float(self.grid.attributes['inc_x']) global_attributes['YCELL'] = np.float(self.grid.attributes['inc_y']) - self.__logger.write_time_log('CmaqWriter', 'create_global_attributes', timeit.default_timer() - spent_time) + self.logger.write_time_log('CmaqWriter', 'create_global_attributes', timeit.default_timer() - spent_time) return global_attributes def write_netcdf(self, emissions): @@ -276,7 +276,7 @@ class CmaqWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ===== DIMENSIONS ===== - self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) netcdf.createDimension('TSTEP', 
len(self.date_array)) netcdf.createDimension('DATE-TIME', 2) netcdf.createDimension('LAY', len(self.grid.vertical_desctiption)) @@ -285,7 +285,7 @@ class CmaqWriter(Writer): netcdf.createDimension('COL', self.grid.center_longitudes.shape[1]) # ========== VARIABLES ========== - self.__logger.write_log('\tCreating NetCDF variables', message_level=2) + self.logger.write_log('\tCreating NetCDF variables', message_level=2) tflag = netcdf.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME',)) tflag.setncatts({'units': "{:<16}".format(''), 'long_name': "{:<16}".format('TFLAG'), 'var_desc': "{:<80}".format('Timestep-valid flags: (1) YYYYDDD or (2) HHMMSS')}) @@ -294,7 +294,7 @@ class CmaqWriter(Writer): # ========== POLLUTANTS ========== for var_name in emissions.columns.values: - self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) if self.comm_write.Get_size() > 1: var = netcdf.createVariable(var_name, np.float64, ('TSTEP', 'LAY', 'ROW', 'COL',)) @@ -315,13 +315,13 @@ class CmaqWriter(Writer): var.var_desc = self.pollutant_info.loc[var_name, 'var_desc'] # ========== METADATA ========== - self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.logger.write_log('\tCreating NetCDF metadata', message_level=2) for attribute in self.global_attributes_order: netcdf.setncattr(attribute, self.global_attributes[attribute]) netcdf.close() - self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.__logger.write_time_log('CmaqWriter', 'write_netcdf', timeit.default_timer() - spent_time) + self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.logger.write_time_log('CmaqWriter', 'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/default_writer.py b/hermesv3_bu/writer/default_writer.py index 2f1ebb6..b25d1ae 100755 --- 
a/hermesv3_bu/writer/default_writer.py +++ b/hermesv3_bu/writer/default_writer.py @@ -62,7 +62,7 @@ class DefaultWriter(Writer): super(DefaultWriter, self).__init__(comm_world, comm_write, logger, netcdf_path, grid, date_array, pollutant_info, rank_distribution, emission_summary) - self.__logger.write_time_log('DefaultWriter', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('DefaultWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -74,7 +74,7 @@ class DefaultWriter(Writer): :return: Same emissions as input :rtype: DataFrame """ - self.__logger.write_time_log('DefaultWriter', 'unit_change', 0.0) + self.logger.write_time_log('DefaultWriter', 'unit_change', 0.0) return emissions @@ -96,7 +96,7 @@ class DefaultWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ========== DIMENSIONS ========== - self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) if self.grid.grid_type == 'Regular Lat-Lon': netcdf.createDimension('lat', self.grid.center_latitudes.shape[0]) netcdf.createDimension('lon', self.grid.center_longitudes.shape[0]) @@ -124,8 +124,8 @@ class DefaultWriter(Writer): netcdf.createDimension('time', len(self.date_array)) # ========== VARIABLES ========== - self.__logger.write_log('\tCreating NetCDF variables', message_level=2) - self.__logger.write_log('\t\tCreating time variable', message_level=3) + self.logger.write_log('\tCreating NetCDF variables', message_level=2) + self.logger.write_log('\t\tCreating time variable', message_level=3) time_var = netcdf.createVariable('time', np.float64, ('time',)) time_var.units = 'hours since {0}'.format(self.date_array[0].strftime("%Y-%m-%d %H:%M:%S")) @@ -134,13 +134,13 @@ class DefaultWriter(Writer): time_var.long_name = "time" time_var[:] = date2num(self.date_array, time_var.units, calendar=time_var.calendar) - 
self.__logger.write_log('\t\tCreating lev variable', message_level=3) + self.logger.write_log('\t\tCreating lev variable', message_level=3) lev = netcdf.createVariable('lev', np.float64, ('lev',)) lev.units = Unit("m").symbol lev.positive = 'up' lev[:] = self.grid.vertical_desctiption - self.__logger.write_log('\t\tCreating lat variable', message_level=3) + self.logger.write_log('\t\tCreating lat variable', message_level=3) lats = netcdf.createVariable('lat', np.float64, lat_dim) lats.units = "degrees_north" lats.axis = "Y" @@ -151,7 +151,7 @@ class DefaultWriter(Writer): lat_bnds = netcdf.createVariable('lat_bnds', np.float64, lat_dim + ('nv',)) lat_bnds[:] = self.grid.boundary_latitudes - self.__logger.write_log('\t\tCreating lon variable', message_level=3) + self.logger.write_log('\t\tCreating lon variable', message_level=3) lons = netcdf.createVariable('lon', np.float64, lon_dim) lons.units = "degrees_east" lons.axis = "X" @@ -163,14 +163,14 @@ class DefaultWriter(Writer): lon_bnds[:] = self.grid.boundary_longitudes if self.grid.grid_type in ['Lambert Conformal Conic', 'Mercator']: - self.__logger.write_log('\t\tCreating x variable', message_level=3) + self.logger.write_log('\t\tCreating x variable', message_level=3) x_var = netcdf.createVariable('x', np.float64, ('x',)) x_var.units = Unit("km").symbol x_var.long_name = "x coordinate of projection" x_var.standard_name = "projection_x_coordinate" x_var[:] = self.grid.x - self.__logger.write_log('\t\tCreating y variable', message_level=3) + self.logger.write_log('\t\tCreating y variable', message_level=3) y_var = netcdf.createVariable('y', np.float64, ('y',)) y_var.units = Unit("km").symbol y_var.long_name = "y coordinate of projection" @@ -178,7 +178,7 @@ class DefaultWriter(Writer): y_var[:] = self.grid.y elif self.grid.grid_type == 'Rotated': - self.__logger.write_log('\t\tCreating rlat variable', message_level=3) + self.logger.write_log('\t\tCreating rlat variable', message_level=3) rlat = 
netcdf.createVariable('rlat', np.float64, ('rlat',)) rlat.long_name = "latitude in rotated pole grid" rlat.units = Unit("degrees").symbol @@ -186,7 +186,7 @@ class DefaultWriter(Writer): rlat[:] = self.grid.rlat # Rotated Longitude - self.__logger.write_log('\t\tCreating rlon variable', message_level=3) + self.logger.write_log('\t\tCreating rlon variable', message_level=3) rlon = netcdf.createVariable('rlon', np.float64, ('rlon',)) rlon.long_name = "longitude in rotated pole grid" rlon.units = Unit("degrees").symbol @@ -197,7 +197,7 @@ class DefaultWriter(Writer): # if 'Unnamed: 0' in emissions.columns.values: # emissions.drop(columns=['Unnamed: 0'], inplace=True) for var_name in emissions.columns.values: - self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) if self.comm_write.Get_size() > 1: if CHUNK: var = netcdf.createVariable(var_name, np.float64, ('time', 'lev',) + var_dim, @@ -230,9 +230,9 @@ class DefaultWriter(Writer): var.grid_mapping = 'mercator' # ========== METADATA ========== - self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.logger.write_log('\tCreating NetCDF metadata', message_level=2) - self.__logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) + self.logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) if self.grid.grid_type == 'Regular Lat-Lon': mapping = netcdf.createVariable('Latitude_Longitude', 'i') @@ -262,7 +262,7 @@ class DefaultWriter(Writer): netcdf.setncattr('Conventions', 'CF-1.6') self.comm_write.Barrier() netcdf.close() - self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.__logger.write_time_log('DefaultWriter', 'write_netcdf', timeit.default_timer() - spent_time) + self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.logger.write_time_log('DefaultWriter', 
'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/monarch_writer.py b/hermesv3_bu/writer/monarch_writer.py index 4b0d4be..3e43f0d 100755 --- a/hermesv3_bu/writer/monarch_writer.py +++ b/hermesv3_bu/writer/monarch_writer.py @@ -71,7 +71,7 @@ class MonarchWriter(Writer): error_exit("'{0}' unit is not supported for CMAQ emission ".format(variable.get('units')) + "input file. Set mol.s-1.m-2 or kg.s-1.m-2 in the speciation_map file.") - self.__logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -100,7 +100,7 @@ class MonarchWriter(Writer): if info.get('units') == "kg.s-1.m-2": # From g.s-1.m-2 to kg.s-1.m-2 emissions[[pollutant]] = emissions[[pollutant]].div(10**3) - self.__logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('MonarchWriter', '__init__', timeit.default_timer() - spent_time) return emissions @@ -121,7 +121,7 @@ class MonarchWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ========== DIMENSIONS ========== - self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) netcdf.createDimension('rlat', len(self.grid.rlat)) netcdf.createDimension('rlon', len(self.grid.rlon)) @@ -134,8 +134,8 @@ class MonarchWriter(Writer): netcdf.createDimension('time', len(self.date_array)) # ========== VARIABLES ========== - self.__logger.write_log('\tCreating NetCDF variables', message_level=2) - self.__logger.write_log('\t\tCreating time variable', message_level=3) + self.logger.write_log('\tCreating NetCDF variables', message_level=2) + self.logger.write_log('\t\tCreating time variable', message_level=3) time = netcdf.createVariable('time', np.float64, ('time',)) time.units = 
'hours since {0}'.format(self.date_array[0].strftime("%Y-%m-%d %H:%M:%S")) @@ -144,13 +144,13 @@ class MonarchWriter(Writer): time.long_name = "time" time[:] = date2num(self.date_array, time.units, calendar=time.calendar) - self.__logger.write_log('\t\tCreating lev variable', message_level=3) + self.logger.write_log('\t\tCreating lev variable', message_level=3) lev = netcdf.createVariable('lev', np.float64, ('lev',)) lev.units = Unit("m").symbol lev.positive = 'up' lev[:] = self.grid.vertical_desctiption - self.__logger.write_log('\t\tCreating lat variable', message_level=3) + self.logger.write_log('\t\tCreating lat variable', message_level=3) lats = netcdf.createVariable('lat', np.float64, lat_dim) lats.units = "degrees_north" lats.axis = "Y" @@ -161,7 +161,7 @@ class MonarchWriter(Writer): lat_bnds = netcdf.createVariable('lat_bnds', np.float64, lat_dim + ('nv',)) lat_bnds[:] = self.grid.boundary_latitudes - self.__logger.write_log('\t\tCreating lon variable', message_level=3) + self.logger.write_log('\t\tCreating lon variable', message_level=3) lons = netcdf.createVariable('lon', np.float64, lon_dim) lons.units = "degrees_east" lons.axis = "X" @@ -172,7 +172,7 @@ class MonarchWriter(Writer): lon_bnds = netcdf.createVariable('lon_bnds', np.float64, lon_dim + ('nv',)) lon_bnds[:] = self.grid.boundary_longitudes - self.__logger.write_log('\t\tCreating rlat variable', message_level=3) + self.logger.write_log('\t\tCreating rlat variable', message_level=3) rlat = netcdf.createVariable('rlat', np.float64, ('rlat',)) rlat.long_name = "latitude in rotated pole grid" rlat.units = Unit("degrees").symbol @@ -180,7 +180,7 @@ class MonarchWriter(Writer): rlat[:] = self.grid.rlat # Rotated Longitude - self.__logger.write_log('\t\tCreating rlon variable', message_level=3) + self.logger.write_log('\t\tCreating rlon variable', message_level=3) rlon = netcdf.createVariable('rlon', np.float64, ('rlon',)) rlon.long_name = "longitude in rotated pole grid" rlon.units = 
Unit("degrees").symbol @@ -189,7 +189,7 @@ class MonarchWriter(Writer): # ========== POLLUTANTS ========== for var_name in emissions.columns.values: - self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) # var = netcdf.createVariable(var_name, np.float64, ('time', 'lev',) + var_dim, # chunksizes=self.rank_distribution[0]['shape']) @@ -215,9 +215,9 @@ class MonarchWriter(Writer): var.grid_mapping = 'rotated_pole' # ========== METADATA ========== - self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.logger.write_log('\tCreating NetCDF metadata', message_level=2) - self.__logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) + self.logger.write_log('\t\tCreating Coordinate Reference System metadata', message_level=3) mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' @@ -226,7 +226,7 @@ class MonarchWriter(Writer): netcdf.setncattr('Conventions', 'CF-1.6') netcdf.close() - self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.__logger.write_time_log('MonarchWriter', 'write_netcdf', timeit.default_timer() - spent_time) + self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.logger.write_time_log('MonarchWriter', 'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/wrfchem_writer.py b/hermesv3_bu/writer/wrfchem_writer.py index 55d2e1c..a1fd289 100755 --- a/hermesv3_bu/writer/wrfchem_writer.py +++ b/hermesv3_bu/writer/wrfchem_writer.py @@ -88,7 +88,7 @@ class WrfChemWriter(Writer): self.global_attributes = self.create_global_attributes(global_attributes_path) self.pollutant_info = self.change_pollutant_attributes() - self.__logger.write_time_log('WrfChemWriter', '__init__', timeit.default_timer() - spent_time) + 
self.logger.write_time_log('WrfChemWriter', '__init__', timeit.default_timer() - spent_time) def unit_change(self, emissions): """ @@ -122,7 +122,7 @@ class WrfChemWriter(Writer): # From mol/m2.h to mol/km2.h emissions[[pollutant]] = emissions[[pollutant]].mul(10**6) - self.__logger.write_time_log('WrfChemWriter', 'unit_change', timeit.default_timer() - spent_time) + self.logger.write_time_log('WrfChemWriter', 'unit_change', timeit.default_timer() - spent_time) return emissions def change_pollutant_attributes(self): @@ -156,7 +156,7 @@ class WrfChemWriter(Writer): new_pollutant_info.loc[i, 'coordinates'] = "XLONG XLAT" new_pollutant_info.set_index('pollutant', inplace=True) - self.__logger.write_time_log('WrfChemWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) + self.logger.write_time_log('WrfChemWriter', 'change_pollutant_attributes', timeit.default_timer() - spent_time) return new_pollutant_info def read_global_attributes(self, global_attributes_path): @@ -239,13 +239,13 @@ class WrfChemWriter(Writer): atts_dict[att] = str(df.loc[df['attribute'] == att, 'value'].item()) except ValueError: - self.__logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + + self.logger.write_log("WARNING: The global attribute {0} is not defined;".format(att) + " Using default value '{0}'".format(atts_dict[att])) if self.comm_write.Get_rank() == 0: warn('WARNING: The global attribute {0} is not defined; Using default value {1}'.format( att, atts_dict[att])) - self.__logger.write_time_log('WrfChemWriter', 'read_global_attributes', timeit.default_timer() - spent_time) + self.logger.write_time_log('WrfChemWriter', 'read_global_attributes', timeit.default_timer() - spent_time) return atts_dict def create_global_attributes(self, global_attributes_path): @@ -299,7 +299,7 @@ class WrfChemWriter(Writer): global_attributes['MOAD_CEN_LAT'] = np.float32(self.grid.attributes['lat_ts']) global_attributes['STAND_LON'] = 
np.float32(self.grid.attributes['lon_0']) - self.__logger.write_time_log('WrfChemWriter', 'create_global_attributes', timeit.default_timer() - spent_time) + self.logger.write_time_log('WrfChemWriter', 'create_global_attributes', timeit.default_timer() - spent_time) return global_attributes def create_times_var(self): @@ -332,7 +332,7 @@ class WrfChemWriter(Writer): netcdf = Dataset(self.netcdf_path, format="NETCDF4", mode='w') # ===== DIMENSIONS ===== - self.__logger.write_log('\tCreating NetCDF dimensions', message_level=2) + self.logger.write_log('\tCreating NetCDF dimensions', message_level=2) netcdf.createDimension('Time', len(self.date_array)) netcdf.createDimension('DateStrLen', 19) @@ -341,13 +341,13 @@ class WrfChemWriter(Writer): netcdf.createDimension('emissions_zdim', len(self.grid.vertical_desctiption)) # ========== VARIABLES ========== - self.__logger.write_log('\tCreating NetCDF variables', message_level=2) + self.logger.write_log('\tCreating NetCDF variables', message_level=2) times = netcdf.createVariable('Times', 'S1', ('Time', 'DateStrLen',)) times[:] = self.create_times_var() # ========== POLLUTANTS ========== for var_name in emissions.columns.values: - self.__logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) + self.logger.write_log('\t\tCreating {0} variable'.format(var_name), message_level=3) if self.comm_write.Get_size() > 1: var = netcdf.createVariable(var_name, np.float64, @@ -373,13 +373,13 @@ class WrfChemWriter(Writer): var.coordinates = self.pollutant_info.loc[var_name, 'coordinates'] # ========== METADATA ========== - self.__logger.write_log('\tCreating NetCDF metadata', message_level=2) + self.logger.write_log('\tCreating NetCDF metadata', message_level=2) for attribute in self.global_attributes_order: netcdf.setncattr(attribute, self.global_attributes[attribute]) netcdf.close() - self.__logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) - self.__logger.write_time_log('WrfChemWriter', 
'write_netcdf', timeit.default_timer() - spent_time) + self.logger.write_log('NetCDF write at {0}'.format(self.netcdf_path)) + self.logger.write_time_log('WrfChemWriter', 'write_netcdf', timeit.default_timer() - spent_time) return True diff --git a/hermesv3_bu/writer/writer.py b/hermesv3_bu/writer/writer.py index 728bc62..3d5d284 100755 --- a/hermesv3_bu/writer/writer.py +++ b/hermesv3_bu/writer/writer.py @@ -273,7 +273,7 @@ class Writer(object): self.comm_world = comm_world self.comm_write = comm_write - self.__logger = logger + self.logger = logger self.netcdf_path = netcdf_path self.grid = grid self.date_array = date_array @@ -290,7 +290,7 @@ class Writer(object): else: self.emission_summary_paths = None - self.__logger.write_time_log('Writer', '__init__', timeit.default_timer() - spent_time) + self.logger.write_time_log('Writer', '__init__', timeit.default_timer() - spent_time) def gather_emissions(self, emissions): """ @@ -307,27 +307,27 @@ class Writer(object): """ spent_time = timeit.default_timer() # Sending - self.__logger.write_log('Sending emissions to the writing processors.', message_level=2) + self.logger.write_log('Sending emissions to the writing processors.', message_level=2) requests = [] for w_rank, info in self.rank_distribution.items(): partial_emis = emissions.loc[(emissions.index.get_level_values(0) >= info['fid_min']) & (emissions.index.get_level_values(0) < info['fid_max'])] - self.__logger.write_log('\tFrom {0} sending {1} to {2}'.format( + self.logger.write_log('\tFrom {0} sending {1} to {2}'.format( self.comm_world.Get_rank(), sys.getsizeof(partial_emis), w_rank), message_level=3) # requests.append(self.comm_world.isend(sys.getsizeof(partial_emis), dest=w_rank, # tag=self.comm_world.Get_rank() + MPI_TAG_CONSTANT)) requests.append(self.comm_world.isend(partial_emis, dest=w_rank, tag=self.comm_world.Get_rank())) # Receiving - self.__logger.write_log('Receiving emissions in the writing processors.', message_level=2) + 
self.logger.write_log('Receiving emissions in the writing processors.', message_level=2) if self.comm_world.Get_rank() in self.rank_distribution.keys(): - self.__logger.write_log("I'm a writing processor.", message_level=3) + self.logger.write_log("I'm a writing processor.", message_level=3) data_list = [] - self.__logger.write_log("Prepared to receive", message_level=3) + self.logger.write_log("Prepared to receive", message_level=3) for i_rank in range(self.comm_world.Get_size()): - self.__logger.write_log( + self.logger.write_log( '\tFrom {0} to {1}'.format(i_rank, self.comm_world.Get_rank()), message_level=3) req = self.comm_world.irecv(2**27, source=i_rank, tag=i_rank) dataframe = req.wait() @@ -341,12 +341,12 @@ class Writer(object): else: new_emissions = None self.comm_world.Barrier() - self.__logger.write_log('All emissions received.', message_level=2) + self.logger.write_log('All emissions received.', message_level=2) if self.emission_summary and self.comm_world.Get_rank() in self.rank_distribution.keys(): self.make_summary(new_emissions) - self.__logger.write_time_log('Writer', 'gather_emissions', timeit.default_timer() - spent_time) + self.logger.write_time_log('Writer', 'gather_emissions', timeit.default_timer() - spent_time) return new_emissions @@ -369,7 +369,7 @@ class Writer(object): for (layer, tstep), aux_df in dataframe.groupby(['layer', 'tstep']): data[tstep, layer, aux_df['FID']] = aux_df[var_name] - self.__logger.write_time_log('Writer', 'dataframe_to_array', timeit.default_timer() - spent_time) + self.logger.write_time_log('Writer', 'dataframe_to_array', timeit.default_timer() - spent_time) return data.reshape(shape) @@ -390,7 +390,7 @@ class Writer(object): self.write_netcdf(emissions) self.comm_world.Barrier() - self.__logger.write_time_log('Writer', 'write', timeit.default_timer() - spent_time) + self.logger.write_time_log('Writer', 'write', timeit.default_timer() - spent_time) return True @@ -438,4 +438,4 @@ class Writer(object): 
summary.groupby('tstep').sum().to_csv(self.emission_summary_paths['hourly_summary_path']) summary.drop(columns=['tstep'], inplace=True) pd.DataFrame(summary.sum()).to_csv(self.emission_summary_paths['total_summary_path']) - self.__logger.write_time_log('Writer', 'make_summary', timeit.default_timer() - spent_time) + self.logger.write_time_log('Writer', 'make_summary', timeit.default_timer() - spent_time) -- GitLab