From 9e0f476f746a2c707aa713039c0f1ee513f75838 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Wed, 5 Sep 2018 17:28:14 +0200 Subject: [PATCH 01/51] starting package --- setup.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 45941a9..4a3f403 100644 --- a/setup.py +++ b/setup.py @@ -10,15 +10,20 @@ here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'VERSION')) as f: version = f.read().strip() +with open("README.md", "r") as fh: + long_description = fh.read() + setup( name='hermesv3_gr', license='GNU GPL v3', # platforms=['GNU/Linux Debian'], version=version, description='HERMESv3 Global/Regional', + long_description=long_description, + long_description_content_type="text/markdown", author='Carles Tena Medina', author_email='carles.tena@bsc.es', - # url='http://www.bsc.es/projects/earthscience/autosubmit/', + url='https://earth.bsc.es/gitlab/es/hermesv3_gr', # download_url='https://earth.bsc.es/wiki/doku.php?id=tools:autosubmit', keywords=['emissions', 'cmaq', 'monarch', 'wrf-chem', 'atmospheric composition', 'air quality', 'earth science'], @@ -30,8 +35,23 @@ setup( 'pandas', 'geopandas', 'pyproj', + 'configargparse', + 'cf_units>=1.1.3', + # 'calendar' is part of the Python standard library; listing it in install_requires breaks pip install + 'ESMPy', + 'holidays', + 'pytz', + # 're' is part of the Python standard library; listing it in install_requires breaks pip install + 'timezonefinder', + # 'unittest' is part of the Python standard library; listing it in install_requires breaks pip install ], packages=find_packages(), + classifiers=[ + "Programming Language :: Python :: 2.7", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Operating System :: OS Independent", + "Topic :: Scientific/Engineering :: Atmospheric Science" + ], include_package_data=True, # package_data={'hermes_v3': [ # 'autosubmit/config/files/autosubmit.conf', -- GitLab From 33c76814d0693a42142d7a816c8e285a4bb367a6 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 6 Sep 2018 18:07:01 +0200 Subject: [PATCH 02/51] Added all the scripts to the new project --- conf/EI_configuration.csv | 17 + conf/hermes.conf | 112 + data/global_attributes.csv | 44 + 
data/profiles/speciation/MolecularWeights.csv | 63 + .../Speciation_profile_cb05_aero5_CMAQ.csv | 25 + ...on_profile_cb05_aero5_MONARCH_aerosols.csv | 13 + ...on_profile_cb05_aero5_MONARCH_fullchem.csv | 25 + ...tion_profile_radm2_madesorgam_WRF_CHEM.csv | 28 + .../temporal/TemporalProfile_Daily.csv | 7 + .../temporal/TemporalProfile_Hourly.csv | 12 + .../temporal/TemporalProfile_Monthly.csv | 12 + .../temporal/tz_world_country_iso3166.csv | 423 ++ .../vertical/1layer_vertical_description.csv | 2 + .../CMAQ_15layers_vertical_description.csv | 16 + ...H_Global_48layers_vertical_description.csv | 49 + ...regional_48layers_vertical_description.csv | 49 + data/profiles/vertical/Vertical_profile.csv | 7 + hermesv3_gr/config/__init__.py | 0 hermesv3_gr/config/config.py | 275 ++ hermesv3_gr/config/settings.py | 153 + hermesv3_gr/hermes.py | 154 + hermesv3_gr/modules/__init__.py | 0 .../modules/emision_inventories/__init__.py | 0 .../emision_inventories/emission_inventory.py | 522 +++ .../gfas_emission_inventory.py | 326 ++ .../point_source_emission_inventory.py | 205 + hermesv3_gr/modules/grids/__init__.py | 0 hermesv3_gr/modules/grids/grid.py | 556 +++ hermesv3_gr/modules/grids/grid_global.py | 147 + hermesv3_gr/modules/grids/grid_lcc.py | 221 + hermesv3_gr/modules/grids/grid_mercator.py | 203 + hermesv3_gr/modules/grids/grid_rotated.py | 235 + hermesv3_gr/modules/masking/__init__.py | 0 hermesv3_gr/modules/masking/masking.py | 263 ++ hermesv3_gr/modules/regrid/__init__.py | 0 hermesv3_gr/modules/regrid/regrid.py | 281 ++ .../modules/regrid/regrid_conservative.py | 353 ++ hermesv3_gr/modules/speciation/__init__.py | 0 hermesv3_gr/modules/speciation/speciation.py | 232 + hermesv3_gr/modules/temporal/__init__.py | 0 hermesv3_gr/modules/temporal/temporal.py | 1017 +++++ hermesv3_gr/modules/vertical/__init__.py | 0 hermesv3_gr/modules/vertical/vertical.py | 219 + hermesv3_gr/modules/vertical/vertical_gfas.py | 181 + hermesv3_gr/modules/writing/__init__.py | 0 
hermesv3_gr/modules/writing/writer.py | 227 + hermesv3_gr/modules/writing/writer_cmaq.py | 639 +++ hermesv3_gr/modules/writing/writer_monarch.py | 751 ++++ .../modules/writing/writer_wrf_chem.py | 444 ++ hermesv3_gr/tools/__init__.py | 0 hermesv3_gr/tools/coordinates_tools.py | 489 +++ hermesv3_gr/tools/custom_calendar.py | 104 + hermesv3_gr/tools/lcc_LatLon_to_m.py | 22 + hermesv3_gr/tools/netcdf_tools.py | 3778 +++++++++++++++++ preproc/ceds_preproc.py | 236 + preproc/eclipsev5a_preproc.py | 335 ++ preproc/edgarv432_ap_preproc.py | 326 ++ preproc/edgarv432_voc_preproc.py | 311 ++ preproc/emep_preproc.py | 141 + preproc/gfas12_preproc.py | 213 + preproc/htapv2_preproc.py | 523 +++ preproc/tno_mac_iii_preproc.py | 248 ++ preproc/tno_mac_iii_preproc_voc_ratios.py | 473 +++ preproc/wiedinmyer_preproc.py | 235 + setup.py | 26 +- tests/__init__.py | 0 tests/unit/__init__.py | 0 tests/unit/test_temporal.py | 427 ++ 68 files changed, 16391 insertions(+), 4 deletions(-) create mode 100644 conf/EI_configuration.csv create mode 100644 conf/hermes.conf create mode 100644 data/global_attributes.csv create mode 100644 data/profiles/speciation/MolecularWeights.csv create mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv create mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv create mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv create mode 100644 data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv create mode 100644 data/profiles/temporal/TemporalProfile_Daily.csv create mode 100644 data/profiles/temporal/TemporalProfile_Hourly.csv create mode 100644 data/profiles/temporal/TemporalProfile_Monthly.csv create mode 100644 data/profiles/temporal/tz_world_country_iso3166.csv create mode 100644 data/profiles/vertical/1layer_vertical_description.csv create mode 100644 data/profiles/vertical/CMAQ_15layers_vertical_description.csv create mode 100644 
data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv create mode 100644 data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv create mode 100644 data/profiles/vertical/Vertical_profile.csv create mode 100644 hermesv3_gr/config/__init__.py create mode 100644 hermesv3_gr/config/config.py create mode 100644 hermesv3_gr/config/settings.py create mode 100755 hermesv3_gr/hermes.py create mode 100644 hermesv3_gr/modules/__init__.py create mode 100644 hermesv3_gr/modules/emision_inventories/__init__.py create mode 100644 hermesv3_gr/modules/emision_inventories/emission_inventory.py create mode 100755 hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py create mode 100755 hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py create mode 100644 hermesv3_gr/modules/grids/__init__.py create mode 100644 hermesv3_gr/modules/grids/grid.py create mode 100644 hermesv3_gr/modules/grids/grid_global.py create mode 100644 hermesv3_gr/modules/grids/grid_lcc.py create mode 100644 hermesv3_gr/modules/grids/grid_mercator.py create mode 100644 hermesv3_gr/modules/grids/grid_rotated.py create mode 100644 hermesv3_gr/modules/masking/__init__.py create mode 100644 hermesv3_gr/modules/masking/masking.py create mode 100644 hermesv3_gr/modules/regrid/__init__.py create mode 100644 hermesv3_gr/modules/regrid/regrid.py create mode 100644 hermesv3_gr/modules/regrid/regrid_conservative.py create mode 100644 hermesv3_gr/modules/speciation/__init__.py create mode 100644 hermesv3_gr/modules/speciation/speciation.py create mode 100644 hermesv3_gr/modules/temporal/__init__.py create mode 100644 hermesv3_gr/modules/temporal/temporal.py create mode 100644 hermesv3_gr/modules/vertical/__init__.py create mode 100644 hermesv3_gr/modules/vertical/vertical.py create mode 100644 hermesv3_gr/modules/vertical/vertical_gfas.py create mode 100644 hermesv3_gr/modules/writing/__init__.py create mode 100644 hermesv3_gr/modules/writing/writer.py 
create mode 100644 hermesv3_gr/modules/writing/writer_cmaq.py create mode 100644 hermesv3_gr/modules/writing/writer_monarch.py create mode 100644 hermesv3_gr/modules/writing/writer_wrf_chem.py create mode 100644 hermesv3_gr/tools/__init__.py create mode 100644 hermesv3_gr/tools/coordinates_tools.py create mode 100644 hermesv3_gr/tools/custom_calendar.py create mode 100644 hermesv3_gr/tools/lcc_LatLon_to_m.py create mode 100644 hermesv3_gr/tools/netcdf_tools.py create mode 100644 preproc/ceds_preproc.py create mode 100644 preproc/eclipsev5a_preproc.py create mode 100755 preproc/edgarv432_ap_preproc.py create mode 100755 preproc/edgarv432_voc_preproc.py create mode 100644 preproc/emep_preproc.py create mode 100755 preproc/gfas12_preproc.py create mode 100755 preproc/htapv2_preproc.py create mode 100644 preproc/tno_mac_iii_preproc.py create mode 100644 preproc/tno_mac_iii_preproc_voc_ratios.py create mode 100755 preproc/wiedinmyer_preproc.py create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/test_temporal.py diff --git a/conf/EI_configuration.csv b/conf/EI_configuration.csv new file mode 100644 index 0000000..d366331 --- /dev/null +++ b/conf/EI_configuration.csv @@ -0,0 +1,17 @@ +ei;sector;ref_year;active;factor_mask;regrid_mask;pollutants;path;frequency;source_type;p_vertical;p_month;p_day;p_hour;p_speciation;comment +HTAPv2;energy;2010;0;;;so2;/jrc/htapv2/monthly_mean;monthly;area;V001;;D002;H002;E998;added 05/2017 +HTAPv2;industry;2010;0;;;so2;/jrc/htapv2/monthly_mean;monthly;area;V002;;D003;H004;E998;added 05/2017 +HTAPv2;residential;2010;1;;- FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 +HTAPv2;residential;2010;1;FRA 50;+ FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 +HTAPv2;transport;2010;1;;;so2;/jrc/htapv2/monthly_mean;monthly;area;;;D001;weekday=H001, saturday=H002, sunday=H003;E998;added 05/2017 
+HTAPv2;air_lto;2010;1;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V003;M001;D001;H001;E998;added 05/2017 +HTAPv2;air_cds;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V004;M001;D001;H001;E998;added 05/2017 +HTAPv2;air_crs;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V005;M001;D001;H001;E998;added 05/2017 +HTAPv2;ships;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;;M001;D001;H001;E008;added 05/2017 +wiedinmyer;;2014;0;;;so2;/ucar/wiedinmyer/yearly_mean;yearly;area;;M001;D001;H001;E998;added 05/2017 +ECLIPSEv5a;flaring;2010;0;;;so2;/iiasa/eclipsev5a/yearly_mean;yearly;area;V006;M001;D001;H001;E998;added 11/2017 +GFASv12;;2015;1;;;so2,nox_no;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E997;added 05/2017 +ECLIPSEv5a;transport;2010;0;;+ CHN,IND;so2;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D001;H001;E998;added 11/2017 +ECLIPSEv5a;transport;2010;0;;;nox_no2;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E999;added 11/2017 +CARN;;2015;1;;;so2;/mtu/carnetal/yearly_mean;yearly;point;;M001;D001;H001;E998;added ... +Maestra;;2015;0;;;nox_no2;/home/Earth/ctena/Models/HERMESv3/;yearly;point;;M001;D001;H001;E999;added ... 
diff --git a/conf/hermes.conf b/conf/hermes.conf new file mode 100644 index 0000000..36f5cc2 --- /dev/null +++ b/conf/hermes.conf @@ -0,0 +1,112 @@ +[GENERAL] +log_level = 3 +# input_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/IN +input_dir = /home/Earth/ctena/Models/HERMESv3/IN +# data_path = /gpfs/scratch/bsc32/bsc32538/HERMES_data +data_path = /esarchive/recon +#output_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/OUT +output_dir = /home/Earth/ctena/Models/HERMESv3/OUT +output_name = HERMES_paralel_.nc +start_date = 2014/09/02 00:00:00 +# ***** end_date = start_date [DEFAULT] ***** +# end_date = 2014/09/03 00:00:00 +# ***** output_timestep_type = [hourly, daily, monthly, yearly] ***** +output_timestep_type = hourly +output_timestep_num = 24 +output_timestep_freq = 1 + +[DOMAIN] + +# ***** output_model = [MONARCH, CMAQ, WRF_CHEM] ***** +# output_model = MONARCH +# output_model = CMAQ +output_model = WRF_CHEM +output_attributes = /home/Earth/ctena/Models/HERMESv3/IN/data/wrf_chem_global_attributes.csv + +# ***** domain_type=[global, lcc, rotated, mercator] ***** +domain_type = mercator +# vertical_description = /data/profiles/vertical/1layer_vertical_description.csv +# vertical_description = /data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv +vertical_description = /data/profiles/vertical/CMAQ_15layers_vertical_description.csv +auxiliar_files_path = /data/auxiliar_files/_ + +# if domain_type == global: + inc_lat = 0.5 + inc_lon = 0.703125 + #inc_lat = 1. 
+ #inc_lon = 1.40625 + +# if domain_type == rotated: + centre_lat = 35 + centre_lon = 20 + west_boundary = -51 + south_boundary = -35 + inc_rlat = 0.1 + inc_rlon = 0.1 + +# if domain_type == lcc: + + # CALIOPE + lat_1 = 37 + lat_2 = 43 + lon_0 = -3 + lat_0 = 40 + + # CATALUNYA + #nx = 278 + #ny = 298 + #inc_x = 1000 + #inc_y = 1000 + #x_0 = 253151.59375 + #y_0 = 43862.90625 + + # EUROPA + nx = 478 + ny = 398 + inc_x = 12000 + inc_y = 12000 + x_0 = -2131849.000 + y_0 = -2073137.875 + + # IP + #nx = 397 + #ny = 397 + #inc_x = 4000 + #inc_y = 4000 + #x_0 = -807847.688 + #y_0 = -797137.125 + +# if domain_type == mercator: + + lat_ts = -2.84 + lon_0 = -79.16 + nx = 99 + ny = 81 + inc_x = 1000 + inc_y = 1000 + x_0 = -49500.13899057542 + y_0 = -355986.6927808438 + + +[EMISSION_INVENTORY_CONFIGURATION] +# cross_table = /conf/EI_configuration_Scalability.csv +# cross_table = /conf/EI_configuration_gridded.csv +# cross_table = /conf/EI_configuration_test.csv +# cross_table = /conf/EI_configuration_publi.csv +# cross_table = /conf/EI_configuration_EU_aerosol_gas.csv +cross_table = /conf/EI_configuration_WRF_CHEM_Rene.csv + +[EMISSION_INVENTORY_PROFILES] + +p_vertical = /data/profiles/vertical/Vertical_profile.csv +p_month = /data/profiles/temporal/TemporalProfile_Monthly.csv +p_day = /data/profiles/temporal/TemporalProfile_Daily.csv +p_hour = /data/profiles/temporal/TemporalProfile_Hourly.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_test.csv +p_speciation = /data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_test.csv + +molecular_weights = /data/profiles/speciation/MolecularWeights.csv +world_info = /data/profiles/temporal/tz_world_country_iso3166.csv diff --git a/data/global_attributes.csv 
b/data/global_attributes.csv new file mode 100644 index 0000000..77cd4a2 --- /dev/null +++ b/data/global_attributes.csv @@ -0,0 +1,44 @@ +attribute,value +BOTTOM-TOP_GRID_DIMENSION,35 +GRIDTYPE,C +DIFF_OPT,1 +KM_OPT,4 +DAMP_OPT,0 +DAMPCOEF,0.01 +KHDIF,0. +KVDIF,0. +MP_PHYSICS,4 +RA_LW_PHYSICS,1 +RA_SW_PHYSICS,2 +SF_SFCLAY_PHYSICS,1 +SF_SURFACE_PHYSICS,2 +BL_PBL_PHYSICS,1 +CU_PHYSICS,5 +SF_LAKE_PHYSICS,0 +SURFACE_INPUT_SOURCE,1 +SST_UPDATE,0 +GRID_FDDA,0 +GFDDA_INTERVAL_M,0 +GFDDA_END_H,0 +GRID_SFDDA,0 +SGFDDA_INTERVAL_M,0 +SGFDDA_END_H,0 +BOTTOM-TOP_PATCH_START_UNSTAG,1 +BOTTOM-TOP_PATCH_END_UNSTAG,34 +BOTTOM-TOP_PATCH_START_STAG,1 +BOTTOM-TOP_PATCH_END_STAG,35 +GRID_ID,1 +PARENT_ID,0 +I_PARENT_START,1 +J_PARENT_START,1 +PARENT_GRID_RATIO,1 +DT,5. +MMINLU,USGS +NUM_LAND_CAT,24 +ISWATER,16 +ISLAKE,-1 +ISICE,24 +ISURBAN,1 +ISOILWATER,14 +CEN_LAT,-2.840012 +CEN_LON,-79.16 \ No newline at end of file diff --git a/data/profiles/speciation/MolecularWeights.csv b/data/profiles/speciation/MolecularWeights.csv new file mode 100644 index 0000000..9e3472c --- /dev/null +++ b/data/profiles/speciation/MolecularWeights.csv @@ -0,0 +1,63 @@ +Specie;MW +nox_no;30.01 +nox_no2;46.01 +co;28.01 +so2;64.06 +nh3;17.03 +pm10;1.0 +pm25;1.0 +oc;1.0 +bc;1.0 +c2h6s;62.13 +hcl;36.46 +c2h2;26.04 +ch3cooh;60.05 +c2h4o;44.05 +c4h10;58.12 +c2h5oh;46.07 +ch2o;30.03 +c6h14;86.18 +hialkanes;118.89 +c5h8;68.12 +c8h16;112.21 +c5h10;70.13 +c3h6;42.08 +c7h8;92.14 +c8h10;106.17 +c3h6o;58.08 +c6h6;78.11 +c4h8;56.11 +c2h6;30.07 +c2h4;28.05 +c7h16;100.2 +c6h12;84.16 +hialkenes;75.78 +ch3oh;32.04 +c5h12;72.15 +c3h8;44.1 +terpenes;160.0 +voc01;46.2 +voc02;30.07 +voc03;44.1 +voc04;58.12 +voc05;72.15 +voc06;106.8 +voc07;28.05 +voc08;42.08 +voc09;26.04 +voc10;68.12 +voc11;136.24 +voc12;67 +voc13;78.11 +voc14;92.14 +voc15;106.17 +voc16;120.0 +voc17;126.8 +voc18;104.7 +voc19;81.5 +voc20;138.8 +voc21;30.03 +voc22;68.8 +voc23;75.3 +voc24;59.1 +voc25;86.9 diff --git 
a/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv new file mode 100644 index 0000000..fef9d59 --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv @@ -0,0 +1,25 @@ +ID;NO;NO2;HONO;CO;SO2;NH3;ALD2;ALDX;BENZENE;ETH;ETHA;ETOH;FORM;IOLE;ISOP;MEOH;OLE;PAR;SESQ;TERP;TOL;XYL;DMS;HCL;SULF;POC;PEC;PNO3;PSO4;PMFINE;PMC +units;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1 +short_description;nitrogen_monoxide;nitrogen_dioxide;nitrous_acid;carbon_monoxide;sulfur_dioxide;ammonia;acetaldehyde;higher_aldehydes;benzene;ethene;ethane;ethanol;formaldehyde;internal_olefin_carbon_bond;isoprene;methanol;terminal_olefin_carbon_bond;paraffin_carbon_bond;sesquiterpenes;terpene;toluene;xylene;dimethyl_sulfide;hydrogen_chloride;sulfuric_acid;primary_organic_carbon;primary_elemental_carbon;primary_nitrate_fine;primary_sulfate_fine;primary_others_fine;pm_coarse +E001;0.72*nox_no;0.18*nox_no;0.1*nox_no;co;so2;nh3;c2h4o;0;c6h6;c2h4;c2h6;c2h5oh;0;0.5*hialkenes;c5h8;ch3oh;c8h16+c5h10+c3h6+c4h8+c6h12+0.5*hialkanes;4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8;0;terpenes;ch2o+c7h8;c8h10;c2h6s;0;0;oc;5.9*bc;0;0;3.3*pm25-3*oc-5.9*bc;0 +E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 
+E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 +E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 +E005;0.823*nox_no2;0.16*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 +E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0; +E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 +E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;c3h6;c2h2+c3h6+ch3cooh;0;0;ch2o;0;0;hcl;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 
+E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.02*pm25;0.01*pm25;0;0.15*pm25;0.82*pm25;pm10-pm25 +E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.35*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 +E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.03*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 +E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;1*pm25;pm10-pm25 +E014;0;0;0;0;0;0;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0;0 +E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 
+E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0.288*pm25;0.675*pm25;0;0.01*pm25;0.037*pm25;pm10-pm25 +E017;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0;0.2*voc17;voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 +E018;0;0;0;0;0;0;0;0;voc13;0;0;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;0;0 +E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*pm25;0.06*pm25;0;0;0.74*pm25;pm10-pm25 +E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.31*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 +E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.31*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 +E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv new file mode 100644 index 0000000..534beb6 --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv @@ -0,0 +1,13 @@ +ID;SO2;DMS;POA;PEC;PSO4;PMFINE;PMC 
+units;mol.s-1.m-2;mol.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2 +short_description;sulfur_dioxide;dimethyl_sulfide;primary_organic_aerosol;primary_elemental_carbon;primary_sulfate_fine;primary_others_fine;pm_coarse +E001;so2;c2h6s;1.8*3*oc;5.9*bc;0;pm25-oc-bc;0 +E002;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 +E003;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 +E004;so2;0;1.8*oc;bc;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 +E005;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 +E006;0;0;0;0;0;0;0 +E007;so2;0;1.3*oc;bc;0;pm25-oc-bc;pm10-pm25 +E008;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 +E009;so2;0;1.44*oc;bc;0;pm25-oc-bc;pm10-pm25 +E010;0;0;0;0;0;pm25;pm10-pm25 \ No newline at end of file diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv new file mode 100644 index 0000000..dc1bd1c --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv @@ -0,0 +1,25 @@ +ID;NO;NO2;HONO;CO;SO2;NH3;ALD2;ALDX;BENZENE;ETH;ETHA;ETOH;FORM;IOLE;ISOP;MEOH;OLE;PAR;SESQ;TERP;TOL;XYL;DMS;HCL;POA;PEC;PNO3;PSO4;PMFINE;PMC +units;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2 
+short_description;nitrogen_monoxide;nitrogen_dioxide;nitrous_acid;carbon_monoxide;sulfur_dioxide;ammonia;acetaldehyde;higher_aldehydes;benzene;ethene;ethane;ethanol;formaldehyde;internal_olefin_carbon_bond;isoprene;methanol;terminal_olefin_carbon_bond;paraffin_carbon_bond;sesquiterpenes;terpene;toluene;xylene;dimethyl_sulfide;hydrogen_chloride;primary_organic_carbon;primary_elemental_carbon;primary_nitrate_fine;primary_sulfate_fine;primary_others_fine;pm_coarse +E001;0.72*nox_no;0.18*nox_no;0.1*nox_no;co;so2;nh3;c2h4o;0;c6h6;c2h4;c2h6;c2h5oh;0;0.5*hialkenes;c5h8;ch3oh;c8h16+c5h10+c3h6+c4h8+c6h12+0.5*hialkanes;4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8;0;terpenes;ch2o+c7h8;c8h10;c2h6s;0;3*oc;5.9*bc;0;0;3.3*pm25-3*oc-5.9*bc;0 +E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 +E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 +E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.8*oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 
+E005;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 +E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;1.3*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 +E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;c3h6;c2h2+c3h6+ch3cooh;0;0;ch2o;0;0;hcl;1.44*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.02*1.3*pm25;0.01*pm25;0;0.15*pm25;0.82*pm25;pm10-pm25 +E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.35*1.8*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 
+E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.03*1.3*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 +E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;1*pm25;pm10-pm25 +E014;0;0;0;0;0;0;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0 +E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.58*1.3*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 +E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0.288*1.3*pm25;0.675*pm25;0;0.01*pm25;0.037*pm25;pm10-pm25 +E017;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0;0.2*voc17;voc17;0;0;0.58*1.3*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 +E018;0;0;0;0;0;0;0;0;voc13;0;0;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;0 +E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*1.3*pm25;0.06*pm25;0;0;0.74*pm25;pm10-pm25 
+E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.31*1.3*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 +E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.31*1.3*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 +E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*1.8*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 diff --git a/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv b/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv new file mode 100644 index 0000000..a85e443 --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv @@ -0,0 +1,28 @@ +ID;E_NO;E_CO;E_SO2;E_NH3;E_ALD;E_CSL;E_ETH;E_HC3;E_HC5;E_HC8;E_HCHO;E_ISO;E_KET;E_OL2;E_OLI;E_OLT;E_ORA1;E_ORA2;E_TOL;E_XYL;E_PM_10;E_PM25J;E_PM25I;E_ECJ;E_ECI;E_ORGJ;E_ORGI;E_NO3J;E_NO3I;E_SO4J;E_SO4I +units;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2 
+short_description;nitrogen_oxides;carbon_monoxide;sulfur_dioxide;ammonia;higher_aldehydes;phenols_cresols;ethane;propane;alkanes_0.5_1;alkanes_1_2;formaldehyde;isoprene;ketones;ethene;alkenes_internal;propene;formic_acid;organic_acids;toluene;xylene_and_higher_aromatics;unspeciated_primary_PM10;unspeciated_primary_PM2.5_accumulation_mode;unspeciated_primary_PM2.5_nuclei_mode;elemental_carbon_PM2.5_accumulation_mode;elemental_carbon_PM2.5_nuclei_mode;organic_carbon_PM2.5_accumulation_mode;organic_carbon_PM2.5_nuclei_mode;nitrate_PM2.5_accumulation_mode;nitrate_PM2.5_nuclei_mode;sulfate_PM2.5_accumulation_mode;sulfate_PM2.5_nuclei_mode +E001;nox_no;co;so2;nh3;c2h4o;0;c2h6;c4h10+1.198*c2h5oh+0.402*ch3oh+0.519*c3h8;1.075*c2h6s+0.956*c6h14+0.43*hialkanes+0.956*c7h16+0.956*c5h12;0.57*hialkanes;ch2o;c5h8;0.253*c3h6o;c2h4;0.5*c5h10+hialkenes+terpenes;c8h16+0.5*c5h10+c3h6+c4h8+c6h12;0;0;c7h8+0.293*c6h6;c8h10;0;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E002;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;pm10;(pm25-oc-bc)*0.676;(pm25-oc-bc)*0.169;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.124;(pm25-oc-bc)*0.031 +E003;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;pm10;(pm25-oc-bc)*0.7288;(pm25-oc-bc)*0.1822;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.0712;(pm25-oc-bc)*0.0178 +E004;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;pm10;(pm25-oc-bc)*0.7624;(pm25-oc-bc)*0.1906;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.0376;(pm25-oc-bc)*0.0094 
+E005;nox_no2;co;so2;nh3;voc22;0;voc02;voc03+voc04+0.4*voc09;voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;pm10;(pm25-oc-bc)*0.7528;(pm25-oc-bc)*0.1882;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.0472;(pm25-oc-bc)*0.0118 +E006;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E007;nox_no2;co;so2;0;voc22;0;voc02;voc03+0.4*voc09;voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc17;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E008;nox_no2;co;so2;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;pm10;(pm25-oc-bc)*0.3704;(pm25-oc-bc)*0.0926;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.4296;(pm25-oc-bc)*0.1074 +E009;nox_no;co;so2;nh3;0;0;0;0.343*c2h2+0.402*ch3oh;0;0;ch2o;0;0;c2h4;0;c3h6;0;ch3cooh;0.293*c6h6;0;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E010;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.656;pm25*0.164;pm25*0.008;pm25*0.002;pm25*0.016;pm25*0.004;0;0;pm25*0.12;pm25*0.03 +E011;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.36;pm25*0.09;pm25*0.144;pm25*0.036;pm25*0.28;pm25*0.07;0;0;pm25*0.016;pm25*0.004 +E012;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.688;pm25*0.172;pm25*0.008;pm25*0.002;pm25*0.024;pm25*0.006;0;0;pm25*0.08;pm25*0.02 
+E013;nox_no2;co;so2;nh3;0;0;voc02;voc03+voc04;voc05+0.43*voc06;0.57*voc06;0;0;0;0;voc12;0;0;0;0.293*voc13+voc14;voc15;pm10;pm25*0.8;pm25*0.2;0;0;0;0;0;0;0;0 +E014;0;0;0;0;0;0;0;0.95*voc01+0.69*voc18+voc20;0.05*voc01+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;0;0;voc23;0;0;0;0;0;voc14;voc15;0;0;0;0;0;0;0;0;0;0;0 +E015;nox_no2;co;so2;nh3;voc22;0;voc02;voc03+voc04+0.4*voc09;voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.168;pm25*0.042;pm25*0.168;pm25*0.042;pm25*0.464;pm25*0.116;0;0;pm25*0.008;pm25*0.002 +E016;nox_no2;co;so2;nh3;voc22;0;voc02;voc03+voc04+0.4*voc09;voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc15;pm10;pm25*0.0296;pm25*0.0074;pm25*0.54;pm25*0.135;pm25*0.2304;pm25*0.0576;0;0;pm25*0.008;pm25*0.002 +E017;nox_no2;co;so2;nh3;voc22;0;voc02;voc03;0;voc17;voc21;0;0;voc07;voc12;voc08;0;0;0;0;pm10;pm25*0.168;pm25*0.042;pm25*0.168;pm25*0.042;pm25*0.464;pm25*0.116;0;0;pm25*0.008;pm25*0.002 +E018;0;0;0;0;0;0;0;voc03+voc04;voc05;0;0;0;0;0;voc12;0;0;0;0.293*voc13+voc14;voc15;0;0;0;0;0;0;0;0;0;0;0 +E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;pm25*0.12;pm25*0.03;pm25*0.36;pm25*0.09;pm25*0.32;pm25*0.08;0;0;0;0 +E020;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.2;pm25*0.05;pm25*0.328;pm25*0.082;pm25*0.248;pm25*0.062;0;0;pm25*0.024;pm25*0.006 +E021;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.392;pm25*0.098;pm25*0.16;pm25*0.04;pm25*0.248;pm25*0.062;0;0;0;0 +E022;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;pm25*0.296;pm25*0.074;pm25*0.12;pm25*0.03;pm25*0.384;pm25*0.096;0;0;0;0 
+E023;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;pm25*0.8;pm25*0.2;0;0;0;0;0;0;0;0 +E024;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 + diff --git a/data/profiles/temporal/TemporalProfile_Daily.csv b/data/profiles/temporal/TemporalProfile_Daily.csv new file mode 100644 index 0000000..e48ec1d --- /dev/null +++ b/data/profiles/temporal/TemporalProfile_Daily.csv @@ -0,0 +1,7 @@ +TP_D,0,1,2,3,4,5,6 +D001,1,1,1,1,1,1,1 +D002,1.06,1.06,1.06,1.06,1.06,0.85,0.85 +D003,1.08,1.08,1.08,1.08,1.08,0.8,0.8 +D004,1.2,1.2,1.2,1.2,1.2,0.5,0.5 +D005,1.02,1.06,1.08,1.1,1.14,0.81,0.79 + diff --git a/data/profiles/temporal/TemporalProfile_Hourly.csv b/data/profiles/temporal/TemporalProfile_Hourly.csv new file mode 100644 index 0000000..857270e --- /dev/null +++ b/data/profiles/temporal/TemporalProfile_Hourly.csv @@ -0,0 +1,12 @@ +TP_H,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23 +H001,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1. 
+H002,0.79,0.72,0.72,0.71,0.74,0.8,0.92,1.08,1.19,1.22,1.21,1.21,1.17,1.15,1.14,1.13,1.1,1.07,1.04,1.02,1.02,1.01,0.96,0.88 +H003,0.38,0.36,0.36,0.36,0.37,0.69,1.19,1.53,1.57,1.56,1.35,1.16,1.07,1.06,1,0.98,0.99,1.12,1.41,1.52,1.39,1.35,1.19,0.42 +H004,0.75,0.75,0.78,0.82,0.88,0.95,1.02,1.09,1.16,1.22,1.28,1.3,1.22,1.24,1.25,1.16,1.08,1.01,0.95,0.9,0.85,0.81,0.78,0.75 +H005,0.5,0.35,0.2,0.1,0.1,0.2,0.75,1.25,1.4,1.5,1.5,1.5,1.5,1.5,1.5,1.5,1.5,1.4,1.25,1.1,1,0.9,0.8,0.7 +H006,0.19,0.09,0.06,0.05,0.09,0.22,0.86,1.84,1.86,1.41,1.24,1.2,1.32,1.44,1.45,1.59,2.03,2.08,1.51,1.06,0.74,0.62,0.61,0.44 +H007,0.6,0.6,0.6,0.6,0.6,0.65,0.75,0.9,1.1,1.35,1.45,1.6,1.65,1.75,1.7,1.55,1.35,1.1,0.9,0.75,0.65,0.6,0.6,0.6 +H008,0.06,0.168,0.192,0.18,0.24,0.192,0.204,0.216,0.336,0.6,1.08,1.8,2.52,3.12,3.12,3.12,2.64,1.68,0.96,0.72,0.42,0.36,0.036,0.036 +H009,0.597235,0.552995,0.497696,0.508756,0.575115,0.741014,0.917972,1.15023,1.32719,1.42673,1.45991,1.43779,1.39355,1.37143,1.36037,1.29401,1.24977,1.19447,1.09493,0.973272,0.829493,0.729954,0.685714,0.630415 +H010,0.613383,0.557621,0.490706,0.468401,0.479554,0.568773,0.669145,0.814126,0.97026,1.12639,1.22677,1.27138,1.29368,1.3829,1.46097,1.47212,1.46097,1.46097,1.3829,1.23792,1.08178,0.936803,0.847584,0.724907 + diff --git a/data/profiles/temporal/TemporalProfile_Monthly.csv b/data/profiles/temporal/TemporalProfile_Monthly.csv new file mode 100644 index 0000000..b66babd --- /dev/null +++ b/data/profiles/temporal/TemporalProfile_Monthly.csv @@ -0,0 +1,12 @@ +TP_M,1,2,3,4,5,6,7,8,9,10,11,12 +M001,1,1,1,1,1,1,1,1,1,1,1,1 +M002,1.2,1.15,1.05,1,0.9,0.85,0.8,0.875,0.95,1,1.075,1.15 +M003,1.7,1.5,1.3,1,0.7,0.4,0.2,0.4,0.7,1.05,1.4,1.65 +M004,1.1,1.075,1.05,1,0.95,0.9,0.93,0.95,0.97,1,1.025,1.05 +M005,1.2,1.2,1.2,0.8,0.8,0.8,0.8,0.8,0.8,1.2,1.2,1.2 +M006,0.95,0.96,1.02,1,1.01,1.03,1.03,1.01,1.04,1.03,1.01,0.91 +M007,0.88,0.92,0.98,1.03,1.05,1.06,1.01,1.02,1.06,1.05,1.01,0.93 +M008,0.88,0.92,0.98,1.03,1.05,1.06,1.01,1.02,1.06,1.05,1.01,0.93 
+M009,0.45,1.3,2.35,1.7,0.85,0.85,0.85,1,1.1,0.65,0.45,0.45 +M999,0,0,0,0,0,0,0,0,1,0,0,0 + diff --git a/data/profiles/temporal/tz_world_country_iso3166.csv b/data/profiles/temporal/tz_world_country_iso3166.csv new file mode 100644 index 0000000..4c82180 --- /dev/null +++ b/data/profiles/temporal/tz_world_country_iso3166.csv @@ -0,0 +1,423 @@ +"country";"country_code";"time_zone";"time_zone_code";"country_code_alpha" +"Afghanistan";4;"Asia/Kabul";237;"AFG" +"Albania";8;"Europe/Tirane";355;"ALB" +"Antarctica";10;"uninhabited";418;"ATA" +"Algeria";12;"Africa/Algiers";4;"DZA" +"American Samoa";16;"Pacific/Pago_Pago";405;"ASM" +"Andorra";20;"Europe/Andorra";309;"AND" +"Angola";24;"Africa/Luanda";34;"AGO" +"Antigua and Barbuda";28;"America/Antigua";56;"ATG" +"Azerbaijan";31;"Asia/Baku";212;"AZE" +"Argentina";32;"America/Argentina/Salta";65;"ARG" +"Argentina";32;"America/Argentina/Catamarca";59;"ARG" +"Argentina";32;"America/Argentina/Tucuman";68;"ARG" +"Argentina";32;"America/Argentina/Cordoba";60;"ARG" +"Argentina";32;"America/Argentina/Rio_Gallegos";64;"ARG" +"Argentina";32;"America/Argentina/La_Rioja";62;"ARG" +"Argentina";32;"America/Argentina/Buenos_Aires";58;"ARG" +"Argentina";32;"America/Argentina/Ushuaia";69;"ARG" +"Argentina";32;"America/Argentina/Jujuy";61;"ARG" +"Argentina";32;"America/Argentina/San_Juan";66;"ARG" +"Argentina";32;"America/Argentina/San_Luis";67;"ARG" +"Argentina";32;"America/Argentina/Mendoza";63;"ARG" +"Australia";36;"Australia/Brisbane";297;"AUS" +"Australia";36;"Australia/Broken_Hill";298;"AUS" +"Australia";36;"Australia/Lindeman";303;"AUS" +"Australia";36;"Australia/Darwin";300;"AUS" +"Australia";36;"Australia/Melbourne";305;"AUS" +"Australia";36;"Australia/Perth";306;"AUS" +"Australia";36;"Antarctica/Macquarie";201;"AUS" +"Australia";36;"Australia/Lord_Howe";304;"AUS" +"Australia";36;"Australia/Eucla";301;"AUS" +"Australia";36;"Australia/Hobart";302;"AUS" +"Australia";36;"Australia/Sydney";307;"AUS" 
+"Australia";36;"Australia/Currie";299;"AUS" +"Australia";36;"Australia/Adelaide";296;"AUS" +"Austria";40;"Europe/Vienna";360;"AUT" +"Bahamas";44;"America/Nassau";154;"BHS" +"Bahrain";48;"Asia/Bahrain";211;"BHR" +"Bangladesh";50;"Asia/Dhaka";223;"BGD" +"Armenia";51;"Asia/Yerevan";285;"ARM" +"Barbados";52;"America/Barbados";75;"BRB" +"Belgium";56;"Europe/Brussels";315;"BEL" +"Bermuda";60;"Atlantic/Bermuda";287;"BMU" +"Bhutan";64;"Asia/Thimphu";275;"BTN" +"Bolivia Plurinational State of";68;"America/La_Paz";133;"BOL" +"Bosnia and Herzegovina";70;"Europe/Sarajevo";349;"BIH" +"Botswana";72;"Africa/Gaborone";23;"BWA" +"Brazil";76;"America/Bahia";73;"BRA" +"Brazil";76;"America/Santarem";177;"BRA" +"Brazil";76;"America/Sao_Paulo";180;"BRA" +"Brazil";76;"America/Araguaina";57;"BRA" +"Brazil";76;"America/Belem";76;"BRA" +"Brazil";76;"America/Maceio";137;"BRA" +"Brazil";76;"America/Cuiaba";93;"BRA" +"Brazil";76;"America/Manaus";139;"BRA" +"Brazil";76;"America/Eirunepe";102;"BRA" +"Brazil";76;"America/Noronha";158;"BRA" +"Brazil";76;"America/Porto_Velho";169;"BRA" +"Brazil";76;"America/Boa_Vista";79;"BRA" +"Brazil";76;"America/Campo_Grande";83;"BRA" +"Brazil";76;"America/Recife";173;"BRA" +"Brazil";76;"America/Rio_Branco";176;"BRA" +"Brazil";76;"America/Fortaleza";105;"BRA" +"Belize";84;"America/Belize";77;"BLZ" +"British Indian Ocean Territory";86;"Indian/Chagos";368;"IOT" +"Solomon Islands";90;"Pacific/Guadalcanal";391;"SLB" +"Virgin Islands British";92;"America/Tortola";195;"VGB" +"Brunei Darussalam";96;"Asia/Brunei";217;"BRN" +"Bulgaria";100;"Europe/Sofia";352;"BGR" +"Myanmar";104;"Asia/Rangoon";263;"MMR" +"Burundi";108;"Africa/Bujumbura";12;"BDI" +"Belarus";112;"Europe/Minsk";338;"BLR" +"Cambodia";116;"Asia/Phnom_Penh";258;"KHM" +"Cameroon";120;"Africa/Douala";20;"CMR" +"Canada";124;"America/Edmonton";101;"CAN" +"Canada";124;"America/Iqaluit";127;"CAN" +"Canada";124;"America/Vancouver";196;"CAN" +"Canada";124;"America/Winnipeg";198;"CAN" 
+"Canada";124;"America/Moncton";149;"CAN" +"Canada";124;"America/Dawson";96;"CAN" +"Canada";124;"America/Montreal";152;"CAN" +"Canada";124;"America/Goose_Bay";108;"CAN" +"Canada";124;"America/Inuvik";126;"CAN" +"Canada";124;"America/Rainy_River";171;"CAN" +"Canada";124;"America/Atikokan";72;"CAN" +"Canada";124;"America/Dawson_Creek";97;"CAN" +"Canada";124;"America/Coral_Harbour";90;"CAN" +"Canada";124;"America/Toronto";194;"CAN" +"Canada";124;"America/Creston";92;"CAN" +"Canada";124;"America/Nipigon";156;"CAN" +"Canada";124;"America/Regina";174;"CAN" +"Canada";124;"America/Thunder_Bay";192;"CAN" +"Canada";124;"America/Fort_Nelson";104;"CAN" +"Canada";124;"America/Pangnirtung";164;"CAN" +"Canada";124;"America/Halifax";115;"CAN" +"Canada";124;"America/Yellowknife";200;"CAN" +"Canada";124;"America/Resolute";175;"CAN" +"Canada";124;"America/Rankin_Inlet";172;"CAN" +"Canada";124;"America/Glace_Bay";106;"CAN" +"Canada";124;"America/Blanc-Sablon";78;"CAN" +"Canada";124;"America/Cambridge_Bay";82;"CAN" +"Canada";124;"America/Swift_Current";189;"CAN" +"Canada";124;"America/St_Johns";184;"CAN" +"Canada";124;"America/Whitehorse";197;"CAN" +"Cape Verde";132;"Atlantic/Cape_Verde";289;"CPV" +"Cayman Islands";136;"America/Cayman";87;"CYM" +"Central African Republic";140;"Africa/Bangui";7;"CAF" +"Sri Lanka";144;"Asia/Colombo";221;"LKA" +"Chad";148;"Africa/Ndjamena";44;"TCD" +"Chile";152;"America/Punta_Arenas";1003;"CHL" +"Chile";152;"Pacific/Easter";383;"CHL" +"Chile";152;"America/Santiago";178;"CHL" +"China";156;"Asia/Shanghai";268;"CHN" +"China";156;"Asia/Harbin";228;"CHN" +"China";156;"Asia/Kashgar";240;"CHN" +"China";156;"Asia/Chongqing";220;"CHN" +"China";156;"Asia/Urumqi";279;"CHN" +"Taiwan Province of China";158;"Asia/Taipei";271;"TWN" +"Christmas Island";162;"Indian/Christmas";369;"CXR" +"Cocos (Keeling) Islands";166;"Indian/Cocos";370;"CCK" +"Colombia";170;"America/Bogota";80;"COL" +"Comoros";174;"Indian/Comoro";371;"COM" +"Mayotte";175;"Indian/Mayotte";376;"MYT" 
+"Congo";178;"Africa/Brazzaville";11;"COG" +"Congo the Democratic Republic of the";180;"Africa/Lubumbashi";35;"COD" +"Congo the Democratic Republic of the";180;"Africa/Kinshasa";30;"COD" +"Cook Islands";184;"Pacific/Rarotonga";410;"COK" +"Costa Rica";188;"America/Costa_Rica";91;"CRI" +"Croatia";191;"Europe/Zagreb";364;"HRV" +"Cuba";192;"America/Havana";116;"CUB" +"Cyprus";196;"Asia/Nicosia";253;"CYP" +"Cyprus";196;"Asia/Famagusta";1000;"CYP" +"Czech Republic";203;"Europe/Prague";344;"CZE" +"Benin";204;"Africa/Porto-Novo";48;"BEN" +"Denmark";208;"Europe/Copenhagen";320;"DNK" +"Dominica";212;"America/Dominica";100;"DMA" +"Dominican Republic";214;"America/Santo_Domingo";179;"DOM" +"Ecuador";218;"America/Guayaquil";113;"ECU" +"Ecuador";218;"Pacific/Galapagos";389;"ECU" +"El Salvador";222;"America/El_Salvador";103;"SLV" +"Equatorial Guinea";226;"Africa/Malabo";37;"GNQ" +"Ethiopia";231;"Africa/Addis_Ababa";3;"ETH" +"Eritrea";232;"Africa/Asmara";5;"ERI" +"Estonia";233;"Europe/Tallinn";354;"EST" +"Faroe Islands";234;"Atlantic/Faroe";290;"FRO" +"Falkland Islands (Malvinas)";238;"Atlantic/Stanley";295;"FLK" +"South Georgia and the South Sandwich Islands";239;"Atlantic/South_Georgia";293;"SGS" +"Fiji";242;"Pacific/Fiji";387;"FJI" +"Finland";246;"Europe/Helsinki";324;"FIN" +"Aland Islands";248;"Europe/Mariehamn";337;"ALA" +"France";250;"Europe/Paris";342;"FRA" +"French Guiana";254;"America/Cayenne";86;"GUF" +"French Polynesia";258;"Pacific/Gambier";390;"PYF" +"French Polynesia";258;"Pacific/Marquesas";399;"PYF" +"French Polynesia";258;"Pacific/Tahiti";412;"PYF" +"French Southern Territories";260;"Indian/Kerguelen";372;"ATF" +"Djibouti";262;"Africa/Djibouti";19;"DJI" +"Gabon";266;"Africa/Libreville";32;"GAB" +"Georgia";268;"Asia/Tbilisi";273;"GEO" +"Gambia";270;"Africa/Banjul";8;"GMB" +"Palestine State of";275;"Asia/Hebron";229;"PSE" +"Palestine State of";275;"Asia/Gaza";227;"PSE" +"Germany";276;"Europe/Berlin";313;"DEU" +"Germany";276;"Europe/Busingen";318;"DEU" 
+"Ghana";288;"Africa/Accra";2;"GHA" +"Gibraltar";292;"Europe/Gibraltar";322;"GIB" +"Kiribati";296;"Pacific/Kiritimati";395;"KIR" +"Kiribati";296;"Pacific/Tarawa";413;"KIR" +"Kiribati";296;"Pacific/Enderbury";385;"KIR" +"Greece";300;"Europe/Athens";311;"GRC" +"Greenland";304;"America/Thule";191;"GRL" +"Greenland";304;"America/Scoresbysund";181;"GRL" +"Greenland";304;"America/Godthab";107;"GRL" +"Greenland";304;"America/Danmarkshavn";95;"GRL" +"Grenada";308;"America/Grenada";110;"GRD" +"Guadeloupe";312;"America/Guadeloupe";111;"GLP" +"Guam";316;"Pacific/Guam";392;"GUM" +"Guatemala";320;"America/Guatemala";112;"GTM" +"Guinea";324;"Africa/Conakry";16;"GIN" +"Guyana";328;"America/Guyana";114;"GUY" +"Haiti";332;"America/Port-au-Prince";168;"HTI" +"Holy See (Vatican City State)";336;"Europe/Vatican";359;"VAT" +"Honduras";340;"America/Tegucigalpa";190;"HND" +"Hong Kong";344;"Asia/Hong_Kong";231;"HKG" +"Hungary";348;"Europe/Budapest";317;"HUN" +"Iceland";352;"Atlantic/Reykjavik";292;"ISL" +"India";356;"Asia/Kolkata";243;"IND" +"Indonesia";360;"Asia/Jakarta";234;"IDN" +"Indonesia";360;"Asia/Pontianak";259;"IDN" +"Indonesia";360;"Asia/Makassar";250;"IDN" +"Indonesia";360;"Asia/Jayapura";235;"IDN" +"Iran Islamic Republic of";364;"Asia/Tehran";274;"IRN" +"Iraq";368;"Asia/Baghdad";210;"IRQ" +"Ireland";372;"Europe/Dublin";321;"IRL" +"Israel";376;"Asia/Jerusalem";236;"ISR" +"Italy";380;"Europe/Rome";346;"ITA" +"Cote d'Ivoire";384;"Africa/Abidjan";1;"CIV" +"Jamaica";388;"America/Jamaica";128;"JAM" +"Japan";392;"Asia/Tokyo";276;"JPN" +"Kazakhstan";398;"Asia/Almaty";204;"KAZ" +"Kazakhstan";398;"Asia/Atyrau";1001;"KAZ" +"Kazakhstan";398;"Asia/Aqtau";207;"KAZ" +"Kazakhstan";398;"Asia/Aqtobe";208;"KAZ" +"Kazakhstan";398;"Asia/Qyzylorda";262;"KAZ" +"Kazakhstan";398;"Asia/Oral";257;"KAZ" +"Jordan";400;"Asia/Amman";205;"JOR" +"Kenya";404;"Africa/Nairobi";43;"KEN" +"Korea Democratic People's Republic of";408;"Asia/Pyongyang";260;"PRK" +"Korea Republic of";410;"Asia/Seoul";267;"KOR" 
+"Kuwait";414;"Asia/Kuwait";247;"KWT" +"Kyrgyzstan";417;"Asia/Bishkek";216;"KGZ" +"Lao Peoples Democratic Republic";418;"Asia/Vientiane";281;"LAO" +"Lebanon";422;"Asia/Beirut";215;"LBN" +"Lesotho";426;"Africa/Maseru";39;"LSO" +"Latvia";428;"Europe/Riga";345;"LVA" +"Liberia";430;"Africa/Monrovia";42;"LBR" +"Libya";434;"Africa/Tripoli";50;"LBY" +"Liechtenstein";438;"Europe/Vaduz";358;"LIE" +"Lithuania";440;"Europe/Vilnius";361;"LTU" +"Luxembourg";442;"Europe/Luxembourg";334;"LUX" +"Macao";446;"Asia/Macau";248;"MAC" +"Madagascar";450;"Indian/Antananarivo";367;"MDG" +"Malawi";454;"Africa/Blantyre";10;"MWI" +"Malaysia";458;"Asia/Kuala_Lumpur";245;"MYS" +"Malaysia";458;"Asia/Kuching";246;"MYS" +"Maldives";462;"Indian/Maldives";374;"MDV" +"Mali";466;"Africa/Bamako";6;"MLI" +"Malta";470;"Europe/Malta";336;"MLT" +"Martinique";474;"America/Martinique";141;"MTQ" +"Mauritania";478;"Africa/Nouakchott";46;"MRT" +"Mauritius";480;"Indian/Mauritius";375;"MUS" +"Mexico";484;"America/Monterrey";150;"MEX" +"Mexico";484;"America/Bahia_Banderas";74;"MEX" +"Mexico";484;"America/Ojinaga";162;"MEX" +"Mexico";484;"America/Cancun";84;"MEX" +"Mexico";484;"America/Mazatlan";143;"MEX" +"Mexico";484;"America/Matamoros";142;"MEX" +"Mexico";484;"America/Merida";145;"MEX" +"Mexico";484;"America/Mexico_City";147;"MEX" +"Mexico";484;"America/Chihuahua";89;"MEX" +"Mexico";484;"America/Tijuana";193;"MEX" +"Mexico";484;"America/Hermosillo";117;"MEX" +"Monaco";492;"Europe/Monaco";339;"MCO" +"Mongolia";496;"Asia/Choibalsan";219;"MNG" +"Mongolia";496;"Asia/Hovd";232;"MNG" +"Mongolia";496;"Asia/Ulaanbaatar";278;"MNG" +"Moldova Republic of";498;"Europe/Chisinau";319;"MDA" +"Montenegro";499;"Europe/Podgorica";343;"MNE" +"Montserrat";500;"America/Montserrat";153;"MSR" +"Morocco";504;"Africa/Casablanca";14;"MAR" +"Mozambique";508;"Africa/Maputo";38;"MOZ" +"Oman";512;"Asia/Muscat";252;"OMN" +"Nauru";520;"Pacific/Nauru";401;"NRU" +"Nepal";524;"Asia/Kathmandu";241;"NPL" 
+"Netherlands";528;"Europe/Amsterdam";308;"NLD" +"Curacao";531;"America/Curacao";94;"CUW" +"Aruba";533;"America/Aruba";70;"ABW" +"Sint Maarten (Dutch part)";534;"America/Lower_Princes";136;"SXM" +"Bonaire Sint Eustatius and Saba";535;"America/Kralendijk";132;"BES" +"New Caledonia";540;"Pacific/Noumea";404;"NCL" +"Vanuatu";548;"Pacific/Efate";384;"VUT" +"New Zealand";554;"Pacific/Chatham";381;"NZL" +"New Zealand";554;"Pacific/Auckland";379;"NZL" +"Nicaragua";558;"America/Managua";138;"NIC" +"Niger";562;"Africa/Niamey";45;"NER" +"Nigeria";566;"Africa/Lagos";31;"NGA" +"Niue";570;"Pacific/Niue";402;"NIU" +"Norfolk Island";574;"Pacific/Norfolk";403;"NFK" +"Norway";578;"Europe/Oslo";341;"NOR" +"Northern Mariana Islands";580;"Pacific/Saipan";411;"MNP" +"United States Minor Outlying Islands";581;"Pacific/Johnston";394;"UMI" +"United States Minor Outlying Islands";581;"Pacific/Wake";415;"UMI" +"United States Minor Outlying Islands";581;"Pacific/Midway";400;"UMI" +"Micronesia Federated States of";583;"Pacific/Chuuk";382;"FSM" +"Micronesia Federated States of";583;"Pacific/Kosrae";396;"FSM" +"Micronesia Federated States of";583;"Pacific/Yap";417;"FSM" +"Micronesia Federated States of";583;"Pacific/Pohnpei";408;"FSM" +"Marshall Islands";584;"Pacific/Kwajalein";397;"MHL" +"Marshall Islands";584;"Pacific/Majuro";398;"MHL" +"Palau";585;"Pacific/Palau";406;"PLW" +"Pakistan";586;"Asia/Karachi";239;"PAK" +"Panama";591;"America/Panama";163;"PAN" +"Papua New Guinea";598;"Pacific/Port_Moresby";409;"PNG" +"Papua New Guinea";598;"Pacific/Bougainville";380;"PNG" +"Paraguay";600;"America/Asuncion";71;"PRY" +"Peru";604;"America/Lima";134;"PER" +"Philippines";608;"Asia/Manila";251;"PHL" +"Pitcairn";612;"Pacific/Pitcairn";407;"PCN" +"Poland";616;"Europe/Warsaw";363;"POL" +"Portugal";620;"Atlantic/Azores";286;"PRT" +"Portugal";620;"Europe/Lisbon";331;"PRT" +"Portugal";620;"Atlantic/Madeira";291;"PRT" +"Guinea-Bissau";624;"Africa/Bissau";9;"GNB" +"Timor-Leste";626;"Asia/Dili";224;"TLS" +"Puerto 
Rico";630;"America/Puerto_Rico";170;"PRI" +"Qatar";634;"Asia/Qatar";261;"QAT" +"Reunion";638;"Indian/Reunion";377;"REU" +"Romania";642;"Europe/Bucharest";316;"ROU" +"Russian Federation";643;"Asia/Sakhalin";265;"RUS" +"Russian Federation";643;"Asia/Novosibirsk";255;"RUS" +"Russian Federation";643;"Asia/Anadyr";206;"RUS" +"Russian Federation";643;"Asia/Irkutsk";233;"RUS" +"Russian Federation";643;"Europe/Simferopol";350;"RUS" +"Russian Federation";643;"Asia/Srednekolymsk";270;"RUS" +"Russian Federation";643;"Asia/Magadan";249;"RUS" +"Russian Federation";643;"Asia/Tomsk";277;"RUS" +"Russian Federation";643;"Asia/Khandyga";242;"RUS" +"Russian Federation";643;"Europe/Kirov";330;"RUS" +"Russian Federation";643;"Asia/Ust-Nera";280;"RUS" +"Russian Federation";643;"Europe/Volgograd";362;"RUS" +"Russian Federation";643;"Asia/Novokuznetsk";254;"RUS" +"Russian Federation";643;"Asia/Barnaul";214;"RUS" +"Russian Federation";643;"Asia/Krasnoyarsk";244;"RUS" +"Russian Federation";643;"Asia/Chita";218;"RUS" +"Russian Federation";643;"Asia/Yekaterinburg";284;"RUS" +"Russian Federation";643;"Europe/Ulyanovsk";356;"RUS" +"Russian Federation";643;"Europe/Kaliningrad";328;"RUS" +"Russian Federation";643;"Asia/Omsk";256;"RUS" +"Russian Federation";643;"Europe/Samara";347;"RUS" +"Russian Federation";643;"Europe/Moscow";340;"RUS" +"Russian Federation";643;"Asia/Yakutsk";283;"RUS" +"Russian Federation";643;"Europe/Saratov";1002;"RUS" +"Russian Federation";643;"Asia/Kamchatka";238;"RUS" +"Russian Federation";643;"Europe/Astrakhan";310;"RUS" +"Russian Federation";643;"Asia/Vladivostok";282;"RUS" +"Rwanda";646;"Africa/Kigali";29;"RWA" +"Saint Barthelemy";652;"America/St_Barthelemy";183;"BLM" +"Saint Helena Ascension and Tristan da Cunha";654;"Atlantic/St_Helena";294;"SHN" +"Saint Kitts and Nevis";659;"America/St_Kitts";185;"KNA" +"Anguilla";660;"America/Anguilla";55;"AIA" +"Saint Lucia";662;"America/St_Lucia";186;"LCA" +"Saint Martin (French part)";663;"America/Marigot";140;"MAF" +"Saint 
Pierre and Miquelon";666;"America/Miquelon";148;"SPM" +"Saint Vincent and the Grenadines";670;"America/St_Vincent";188;"VCT" +"San Marino";674;"Europe/San_Marino";348;"SMR" +"Sao Tome and Principe";678;"Africa/Sao_Tome";49;"STP" +"Saudi Arabia";682;"Asia/Riyadh";264;"SAU" +"Senegal";686;"Africa/Dakar";17;"SEN" +"Serbia";688;"Europe/Belgrade";312;"SRB" +"Seychelles";690;"Indian/Mahe";373;"SYC" +"Sierra Leone";694;"Africa/Freetown";22;"SLE" +"Singapore";702;"Asia/Singapore";269;"SGP" +"Slovakia";703;"Europe/Bratislava";314;"SVK" +"Viet Nam";704;"Asia/Ho_Chi_Minh";230;"VNM" +"Slovenia";705;"Europe/Ljubljana";332;"SVN" +"Somalia";706;"Africa/Mogadishu";41;"SOM" +"South Africa";710;"Africa/Johannesburg";25;"ZAF" +"Zimbabwe";716;"Africa/Harare";24;"ZWE" +"Spain";724;"Africa/Ceuta";15;"ESP" +"Spain";724;"Atlantic/Canary";288;"ESP" +"Spain";724;"Europe/Madrid";335;"ESP" +"South Sudan";728;"Africa/Juba";26;"SSD" +"Sudan";729;"Africa/Khartoum";28;"SDN" +"Western Sahara";732;"Africa/El_Aaiun";21;"ESH" +"Suriname";740;"America/Paramaribo";165;"SUR" +"Svalbard and Jan Mayen";744;"Arctic/Longyearbyen";202;"SJM" +"Swaziland";748;"Africa/Mbabane";40;"SWZ" +"Sweden";752;"Europe/Stockholm";353;"SWE" +"Switzerland";756;"Europe/Zurich";366;"CHE" +"Syrian Arab Republic";760;"Asia/Damascus";222;"SYR" +"Tajikistan";762;"Asia/Dushanbe";226;"TJK" +"Thailand";764;"Asia/Bangkok";213;"THA" +"Togo";768;"Africa/Lome";33;"TGO" +"Tokelau";772;"Pacific/Fakaofo";386;"TKL" +"Tonga";776;"Pacific/Tongatapu";414;"TON" +"Trinidad and Tobago";780;"America/Port_of_Spain";167;"TTO" +"United Arab Emirates";784;"Asia/Dubai";225;"ARE" +"Tunisia";788;"Africa/Tunis";51;"TUN" +"Turkey";792;"Europe/Istanbul";326;"TUR" +"Turkmenistan";795;"Asia/Ashgabat";209;"TKM" +"Turks and Caicos Islands";796;"America/Grand_Turk";109;"TCA" +"Tuvalu";798;"Pacific/Funafuti";388;"TUV" +"Uganda";800;"Africa/Kampala";27;"UGA" +"Ukraine";804;"Europe/Kiev";329;"UKR" +"Ukraine";804;"Europe/Zaporozhye";365;"UKR" 
+"Ukraine";804;"Europe/Uzhgorod";357;"UKR" +"Macedonia the Former Yugoslav Republic of";807;"Europe/Skopje";351;"MKD" +"Egypt";818;"Africa/Cairo";13;"EGY" +"United Kingdom";826;"Europe/London";333;"GBR" +"Guernsey";831;"Europe/Guernsey";323;"GGY" +"Jersey";832;"Europe/Jersey";327;"JEY" +"Isle of Man";833;"Europe/Isle_of_Man";325;"IMN" +"Tanzania United Republic of";834;"Africa/Dar_es_Salaam";18;"TZA" +"United States";840;"America/Anchorage";54;"USA" +"United States";840;"America/Los_Angeles";135;"USA" +"United States";840;"America/Indiana/Tell_City";122;"USA" +"United States";840;"America/Denver";98;"USA" +"United States";840;"America/Detroit";99;"USA" +"United States";840;"America/Phoenix";166;"USA" +"United States";840;"America/Yakutat";199;"USA" +"United States";840;"America/Metlakatla";146;"USA" +"United States";840;"America/Indiana/Marengo";120;"USA" +"United States";840;"America/Boise";81;"USA" +"United States";840;"America/Indiana/Petersburg";121;"USA" +"United States";840;"America/North_Dakota/New_Salem";161;"USA" +"United States";840;"Pacific/Honolulu";393;"USA" +"United States";840;"America/Indiana/Winamac";125;"USA" +"United States";840;"America/Nome";157;"USA" +"United States";840;"America/Sitka";182;"USA" +"United States";840;"America/Chicago";88;"USA" +"United States";840;"America/Adak";53;"USA" +"United States";840;"America/North_Dakota/Center";160;"USA" +"United States";840;"America/Kentucky/Monticello";131;"USA" +"United States";840;"America/Indiana/Knox";119;"USA" +"United States";840;"America/Indiana/Vincennes";124;"USA" +"United States";840;"America/North_Dakota/Beulah";159;"USA" +"United States";840;"America/Kentucky/Louisville";130;"USA" +"United States";840;"America/Indiana/Indianapolis";118;"USA" +"United States";840;"America/Indiana/Vevay";123;"USA" +"United States";840;"America/New_York";155;"USA" +"United States";840;"America/Juneau";129;"USA" +"United States";840;"America/Menominee";144;"USA" +"Virgin Islands 
U.S.";850;"America/St_Thomas";187;"VIR" +"Burkina Faso";854;"Africa/Ouagadougou";47;"BFA" +"Uruguay";858;"America/Montevideo";151;"URY" +"Uzbekistan";860;"Asia/Tashkent";272;"UZB" +"Uzbekistan";860;"Asia/Samarkand";266;"UZB" +"Venezuela Bolivarian Republic of";862;"America/Caracas";85;"VEN" +"Wallis and Futuna";876;"Pacific/Wallis";416;"WLF" +"Samoa";882;"Pacific/Apia";378;"WSM" +"Yemen";887;"Asia/Aden";203;"YEM" +"Zambia";894;"Africa/Lusaka";36;"ZMB" +"Namibia";"264";"Africa/Windhoek";52;"NAM" \ No newline at end of file diff --git a/data/profiles/vertical/1layer_vertical_description.csv b/data/profiles/vertical/1layer_vertical_description.csv new file mode 100644 index 0000000..defd16d --- /dev/null +++ b/data/profiles/vertical/1layer_vertical_description.csv @@ -0,0 +1,2 @@ +Ilayer;height_magl +1;1000 \ No newline at end of file diff --git a/data/profiles/vertical/CMAQ_15layers_vertical_description.csv b/data/profiles/vertical/CMAQ_15layers_vertical_description.csv new file mode 100644 index 0000000..64e1ce8 --- /dev/null +++ b/data/profiles/vertical/CMAQ_15layers_vertical_description.csv @@ -0,0 +1,16 @@ +Ilayer;height_magl +1;39 +2;78 +3;119 +4;157 +5;197 +6;237 +7;315 +8;390 +9;560 +10;820 +11;1250 +12;1870 +13;2850 +14;5100 +15;20100 \ No newline at end of file diff --git a/data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv b/data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv new file mode 100644 index 0000000..00da2f6 --- /dev/null +++ b/data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv @@ -0,0 +1,49 @@ +Ilayer;height_magl +1;48 +2;104 +3;167 +4;240 +5;322 +6;416 +7;522 +8;643 +9;782 +10;942 +11;1126 +12;1337 +13;1580 +14;1857 +15;2170 +16;2520 +17;2908 +18;3334 +19;3796 +20;4290 +21;4811 +22;5351 +23;5898 +24;6440 +25;6967 +26;7473 +27;7952 +28;8407 +29;8843 +30;9278 +31;9730 +32;10218 +33;10754 +34;11348 +35;12003 +36;12725 +37;13517 +38;14386 +39;15341 +40;16398 +41;17580 +42;18921 +43;20463 
+44;22260 +45;24390 +46;26990 +47;30299 +48;34719 \ No newline at end of file diff --git a/data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv b/data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv new file mode 100644 index 0000000..b4a06e1 --- /dev/null +++ b/data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv @@ -0,0 +1,49 @@ +Ilayer;height_magl +1;48 +2;104 +3;168 +4;241 +5;324 +6;418 +7;525 +8;646 +9;786 +10;946 +11;1130 +12;1342 +13;1584 +14;1860 +15;2171 +16;2518 +17;2902 +18;3322 +19;3776 +20;4260 +21;4768 +22;5291 +23;5816 +24;6332 +25;6827 +26;7295 +27;7732 +28;8140 +29;8528 +30;8911 +31;9307 +32;9728 +33;10184 +34;10681 +35;11217 +36;11793 +37;12409 +38;13065 +39;13761 +40;14492 +41;15256 +42;16048 +43;16858 +44;17676 +45;18480 +46;19242 +47;19923 +48;20480 diff --git a/data/profiles/vertical/Vertical_profile.csv b/data/profiles/vertical/Vertical_profile.csv new file mode 100644 index 0000000..2518a5a --- /dev/null +++ b/data/profiles/vertical/Vertical_profile.csv @@ -0,0 +1,7 @@ +ID;layers;weights +V001;36,72,108,144,218,292,366,440,514,588,724,896,1050,1204,1358;0,0,0,0,0.1,0.32,0.31,0.15,0.06,0.03,0.03,0,0,0,0 +V002;36,72,108,144,218,292,366,440,514,588,724,896,1050,1204,1358;0,0.18,0.47,0.25,0.08,0.02,0,0,0,0,0,0,0,0,0 +V003;10,1000;0.6,0.4 +V004;1000,9000;0,1 +V005;9000,12000;0,1 +V006;36,72,108,144,218,292,366,440,514,588,724,896,1050,1204,1358;0,0,0.02,0.26,0.56,0.13,0.03,0,0,0,0,0,0,0,0 \ No newline at end of file diff --git a/hermesv3_gr/config/__init__.py b/hermesv3_gr/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/config/config.py b/hermesv3_gr/config/config.py new file mode 100644 index 0000000..dae6c2a --- /dev/null +++ b/hermesv3_gr/config/config.py @@ -0,0 +1,275 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. 
+# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +from configargparse import ArgParser +from hermesv3_gr.config import settings +import os +import sys + + +class Config(ArgParser): + """ + Initialization of the arguments that the parser can handle. + """ + def __init__(self): + super(Config, self).__init__() + self.options = self.read_options() + + def read_options(self): + """ + Reads all the options from command line or from the configuration file. + The value of an argument given by command line has high priority that the one that appear in the + configuration file. + + :return: Arguments already parsed. 
+ :rtype: Namespace + """ + # p = ArgParser(default_config_files=['/home/Earth/mguevara/HERMES/HERMESv3/IN/conf/hermes.conf']) + p = ArgParser() + p.add('-c', '--my-config', required=False, is_config_file=True, help='Path to the configuration file.') + # TODO Detallar mas que significan 1, 2 y 3 los log_level + p.add('--log_level', required=True, help='Level of detail of the running process information.', + type=int, choices=[1, 2, 3]) + # p.add('--do_fix_part', required=False, help='Indicates if is needed to do the fix part', + # type=str, default='True') + p.add('--input_dir', required=True, help='Path to the input directory of the model.') + p.add('--data_path', required=True, help='Path to the data necessary for the model.') + p.add('--output_dir', required=True, help='Path to the output directory of the model.') + p.add('--output_name', required=True, help="Name of the output file. You can add the string '' that " + + "will be substitute by the starting date of the simulation day.") + p.add('--start_date', required=True, help='Starting Date to simulate (UTC)') + p.add('--end_date', required=False, help='If you want to simulate more than one day you have to specify the ' + + 'ending date of simulation in this parameter. 
' + + 'If it is not set end_date = start_date.') + + p.add('--output_timestep_type', required=True, help='Type of timestep.', + type=str, choices=['hourly', 'daily', 'monthly', 'yearly']) + p.add('--output_timestep_num', required=True, help='Number of timesteps to simulate.', type=int) + p.add('--output_timestep_freq', required=True, help='Frequency between timesteps.', type=int) + + p.add('--output_model', required=True, help='Name of the output model.', choices=['MONARCH', 'CMAQ', 'WRF_CHEM']) + p.add('--output_attributes', required=False, help='Path to the file that contains the global attributes.') + + p.add('--domain_type', required=True, help='Type of domain to simulate.', + choices=['global', 'lcc', 'rotated', 'mercator']) + p.add('--auxiliar_files_path', required=True, help='Path to the directory where the necessary auxiliary ' + + 'files will be created if them are not created yet.') + + p.add('--vertical_description', required=True, help='Path to the file that contains the vertical description ' + + 'of the desired output.') + + # Global options + p.add('--inc_lat', required=False, help='Latitude resolution for a global domain.', type=float) + p.add('--inc_lon', required=False, help='Longitude resolution for a global domain.', type=float) + + # Rotated options + p.add('--centre_lat', required=False, help='Central geographic latitude of grid (non-rotated degrees). Corresponds to the TPH0D parameter in NMMB-MONARCH.', type=float) + p.add('--centre_lon', required=False, help='Central geographic longitude of grid (non-rotated degrees, positive east). Corresponds to the TLM0D parameter in NMMB-MONARCH.', type=float) + p.add('--west_boundary', required=False, help="Grid's western boundary from center point (rotated degrees). Corresponds to the WBD parameter in NMMB-MONARCH.", type=float) + p.add('--south_boundary', required=False, help="Grid's southern boundary from center point (rotated degrees). 
Corresponds to the SBD parameter in NMMB-MONARCH.", type=float) + p.add('--inc_rlat', required=False, help='Latitudinal grid resolution (rotated degrees). Corresponds to the DPHD parameter in NMMB-MONARCH.', type=float) + p.add('--inc_rlon', required=False, help='Longitudinal grid resolution (rotated degrees). Corresponds to the DLMD parameter in NMMB-MONARCH.', type=float) + + # Lambert conformal conic options + p.add('--lat_1', required=False, help='Standard parallel 1 (in deg). Corresponds to the P_ALP parameter of the GRIDDESC file.', type=float) + p.add('--lat_2', required=False, help='Standard parallel 2 (in deg). Corresponds to the P_BET parameter of the GRIDDESC file.', type=float) + p.add('--lon_0', required=False, help='Longitude of the central meridian (degrees). Corresponds to the P_GAM parameter of the GRIDDESC file.', type=float) + p.add('--lat_0', required=False, help='Latitude of the origin of the projection (degrees). Corresponds to the Y_CENT parameter of the GRIDDESC file.', type=float) + p.add('--nx', required=False, help='Number of grid columns. Corresponds to the NCOLS parameter of the GRIDDESC file.', type=float) + p.add('--ny', required=False, help='Number of grid rows. Corresponds to the NROWS parameter of the GRIDDESC file.', type=float) + p.add('--inc_x', required=False, help='X-coordinate cell dimension (meters). Corresponds to the XCELL parameter of the GRIDDESC file.', type=float) + p.add('--inc_y', required=False, help='Y-coordinate cell dimension (meters). Corresponds to the YCELL parameter of the GRIDDESC file.', type=float) + p.add('--x_0', required=False, help='X-coordinate origin of grid (meters). Corresponds to the XORIG parameter of the GRIDDESC file.', type=float) + p.add('--y_0', required=False, help='Y-coordinate origin of grid (meters). 
Corresponds to the YORIG parameter of the GRIDDESC file.', type=float) + + # Mercator + p.add('--lat_ts', required=False, help='...', type=float) + + p.add('--cross_table', required=True, help='Path to the file that contains the information ' + + 'of the datasets to use.') + p.add('--p_vertical', required=True, help='Path to the file that contains all the needed vertical profiles.') + p.add('--p_month', required=True, help='Path to the file that contains all the needed monthly profiles.') + p.add('--p_day', required=True, help='Path to the file that contains all the needed daily profiles.') + p.add('--p_hour', required=True, help='Path to the file that contains all the needed hourly profiles.') + p.add('--p_speciation', required=True, help='Path to the file that contains all the needed ' + + 'speciation profiles.') + p.add('--molecular_weights', required=True, help='Path to the file that contains the molecular weights ' + + 'of the input pollutants.') + p.add('--world_info', required=True, help='Path to the file that contains the world information ' + + 'like timezones, ISO codes, ...') + + options = p.parse_args() + for item in vars(options): + is_str = False + exec ("is_str = str == type(options.{0})".format(item)) + if is_str: + exec("options.{0} = options.{0}.replace('', options.input_dir)".format(item)) + exec("options.{0} = options.{0}.replace('', options.domain_type)".format(item)) + if options.domain_type == 'global': + exec("options.{0} = options.{0}.replace('', '{1}_{2}')".format( + item, options.inc_lat, options.inc_lon)) + elif options.domain_type == 'rotated': + exec("options.{0} = options.{0}.replace('', '{1}_{2}')".format( + item, options.inc_rlat, options.inc_rlon)) + elif options.domain_type == 'lcc' or options.domain_type == 'mercator': + exec("options.{0} = options.{0}.replace('', '{1}_{2}')".format( + item, options.inc_x, options.inc_y)) + + options.start_date = self._parse_start_date(options.start_date) + options.end_date = 
self._parse_end_date(options.end_date, options.start_date) + + self.create_dir(options.output_dir) + self.create_dir(options.auxiliar_files_path) + + return options + + def get_output_name(self, date): + """ + Generates the full path of the output replacing by YYYYMMDDHH, YYYYMMDD, YYYYMM or YYYY depending on the + output_timestep_type. + + :param date: Date of the day to simulate. + :type: datetime.datetime + + :return: Complete path to the output file. + :rtype: str + """ + import os + if self.options.output_timestep_type == 'hourly': + file_name = self.options.output_name.replace('', date.strftime('%Y%m%d%H')) + elif self.options.output_timestep_type == 'daily': + file_name = self.options.output_name.replace('', date.strftime('%Y%m%d')) + elif self.options.output_timestep_type == 'monthly': + file_name = self.options.output_name.replace('', date.strftime('%Y%m')) + elif self.options.output_timestep_type == 'yearly': + file_name = self.options.output_name.replace('', date.strftime('%Y')) + else: + file_name = self.options.output_name + full_path = os.path.join(self.options.output_dir, file_name) + return full_path + + @staticmethod + def create_dir(path): + """ + Creates the given folder if it is not created yet. + + :param path: Path to create. + :type path: str + """ + import os + from mpi4py import MPI + icomm = MPI.COMM_WORLD + comm = icomm.Split(color=0, key=0) + rank = comm.Get_rank() + + if rank == 0: + if not os.path.exists(path): + os.makedirs(path) + + comm.Barrier() + + @staticmethod + def _parse_bool(str_bool): + """ + Parse the giving string into a boolean. + The accepted options for a True value are: 'True', 'true', 'T', 't', 'Yes', 'yes', 'Y', 'y', '1' + The accepted options for a False value are: 'False', 'false', 'F', 'f', 'No', 'no', 'N', 'n', '0' + + If the sting is not in the options it will release a WARNING and the return value will be False. + + :param str_bool: String to convert to boolean. 
+ :return: bool + """ + true_options = ['True', 'true', 'T', 't', 'Yes', 'yes', 'Y', 'y', '1', 1, True] + false_options = ['False', 'false', 'F', 'f', 'No', 'no', 'N', 'n', '0', 0, False, None] + + if str_bool in true_options: + return True + elif str_bool in false_options: + return False + else: + print 'WARNING: Boolean value not contemplated use {0} for True values and {1} for the False ones'.format( + true_options, false_options + ) + print '/t Using False as default' + return False + + @staticmethod + def _parse_start_date(str_date): + """ + Parses the date form string to datetime. + It accepts several ways to introduce the date: + YYYYMMDD, YYYY/MM/DD, YYYYMMDDhh, YYYYYMMDD.hh, YYYY/MM/DD_hh:mm:ss, YYYY-MM-DD_hh:mm:ss, + YYYY/MM/DD hh:mm:ss, YYYY-MM-DD hh:mm:ss, YYYY/MM/DD_hh, YYYY-MM-DD_hh. + + :param str_date: Date to the day to simulate in string format. + :type str_date: str + + :return: Date to the day to simulate in datetime format. + :rtype: datetime.datetime + """ + from datetime import datetime + format_types = ['%Y%m%d', '%Y%m%d%H', '%Y%m%d.%H', '%Y/%m/%d_%H:%M:%S', '%Y-%m-%d_%H:%M:%S', + '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y/%m/%d_%H', '%Y-%m-%d_%H', '%Y/%m/%d'] + + date = None + for date_format in format_types: + try: + date = datetime.strptime(str_date, date_format) + break + except ValueError as e: + if e.message == 'day is out of range for month': + raise ValueError(e) + + if date is None: + raise ValueError("Date format '{0}' not contemplated. Use one of this: {1}".format(str_date, format_types)) + + return date + + def _parse_end_date(self, end_date, start_date): + """ + Parses the end date. + If it's not defined it will be the same date that start_date (to do only one day). + + :param end_date: Date to the last day to simulate in string format. + :type end_date: str + + :param start_date: Date to the first day to simulate. + :type start_date: datetime.datetime + + :return: Date to the last day to simulate in datetime format. 
+ :rtype: datetime.datetime + """ + if end_date is None: + return start_date + else: + return self._parse_start_date(end_date) + + def set_log_level(self): + """ + Defines the log_level using the common script settings. + """ + import settings + settings.define_global_vars(self.options.log_level) + + +if __name__ == '__main__': + config = Config() + print config.options diff --git a/hermesv3_gr/config/settings.py b/hermesv3_gr/config/settings.py new file mode 100644 index 0000000..789a24d --- /dev/null +++ b/hermesv3_gr/config/settings.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +import sys +import numpy as np + +global refresh_log + + +global precision +precision = np.float64 + +global writing_serial +writing_serial = False + +global compressed_netcdf +compressed_netcdf = False + +if not writing_serial: + compressed_netcdf = False + +global icomm +global comm +global rank +global size + +global log_level +global log_file +global df_times + + +def define_global_vars(in_log_level): + # TODO definir millor que significa cada log level + """ + Defines the global values of log_level. + + :param in_log_level: Level of detail of the information given on STDOUT [1, 2, 3]. + :type in_log_level: int + + :return: global values of Log Level. 
+ :rtype: bool + """ + from mpi4py import MPI + + global icomm + global comm + global rank + global size + + icomm = MPI.COMM_WORLD + comm = icomm.Split(color=0, key=0) + rank = comm.Get_rank() + size = comm.Get_size() + + global log_level + log_level = in_log_level + + # global log_level_1 + # global log_level_2 + # global log_level_3 + # + # if log_level is 1 and rank == 0: + # log_level_1 = True + # log_level_2 = False + # log_level_3 = False + # elif log_level is 2 and rank == 0: + # log_level_1 = True + # log_level_2 = True + # log_level_3 = False + # elif log_level is 3 and rank == 0: + # log_level_1 = True + # log_level_2 = True + # log_level_3 = True + # else: + # log_level_1 = False + # log_level_2 = False + # log_level_3 = False + # + # return log_level_1, log_level_2, log_level_3 + + +def define_log_file(log_path, date): + log_path = os.path.join(log_path, 'logs') + if not os.path.exists(log_path): + os.makedirs(log_path) + + log_path = os.path.join(log_path, 'HERMESv3_{0}_Rank{1}_Procs{2}.log'.format( + date.strftime('%Y%m%d%H'), str(rank).zfill(4), str(size).zfill(4))) + if os.path.exists(log_path): + os.remove(log_path) + + global log_file + + log_file = open(log_path, mode='w') + + +def define_times_file(): + import pandas as pd + global df_times + + df_times = pd.DataFrame(columns=['Class', 'Function', rank]) + + +def write_log(msg, level=1): + if log_level >= level: + log_file.write(msg + '\n') + log_file.flush() + + +def write_time(module, func, time, level=1): + global df_times + if log_level >= level: + df_times = df_times.append({'Class': module, 'Function': func, rank: time}, ignore_index=True) + + +def finish_logs(output_dir, date): + import pandas as pd + from functools import reduce + log_file.close() + + global df_times + df_times = df_times.groupby(['Class', 'Function']).sum().reset_index() + data_frames = comm.gather(df_times, root=0) + if rank == 0: + times_path = os.path.join(output_dir, 'logs', 
'HERMESv3_{0}_times_Procs{1}.csv'.format( + date.strftime('%Y%m%d%H'), str(size).zfill(4))) + if os.path.exists(times_path): + os.remove(times_path) + df_merged = reduce(lambda left, right: pd.merge(left, right, on=['Class', 'Function'], how='outer'), data_frames) + df_merged['min'] = df_merged.loc[:, range(size)].min(axis=1) + df_merged['max'] = df_merged.loc[:, range(size)].max(axis=1) + df_merged['mean'] = df_merged.loc[:, range(size)].mean(axis=1) + + df_merged.to_csv(times_path) + comm.Barrier() diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py new file mode 100755 index 0000000..2ebd719 --- /dev/null +++ b/hermesv3_gr/hermes.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ + +import sys +import os +from mpi4py import MPI + +from timeit import default_timer as gettime + +from hermesv3_gr.config import settings +from hermesv3_gr.config.config import Config +from hermesv3_gr.modules.emision_inventories.emission_inventory import EmissionInventory +from hermesv3_gr.modules.vertical.vertical import VerticalDistribution +from hermesv3_gr.modules.temporal.temporal import TemporalDistribution +from hermesv3_gr.modules.writing.writer_cmaq import WriterCmaq + +from hermesv3_gr.tools.netcdf_tools import * +# import pyextrae.sequential as pyextrae + +global full_time + +class Hermes(object): + """ + Interface class for HERMESv3. + """ + def __init__(self, config, new_date=None): + from hermesv3_gr.modules.grids.grid import Grid + from hermesv3_gr.modules.temporal.temporal import TemporalDistribution + from hermesv3_gr.modules.writing.writer import Writer + global full_time + st_time = full_time = gettime() + + self.config = config + self.options = config.options + + # Updating starting date + if new_date is not None: + self.options.start_date = new_date + + config.set_log_level() + settings.define_log_file(self.options.output_dir, self.options.start_date) + settings.define_times_file() + + settings.write_log('Starting HERMESv3 initialization:') + + if self.options.output_model in ['CMAQ', 'WRF_CHEM'] and self.options.domain_type == 'global': + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError('ERROR: Global domain is not aviable for {0} output model.'.format(self.options.output_model)) + sys.exit(1) + + self.levels = VerticalDistribution.get_vertical_output_profile(self.options.vertical_description) + + self.grid = Grid.select_grid( + self.options.domain_type, self.options.vertical_description, self.options.output_timestep_num, + self.options.auxiliar_files_path, self.options.inc_lat, self.options.inc_lon, self.options.centre_lat, + self.options.centre_lon, 
self.options.west_boundary, self.options.south_boundary, self.options.inc_rlat, + self.options.inc_rlon, self.options.lat_1, self.options.lat_2, self.options.lon_0, self.options.lat_0, + self.options.nx, self.options.ny, self.options.inc_x, self.options.inc_y, self.options.x_0, + self.options.y_0, self.options.lat_ts) + + self.emission_list = EmissionInventory.make_emission_list(self.options, self.grid, self.levels, self.options.start_date) + + self.delta_hours = TemporalDistribution.calculate_delta_hours( + self.options.start_date, self.options.output_timestep_type, self.options.output_timestep_num, + self.options.output_timestep_freq) + + self.writer = Writer.get_writer( + self.options.output_model, self.config.get_output_name(self.options.start_date), self.grid, + self.levels, self.options.start_date, self.delta_hours, self.options.output_attributes, compress=settings.compressed_netcdf, + parallel=not settings.writing_serial) + + settings.write_log('End of HERMESv3 initialization.') + settings.write_time('HERMES', 'Init', gettime() - st_time, level=1) + + # @profile + def main(self): + """ + Main functionality of the model. 
+ """ + from multiprocessing import Process, Queue, cpu_count + from threading import Thread + import copy + import gc + import numpy as np + from datetime import timedelta + from cf_units import Unit + + st_time = gettime() + settings.write_log('') + settings.write_log('***** Starting HERMESv3 *****') + num = 1 + for ei in self.emission_list: + settings.write_log('Processing emission inventory {0} for the sector {1} ({2}/{3}):'.format( + ei.inventory_name, ei.sector, num, len(self.emission_list))) + num += 1 + + ei.do_regrid() + + if ei.vertical is not None: + settings.write_log("\tCalculating vertical distribution.", level=2) + if ei.source_type == 'area': + ei.vertical_factors = ei.vertical.calculate_weights() + elif ei.source_type == 'point': + ei.calculate_altitudes(self.options.vertical_description) + ei.point_source_by_cell() + # To avoid use point source as area source when is going to apply vertical factors while writing. + ei.vertical = None + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError('Unrecognized emission source type {0}'.format(ei.source_type)) + sys.exit(1) + + if ei.temporal is not None: + ei.temporal_factors = ei.temporal.calculate_3d_temporal_factors() + if ei.speciation is not None: + ei.emissions = ei.speciation.do_speciation(ei.emissions) + + self.writer.write(self.emission_list) + + settings.write_log("***** HERMESv3 simulation finished successfully *****") + settings.write_time('HERMES', 'main', gettime() - st_time) + settings.write_time('HERMES', 'TOTAL', gettime() - full_time) + settings.finish_logs(self.options.output_dir, self.options.start_date) + + if self.options.start_date < self.options.end_date: + return self.options.start_date + timedelta(days=1) + + return None + + +if __name__ == '__main__': + date = Hermes(Config()).main() + while date is not None: + date = Hermes(Config(), new_date=date).main() + sys.exit(0) diff --git 
a/hermesv3_gr/modules/__init__.py b/hermesv3_gr/modules/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/emision_inventories/__init__.py b/hermesv3_gr/modules/emision_inventories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/emision_inventories/emission_inventory.py b/hermesv3_gr/modules/emision_inventories/emission_inventory.py new file mode 100644 index 0000000..2f90120 --- /dev/null +++ b/hermesv3_gr/modules/emision_inventories/emission_inventory.py @@ -0,0 +1,522 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +import os +import sys +from timeit import default_timer as get_time + +import hermesv3_gr.config.settings as settings +from hermesv3_gr.modules.regrid.regrid_conservative import ConservativeRegrid +from hermesv3_gr.modules.vertical.vertical import VerticalDistribution +from hermesv3_gr.modules.temporal.temporal import TemporalDistribution +from hermesv3_gr.modules.speciation.speciation import Speciation + + +class EmissionInventory(object): + """ + Class that defines the content and the methodology for the area emission inventories + + :param current_date: Date of the day to simulate. + :type current_date: datetime.datetime + + :param inventory_name: Name of the inventory to use. 
+ :type inventory_name: str + + :param sector: Name of the sector of the inventory to use. + :type sector: str + + :param pollutants: List of the pollutant name to take into account. + :type pollutants: list of str + + :param inputs_path: Path where are stored all the datasets to use. eg: /esarchive/recon/jrc/htapv2/monthly_mean + :type inputs_path: str + + :param input_frequency: Frequency of the inputs. [yearly, monthly, daily] + :type input_frequency: str + + :param reference_year: year of reference of the information of the dataset. + :type reference_year: int + + :param factors: Description of the scale factors per country. (e.g. SPN 1.5, CHN 3.) + :type factors: str + + :param regrid_mask: Description of the masking countries (adding e.g. + SPN AND) (subtracting e.g. - SPN) + :type regrid_mask: str + + :param p_vertical: ID of the vertical profile to use. + :type p_vertical: str + + :param p_month: ID of the temporal monthly profile to use. + :type p_month: str + + :param p_day: ID of the temporal daily profile to use. + :type p_day: str + + :param p_hour: ID of the temporal hourly profile to use. + :type p_hour: str + + :param p_speciation: ID of the speciation profile to use. 
+ :type p_speciation: str + """ + def __init__(self, options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, + input_frequency, vertical_output_profile, reference_year=2010, factors=None, regrid_mask=None, + p_vertical=None, p_month=None, p_day=None, p_hour=None, p_speciation=None): + from hermesv3_gr.modules.masking.masking import Masking + + st_time = get_time() + settings.write_log('\t\tCreating area source emission inventory.', level=3) + + # Emission Inventory parameters + self.source_type = source_type + self.date = current_date + self.inventory_name = inventory_name + self.sector = sector + self.reference_year = reference_year + self.inputs_path = inputs_path + self.input_frequency = input_frequency + self.grid = grid + + # Profiles + p_vertical = self.get_profile(p_vertical) + p_month = self.get_profile(p_month) + p_day = self.get_profile(p_day) + p_hour = self.get_profile(p_hour) + p_speciation = self.get_profile(p_speciation) + + # Creating Masking Object + # It will also create the WoldMasks necessaries + self.masking = Masking( + options.world_info, factors, regrid_mask, grid, + world_mask_file=os.path.join(os.path.dirname(options.auxiliar_files_path), + '{0}_WorldMask.nc'.format(inventory_name))) + + self.pollutant_dicts = self.create_pollutants_dicts(pollutants) + + self.masking.check_regrid_mask(self.pollutant_dicts[0]['path']) + + # Creating Regrid Object + # It will also create the WoldMasks necessaries + if self.source_type == 'area': + self.regrid = ConservativeRegrid( + self.pollutant_dicts, + os.path.join(options.auxiliar_files_path, + "Weight_Matrix_{0}_{1}.nc".format(self.inventory_name, settings.size)), + grid, masking=self.masking) + + # Creating Vertical Object + if p_vertical is not None: + self.vertical = VerticalDistribution( + self.get_profile(p_vertical), vertical_profile_path=options.p_vertical, + vertical_output_profile=vertical_output_profile) + else: + self.vertical = None + 
settings.write_log('\t\tNone vertical profile set.', level=2) + self.vertical_factors = None + + # Creating Temporal Object + # It will also create the necessaries timezone files + if not((p_month is None) and (p_day is None) and (p_hour is None)): + self.temporal = TemporalDistribution( + current_date, options.output_timestep_type, options.output_timestep_num, options.output_timestep_freq, + options.p_month, p_month, options.p_day, p_day, options.p_hour, p_hour, options.world_info, + options.auxiliar_files_path, grid) + else: + self.temporal = None + settings.write_log('\t\tNone temporal profile set.', level=2) + self.temporal_factors = None + + # Creating Speciation Object + if p_speciation is not None: + self.speciation = Speciation(p_speciation, options.p_speciation, options.molecular_weights) + else: + self.speciation = None + settings.write_log('\t\tNone speciation profile set.', level=2) + + self.vertical_weights = None + + self.emissions = [] + + settings.write_time('EmissionInventory', 'Init', get_time() - st_time, level=3) + + return None + + def create_pollutants_dicts(self, pollutants): + pollutant_list = [] + + for pollutant_name in pollutants: + pollutant_list.append( + {'name': pollutant_name, + 'path': self.get_input_path(pollutant_name), + 'Dataset': "{0}_{1}".format(self.inventory_name, self.sector)} + ) + return pollutant_list + + @staticmethod + def get_profile(id_aux): + """ + Parse the id of the profiles. + + :param id_aux: ID of the profile. + :type id_aux: str + + :return: ID of the profile parsed. + :rtype: str + """ + import pandas as pd + + if pd.isnull(id_aux): + return None + else: + return id_aux + + def get_input_path(self, pollutant=None): + """ + Completes the path of the input file that contains the needed information of the given pollutant. + + :param pollutant: Name of the pollutant. + :type pollutant: str + + :param extension: Extension of the input file + + :return: Full path of the needed file. 
+ :rtype: str + """ + import pandas as pd + + if self.source_type == 'area': + extension = 'nc' + elif self.source_type == 'point': + extension = 'csv' + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError('ERROR: Unknown source type {0}'.format(self.source_type)) + sys.exit(1) + + # Finding upper folder + if pd.isnull(self.sector): + upper_folder = '{0}'.format(pollutant) + else: + upper_folder = '{0}_{1}'.format(pollutant, self.sector) + + # Finding pollutant folder and filename. + if self.input_frequency == 'yearly': + file_name = "{0}_{1}.{2}".format(pollutant, self.reference_year, extension) + elif self.input_frequency == 'monthly': + file_name = "{0}_{1}{2}.{3}".format(pollutant, self.reference_year, self.date.strftime("%m"), extension) + elif self.input_frequency == 'daily': + file_name = "{0}_{1}{2}{3}.{4}".format(pollutant, self.reference_year, self.date.strftime("%m"), + self.date.strftime("%D"), extension) + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise ValueError( + "ERROR: frequency {0} not implemented. 
Use yearly, monthly or daily.".format(self.input_frequency)) + sys.exit(1) + + # Filename + file_path = os.path.join(self.inputs_path, upper_folder, file_name) + + # Checking input file + if not os.path.exists(file_path): + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise IOError('ERROR: File {0} not found.'.format(file_path)) + sys.exit(1) + + return file_path + + def do_regrid(self): + + st_time = get_time() + + settings.write_log("\tRegridding", level=2) + regridded_emissions = self.regrid.start_regridding() + for emission in regridded_emissions: + dict_aux = {'name': emission['name'], 'data': emission['data'], 'units': 'm'} + self.emissions.append(dict_aux) + settings.write_time('EmissionInventory', 'do_regrid', get_time() - st_time, level=2) + + @staticmethod + def make_emission_list(options, grid, vertical_output_profile, date): + """ + Extracts the information of the cross table to read all the needed emissions. + + :param options: Full list of parameters given by passing argument or in the configuration file. + :type options: Namespace + + :param date: Date to the day to simulate. + :type date: datetime.datetime + + :return: List of Emission inventories already loaded. 
    @staticmethod
    def make_emission_list(options, grid, vertical_output_profile, date):
        """
        Extracts the information of the cross table to read all the needed emissions.

        Each active row of the cross table becomes one EmissionInventory (or
        subclass, chosen by ``source_type``/``ei``) object.

        :param options: Full list of parameters given by passing argument or in the configuration file.
        :type options: Namespace

        :param grid: Destination grid the emissions will be allocated onto.
        :type grid: Grid

        :param vertical_output_profile: Vertical profile of the output levels.
        :type vertical_output_profile: list

        :param date: Date to the day to simulate.
        :type date: datetime.datetime

        :return: List of Emission inventories already loaded.
        :rtype: list of EmissionInventory
        """
        import pandas as pd
        import re
        from gfas_emission_inventory import GfasEmissionInventory
        from point_source_emission_inventory import PointSourceEmissionInventory

        st_time = get_time()
        settings.write_log('Loading emissions')

        path = options.cross_table
        df = pd.read_csv(path, sep=';', index_col=False)
        # Fail early if the cross table is missing any mandatory column.
        for column in ['ei', 'sector', 'ref_year', 'active', 'factor_mask', 'regrid_mask', 'pollutants', 'path',
                       'frequency', 'source_type', 'p_vertical', 'p_month', 'p_day', 'p_hour', 'p_speciation']:
            df_cols = list(df.columns.values)
            if column not in df_cols:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    raise AttributeError('ERROR: Column {0} is not in the {1} file.'.format(column, path))
                sys.exit(1)
        # Only rows explicitly flagged active (== 1) are processed.
        df = df[df['active'] == 1]
        num = 1
        emission_inventory_list = []
        for i, emission_inventory in df.iterrows():
            settings.write_log('\tLoading emission {0}/{1} (Inventory: {2}; Sector: {3})'.format(
                num, len(df), emission_inventory.ei, emission_inventory.sector), level=1)
            num += 1
            # The pollutant column accepts ',', ';' or blank separated values.
            pollutants = list(map(str, re.split(', |,|; |;| ', emission_inventory.pollutants)))

            try:
                # gridded temporal profile
                # NOTE(review): str.replace('', x) inserts x between every
                # character -- the placeholder token (e.g. '<input_dir>') was
                # probably lost when this patch was transcribed; confirm
                # against the original repository before relying on this.
                p_month = emission_inventory.p_month.replace('', options.input_dir)
            except AttributeError:
                # p_month is not a string (e.g. NaN or a plain profile ID).
                p_month = emission_inventory.p_month

            # NOTE(review): same stripped-placeholder concern as above.
            emission_inventory_path = emission_inventory.path.replace('', options.data_path)
            emission_inventory_path = emission_inventory_path.replace('', options.input_dir)

            if emission_inventory.source_type == 'area':
                if emission_inventory.ei == 'GFASv12':
                    # GFAS fires need their own vertical allocation logic.
                    emission_inventory_list.append(
                        GfasEmissionInventory(options, grid, date, emission_inventory.ei,
                                              emission_inventory.source_type, emission_inventory.sector, pollutants,
                                              emission_inventory_path,
                                              emission_inventory.frequency, vertical_output_profile,
                                              reference_year=emission_inventory.ref_year,
                                              factors=emission_inventory.factor_mask,
                                              regrid_mask=emission_inventory.regrid_mask,
                                              p_vertical=emission_inventory.p_vertical,
                                              p_month=p_month,
                                              p_day=emission_inventory.p_day,
                                              p_hour=emission_inventory.p_hour,
                                              p_speciation=emission_inventory.p_speciation))
                else:
                    emission_inventory_list.append(
                        EmissionInventory(options, grid, date, emission_inventory.ei, emission_inventory.source_type,
                                          emission_inventory.sector, pollutants,
                                          emission_inventory_path,
                                          emission_inventory.frequency, vertical_output_profile,
                                          reference_year=emission_inventory.ref_year,
                                          factors=emission_inventory.factor_mask,
                                          regrid_mask=emission_inventory.regrid_mask,
                                          p_vertical=emission_inventory.p_vertical,
                                          p_month=p_month,
                                          p_day=emission_inventory.p_day,
                                          p_hour=emission_inventory.p_hour,
                                          p_speciation=emission_inventory.p_speciation))
            elif emission_inventory.source_type == 'point':
                emission_inventory_list.append(
                    PointSourceEmissionInventory(options, grid, date, emission_inventory.ei,
                                                 emission_inventory.source_type, emission_inventory.sector,
                                                 pollutants, emission_inventory_path,
                                                 emission_inventory.frequency, vertical_output_profile,
                                                 reference_year=emission_inventory.ref_year,
                                                 factors=emission_inventory.factor_mask,
                                                 regrid_mask=emission_inventory.regrid_mask,
                                                 p_vertical=emission_inventory.p_vertical,
                                                 p_month=p_month,
                                                 p_day=emission_inventory.p_day,
                                                 p_hour=emission_inventory.p_hour,
                                                 p_speciation=emission_inventory.p_speciation))
            else:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    raise ValueError("ERROR: The emission inventory source type '{0}'".format(
                        emission_inventory.source_type) + " is not implemented. Use 'area' or 'point'")
                sys.exit(1)
        settings.write_log('', level=2)
        settings.write_time('EmissionInventory', 'make_emission_list', get_time() - st_time, level=3)

        return emission_inventory_list
+ # :type vertical_description_path: str + # + # :param timestep_num: Number of time steps. + # :type timestep_num: int + # + # :param grid: 3D Grid (time, latitude, longitude) of the output file. + # :type grid: Grid + # + # :return: Shape of the output file (4D: time, level, latitude, longitude). + # :rtype: tuple + # """ + # from hermesv3_gr.modules.vertical.vertical import VerticalDistribution + # + # if settings.log_level_3: + # st_time = get_time() + # else: + # st_time = None + # if vertical_description_path is 1: + # levels = [0] + # else: + # levels = VerticalDistribution.get_vertical_output_profile(vertical_description_path) + # if grid.grid_type == 'lcc': + # shape = (timestep_num, len(levels), grid.center_latitudes.shape[0], grid.center_longitudes.shape[1]) + # elif grid.grid_type == 'rotated': + # shape = (timestep_num, len(levels), grid.center_latitudes.shape[0], grid.center_longitudes.shape[1]) + # else: + # shape = (timestep_num, len(levels), grid.x_upper_bound - grid.x_lower_bound, grid.y_upper_bound - grid.y_lower_bound) + # + # if settings.log_level_3: + # print "TIME -> EmissionInventory.create_output_shape: {0} s".format(round(get_time() - st_time, 2)) + # + # return shape + + @staticmethod + def create_aux_output_emission_list(speciation_profile_path): + """ + Creates the list of output pollutants + + :param speciation_profile_path: Path to the file that contains all the speciation profiles. + :type speciation_profile_path: str + + :param shape: Shape of the output. + :type shape: tuple + + :return: Empty list of the output pollutants. 
+ """ + # print 'OUT_PROFILE_SHAPE', shape + import pandas as pd + import numpy as np + + if settings.log_level_3: + st_time = get_time() + else: + st_time = None + + output_emission_list = [] + + df_speciation = pd.read_csv(speciation_profile_path, sep=';', nrows=2) + del df_speciation['ID'] + for column in df_speciation: + output_emission_list.append({ + 'name': column, + 'units': df_speciation[column][0], + 'long_name': df_speciation[column][1], + }) + + if settings.log_level_3: + print "TIME -> EmissionInventory.create_aux_output_emission_list: {0} s".format( + round(get_time() - st_time, 2)) + + return output_emission_list + + @staticmethod + def create_aux_output_emission_list_full(speciation_profile_path, shape): + """ + Creates the list of output pollutants + + :param speciation_profile_path: Path to the file that contains all the speciation profiles. + :type speciation_profile_path: str + + :param shape: Shape of the output. + :type shape: tuple + + :return: Empty list of the output pollutants. 
+ """ + # print 'OUT_PROFILE_SHAPE', shape + import pandas as pd + import numpy as np + + if settings.log_level_3: + st_time = get_time() + else: + st_time = None + + output_emission_list = [] + + df_speciation = pd.read_csv(speciation_profile_path, sep=';', nrows=2) + del df_speciation['ID'] + for column in df_speciation: + output_emission_list.append({ + 'name': column, + 'units': df_speciation[column][0], + 'long_name': df_speciation[column][1], + 'data': np.zeros(shape) + # 'data': np.empty(shape) + }) + + if settings.log_level_3: + print "TIME -> EmissionInventory.create_aux_output_emission_list: {0} s".format( + round(get_time() - st_time, 2)) + + return output_emission_list + + +if __name__ == "__main__": + pass diff --git a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py new file mode 100755 index 0000000..63243b1 --- /dev/null +++ b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py @@ -0,0 +1,326 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ +import os +import sys +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings +from emission_inventory import EmissionInventory + + +class GfasEmissionInventory(EmissionInventory): + """ + Class that defines the content and the methodology for the GFAS emission inventories + + :param current_date: Date of the day to simulate. + :type current_date: datetime.datetime + + :param inventory_name: Name of the inventory to use. + :type inventory_name: str + + :param sector: Name of the sector of the inventory to use. + :type sector: str + + :param pollutants: List of the pollutant name to take into account. + :type pollutants: list of str + + :param frequency: Frequency of the inputs. [yearly, monthly, daily] + :type frequency: str + + :param reference_year: year of reference of the information of the dataset. + :type reference_year: int + + :param factors: NOT IMPLEMENTED YET + :type factors: NOT IMPLEMENTED YET + + :param regrid_mask: NOT IMPLEMENTED YET + :type regrid_mask: NOT IMPLEMENTED YET + + :param p_vertical: ID of the vertical profile to use. + :type p_vertical: str + + :param p_month: ID of the temporal monthly profile to use. + :type p_month: str + + :param p_day: ID of the temporal daily profile to use. + :type p_day: str + + :param p_hour: ID of the temporal hourly profile to use. + :type p_hour: str + + :param p_speciation: ID of the speciation profile to use. 
+ :type p_speciation: str + """ + + def __init__(self, options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, + frequency, vertical_output_profile, + reference_year=2010, factors=None, regrid_mask=None, p_vertical=None, p_month=None, p_day=None, + p_hour=None, p_speciation=None): + from hermesv3_gr.modules.vertical.vertical_gfas import GfasVerticalDistribution + + st_time = gettime() + settings.write_log('\t\tCreating GFAS emission inventory.', level=3) + + super(GfasEmissionInventory, self).__init__( + options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, frequency, + vertical_output_profile, + reference_year=reference_year, factors=factors, regrid_mask=regrid_mask, p_vertical=None, + p_month=p_month, p_day=p_day, p_hour=p_hour, p_speciation=p_speciation) + + self.approach = self.get_approach(p_vertical) + self.method = self.get_method(p_vertical) + + # self.altitude = self.get_altitude() + + self.vertical = GfasVerticalDistribution(vertical_output_profile, self.approach, self.get_altitude()) + + settings.write_time('GFAS_EmissionInventory', 'Init', gettime() - st_time, level=3) + + return None + + def get_input_path(self, pollutant=None, extension='nc'): + """ + Completes the path of the NetCDF that contains the needed information of the given pollutant. + + :param pollutant: Name of the pollutant of the NetCDF. + :type pollutant: str + + :return: Full path of the needed NetCDF. + :rtype: str + """ + st_time = gettime() + + netcdf_path = os.path.join(self.inputs_path, 'multivar', 'ga_{0}.{1}'.format( + self.date.strftime('%Y%m%d'), extension)) + + settings.write_time('GfasEmissionInventory', 'get_input_path', gettime() - st_time, level=3) + + return netcdf_path + + def get_altitude(self): + """ + Extracts the altitude values depending on the choosen method. + + :return: Array with the alittude of each fire. 
+ :rtype: numpy.ndarray + """ + from hermesv3_gr.tools.netcdf_tools import extract_vars + + st_time = gettime() + + if self.method == 'sovief': + alt_var = 'apt' + elif self.method == 'prm': + alt_var = 'mami' + else: + alt_var = None + + print "ERROR: Only 'sovief' and 'prm' methods are accepted." + + [alt] = extract_vars(self.get_input_path(), [alt_var]) + + alt = alt['data'] + + settings.write_time('GfasEmissionInventory', 'get_altitude', gettime() - st_time, level=3) + return alt + + def get_approach(self, p_vertical): + """ + Extracts the given approach value. + + :return: Approach value + :rtype: str + """ + import re + + st_time = gettime() + + return_value = None + aux_list = re.split(', |,| , | ,', p_vertical) + for element in aux_list: + aux_value = re.split('=| =|= | = ', element) + if aux_value[0] == 'approach': + return_value = aux_value[1] + + settings.write_time('GfasEmissionInventory', 'get_approach', gettime() - st_time, level=3) + + return return_value + + def get_method(self, p_vertical): + """ + Extracts the given method value. + + :return: Method value + :rtype: str + """ + import re + + st_time = gettime() + + return_value = None + aux_list = re.split(', |,| , | ,', p_vertical) + for element in aux_list: + aux_value = re.split('=| =|= | = ', element) + if aux_value[0] == 'method': + return_value = aux_value[1] + + settings.write_time('GfasEmissionInventory', 'get_method', gettime() - st_time, level=3) + + return return_value + + def do_vertical_allocation(self, values): + """ + Allocates the fire emissions on their top level. + + :param values: 2D array with the fire emissions + :type values: numpy.array + + :return: Emissions already allocated on the top altitude of each fire. 
+ :rtype: numpy.array + """ + st_time = gettime() + + return_value = self.vertical.do_vertical_interpolation_allocation(values, self.altitude) + + settings.write_time('GfasEmissionInventory', 'do_vertical_allocation', gettime() - st_time, level=3) + + return return_value + + def do_regrid(self): + + st_time = gettime() + settings.write_log("\tRegridding", level=2) + + for i in xrange(len(self.emissions)): + self.emissions[i]["data"] = self.do_vertical_allocation(self.emissions[i]["data"]) + + regridded_emissions = self.regrid.start_regridding(gfas=True, vertical=self.vertical) + + for emission in regridded_emissions: + dict_aux = {'name': emission['name'], 'data': emission['data'], 'units': 'm'} + # dict_aux['data'] = dict_aux['data'].reshape((1,) + dict_aux['data'].shape) + self.emissions.append(dict_aux) + self.vertical = None + + settings.write_time('GfasEmissionInventory', 'do_regrid', gettime() - st_time, level=2) + + # def do_vertical(self, values): + # """ + # Distribute the emissions on the top layer into their low layers depending on the method + # + # :param values: Emissions to distribute. + # :type values: numpy.ndarray + # + # :return: Emission already distributed. 
+ # :rtype: numpy.ndarray + # """ + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # if settings.log_level_1: + # print "\tVertical interpolation (Inventory: {0}; Sector: {1})".format(self.inventory_name, self.sector) + # + # return_value = self.vertical.do_vertical_interpolation(values) + # + # if settings.log_level_3: + # print "TIME -> EmissionInventory.do_vertical: {0} s".format(round(gettime() - st_time, 2)) + # + # return return_value + # + # def process_emissions(self, auxiliary_files_path, grid_type, vertical_description_path, vertical_profile_path, + # p_month, p_day, p_hour, date, timestep_type, timestep_num, timestep_freq, world_info, + # speciation_profile_path, molecular_weights_path, fluxes=False, + # inc_lat=None, inc_lon=None, + # new_pole_longitude_degrees=None, new_pole_latitude_degrees=None, centre_lat=None, centre_lon=None, + # west_boundary=None, south_boundary=None, inc_rlat=None, inc_rlon=None, + # lat_1=None, lat_2=None, lon_0=None, lat_0=None, nx=None, ny=None, inc_x=None, inc_y=None, + # x_0=None, y_0=None, + # is_nmmb=False): + # """ + # Manages all the process to get the desired output. + # + # :param auxiliary_files_path: Path to the folder where will be stored the weight matrix files if them are not created yet. + # :type auxiliary_files_path: str + # + # :param grid_type: Type of the destination grid + # :type grid_type: str + # + # :param inc_lat: Resolution of the latitude coordinates. + # :type inc_lat: float + # + # :param inc_lon: Resolution of the longitude coordinates. + # :type inc_lon: float + # + # :param vertical_profile_path: Path to the file that contains all the vertical profiles. + # :type vertical_profile_path: str + # + # :param vertical_description_path: path to the file that contain the vertical description of the required output + # file. + # :type vertical_description_path: str + # + # :param speciation_profile_path: Path to the file that contains all the speciation profiles. 
+ # :type speciation_profile_path: str + # + # :param molecular_weights_path: Path to the file that contains all the needed molecular weights. + # :type molecular_weights_path: str + # + # :param is_nmmb: Indicates if the required ouptut is for NMMB-MONARCH + # :type is_nmmb: bool + # """ + # from hermesv3_gr.modules.vertical.vertical_gfas import GfasVerticalDistribution + # from hermesv3_gr.modules.regrid.regrid_conservative import ConservativeRegrid + # + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # self.emissions = self.read_emissions() + # self.masking.check_regrid_mask(self.get_input_path(pollutant=self.pollutant_dicts[0])) + # + # self.vertical = GfasVerticalDistribution(vertical_description_path, self.approach) + # + # weight_matrix_file = os.path.join(auxiliary_files_path, "Weight_Matrix_{0}.nc".format(self.inventory_name)) + # self.regrid = ConservativeRegrid(self.get_input_path(), weight_matrix_file, grid_type, auxiliary_files_path, + # masking=self.masking, inc_lat=inc_lat, inc_lon=inc_lon, + # centre_lat=centre_lat, + # centre_lon=centre_lon, west_boundary=west_boundary, + # south_boundary=south_boundary, inc_rlat=inc_rlat, inc_rlon=inc_rlon, + # lat_1=lat_1, lat_2=lat_2, lon_0=lon_0, lat_0=lat_0, nx=nx, ny=ny, inc_x=inc_x, + # inc_y=inc_y, x_0=x_0, y_0=y_0, + # is_nmmb=is_nmmb) + # + # for i in xrange(len(self.emissions)): + # self.emissions[i]["data"] = self.do_vertical_allocation(self.emissions[i]["data"]) + # self.emissions[i]["data"] = self.do_regrid(self.emissions[i]["data"]) + # self.emissions[i]["data"] = self.do_vertical(self.emissions[i]["data"]) + # self.do_temporal(date, timestep_type, timestep_num, timestep_freq, p_month, p_day, p_hour, world_info, + # auxiliary_files_path, self.regrid.grid) + # self.do_speciation(speciation_profile_path, molecular_weights_path, self.regrid.grid.cell_area, fluxes) + # if is_nmmb and self.regrid.grid.grid_type == 'global': + # self.unify_borders() + # 
self.add_ghost_cell() + # + # if settings.log_level_3: + # print "TIME -> EmissionInventory.process_emissions: {0} s".format(round(gettime() - st_time, 2)) + +if __name__ == "__main__": + pass diff --git a/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py new file mode 100755 index 0000000..f9098a4 --- /dev/null +++ b/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +import os +import sys +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings +from emission_inventory import EmissionInventory + + +class PointSourceEmissionInventory(EmissionInventory): + """ + Class that defines the content and the methodology for the Point Source emission inventories + + :param options: Place where are stored all the arguments. + :type options: namespace + + :param grid: Destination grid object. + :type grid: Grid + + :param current_date: Date of the day to simulate. + :type current_date: datetime.datetime + + :param inventory_name: Name of the inventory to use. + :type inventory_name: str + + :param sector: Name of the sector of the inventory to use. 
+ :type sector: str + + :param pollutants: List of the pollutant name to take into account. + :type pollutants: list of str + + :param path: Path where are stored all the datasets to use. eg: /esarchive/recon/jrc/htapv2/monthly_mean + :type path: str + + :param frequency: Frequency of the inputs. [yearly, monthly, daily] + :type frequency: str + + :param reference_year: year of reference of the information of the dataset. + :type reference_year: int + + :param factors: Description of the scale factors per country. (e.g. Spain 1.5, China 3.) + :type factors: str + + :param regrid_mask: Description of the masking countries (adding e.g. + Spain Andorra) (subtracting e.g. - Spain) + :type regrid_mask: str + + :param p_vertical: ID of the vertical profile to use. + :type p_vertical: str + + :param p_month: ID of the temporal monthly profile to use. + :type p_month: str + + :param p_day: ID of the temporal daily profile to use. + :type p_day: str + + :param p_hour: ID of the temporal hourly profile to use. + :type p_hour: str + + :param p_speciation: ID of the speciation profile to use. 
+ :type p_speciation: str + """ + + def __init__(self, options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, + frequency, vertical_output_profile, reference_year=2010, factors=None, regrid_mask=None, + p_vertical=None, p_month=None, p_day=None, p_hour=None, p_speciation=None): + + st_time = gettime() + settings.write_log('\t\tCreating point source emission inventory.', level=3) + + super(PointSourceEmissionInventory, self).__init__( + options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, frequency, + vertical_output_profile, reference_year=reference_year, factors=factors, regrid_mask=regrid_mask, + p_vertical=p_vertical, p_month=p_month, p_day=p_day, p_hour=p_hour, p_speciation=p_speciation) + + self.crs = {'init': 'epsg:4326'} + self.location = None + self.area = None + self.vertical = 'custom' + + settings.write_time('PointSourceEmissionInventory', 'Init', gettime() - st_time, level=3) + + return None + + def do_regrid(self): + """ + Allocates the point source emission on the correspondent cell (getting the ID of the cell). + + :return: True when everything is correct. 
+ :rtype: bool + """ + import pandas as pd + import geopandas as gpd + from shapely.geometry import Point + + st_time = gettime() + settings.write_log("\tAllocating point sources on grid:", level=2) + + num = 1 + for pollutant in self.pollutant_dicts: + if self.location is None: + grid_shape = self.grid.to_shapefile(full_grid=False) + + settings.write_log('\t\tPollutant {0} ({1}/{2})'.format( + pollutant['name'], num, len(self.pollutant_dicts)), level=3) + num += 1 + + df = pd.read_csv(pollutant['path']) + + geometry = [Point(xy) for xy in zip(df.Lon, df.Lat)] + df = gpd.GeoDataFrame(df.loc[:, ['Emis', 'Alt_Injection']], crs=self.crs, geometry=geometry) + + df = df.to_crs(grid_shape.crs) + df = gpd.sjoin(df, grid_shape, how="inner", op='intersects') + + # Drops duplicates when the point source is on the boundary of the cell + df = df[~df.index.duplicated(keep='first')] + + if self.location is None: + self.location = df.loc[:, ['Alt_Injection', 'FID']] + self.area = self.grid.cell_area.flatten()[self.location['FID'].values] + + dict_aux = { + 'name': pollutant['name'], + 'units': '...', + 'data': df.loc[:, 'Emis'].values / self.area + } + + self.emissions.append(dict_aux) + settings.write_time('PointSourceEmissionInventory', 'do_regrid', gettime() - st_time, level=2) + return True + + def calculate_altitudes(self, vertical_description_path): + """ + Calculates the number layer to allocate the point source. 
+ + :param vertical_description_path: Path to the file that contains the vertical description + :type vertical_description_path: str + + :return: True + :rtype: bool + """ + import pandas as pd + + st_time = gettime() + settings.write_log("\t\tCalculating vertical allocation.", level=3) + df = pd.read_csv(vertical_description_path, sep=';') + # df.sort_values(by='height_magl', ascending=False, inplace=True) + self.location['layer'] = None + + for i, line in df.iterrows(): + self.location.loc[self.location['Alt_Injection'] <= line['height_magl'], 'layer'] = line['Ilayer'] - 1 + self.location.loc[self.location['Alt_Injection'] <= line['height_magl'], 'Alt_Injection'] = None + del self.location['Alt_Injection'] + + settings.write_time('PointSourceEmissionInventory', 'calculate_altitudes', gettime() - st_time, level=2) + + return True + + def point_source_by_cell(self): + """ + Sums the different emissions that are allocated in the same cell and layer. + + :return: None + """ + import pandas as pd + import geopandas as gpd + from shapely.geometry import Point + + st_time = gettime() + + for emission in self.emissions: + + aux_df = self.location.copy() + aux_df['Emis'] = emission['data'] + aux_df = aux_df.groupby(['FID', 'layer']).sum() + aux_df.reset_index(inplace=True) + emission['data'] = aux_df['Emis'] + + self.location = aux_df.loc[:, ['FID', 'layer']] + + settings.write_time('PointSourceEmissionInventory', 'Init', gettime() - st_time, level=3) + + return None + + +if __name__ == "__main__": + pass diff --git a/hermesv3_gr/modules/grids/__init__.py b/hermesv3_gr/modules/grids/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/grids/grid.py b/hermesv3_gr/modules/grids/grid.py new file mode 100644 index 0000000..55af335 --- /dev/null +++ b/hermesv3_gr/modules/grids/grid.py @@ -0,0 +1,556 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. 
+# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +import os +import sys +from timeit import default_timer as gettime +import numpy as np +import ESMF + +import hermesv3_gr.config.settings as settings + + +class Grid(object): + """ + Grid object that contains the information of the output grid. + + :param grid_type: Type of the output grid [global, rotated, lcc, mercator]. + :type grid_type: str + + :param vertical_description_path: Path to the file that contains the vertical description. + :type vertical_description_path: str + + :param temporal_path: Path to the temporal folder. + :type temporal_path: str + + :param nprocs: Number of processors to slice the map. 
+ :type nprocs: int + """ + def __init__(self, grid_type, vertical_description_path, temporal_path): + st_time = gettime() + # settings.write_log('Creating Grid...', level=1) + + # Defining class atributes + self.procs_array = None + self.nrows = 0 + self.ncols = 0 + + self.grid_type = grid_type + self.vertical_description = self.set_vertical_levels(vertical_description_path) + self.center_latitudes = None + self.center_longitudes = None + self.boundary_latitudes = None + self.boundary_longitudes = None + + self.cell_area = None + if settings.rank == 0: + if not os.path.exists(os.path.join(temporal_path)): + os.makedirs(os.path.join(temporal_path)) + settings.comm.Barrier() + + self.coords_netcdf_file = os.path.join(temporal_path, 'temporal_coords.nc') + self.temporal_path = temporal_path + self.shapefile_path = None + + self.esmf_grid = None + self.x_lower_bound = None + self.x_upper_bound = None + self.y_lower_bound = None + self.y_upper_bound = None + self.shape = None + + settings.write_time('Grid', 'Init', gettime() - st_time, level=1) + + return None + + @staticmethod + def create_esmf_grid_from_file(file_name, sphere=True): + import ESMF + + st_time = gettime() + settings.write_log('\t\tCreating ESMF grid from file {0}'.format(file_name), level=3) + + # ESMF.Manager(debug=True) + + grid = ESMF.Grid(filename=file_name, filetype=ESMF.FileFormat.GRIDSPEC, is_sphere=sphere, + add_corner_stagger=True) + + settings.write_time('Grid', 'create_esmf_grid_from_file', gettime() - st_time, level=3) + return grid + + + @staticmethod + def select_grid(grid_type, vertical_description_path, timestep_num, temporal_path, inc_lat, inc_lon, + centre_lat, centre_lon, west_boundary, south_boundary, inc_rlat, inc_rlon, + lat_1, lat_2, lon_0, lat_0, nx, ny, inc_x, inc_y, x_0, y_0, lat_ts): + # TODO describe better the rotated parameters + """ + Creates a Grid object depending on the grid type. 
+ + :param grid_type: type of grid to create [global, rotated, lcc, mercator] + :type grid_type: str + + :param vertical_description_path: Path to the file that contains the vertical description. + :type vertical_description_path: str + + :param timestep_num: Number of timesteps. + :type timestep_num: int + + :param temporal_path: Path to the temporal folder. + :type temporal_path: str + + :param inc_lat: [global] Increment between latitude centroids (degrees). + :type inc_lat: float + + :param inc_lon: [global] Increment between longitude centroids (degrees). + :type inc_lon: float + + :param centre_lat: [rotated] + :type centre_lat: float + + :param centre_lon: [rotated] + :type centre_lon: float + + :param west_boundary: [rotated] + :type west_boundary: float + + :param south_boundary: [rotated] + :type south_boundary: float + + :param inc_rlat: [rotated] Increment between rotated latitude centroids (degrees). + :type inc_rlat: float + + :param inc_rlon: [rotated] Increment between rotated longitude centroids (degrees). + :type inc_rlon: float + + :param lat_ts: [mercator] + :type lat_ts: float + + :param lat_1: [lcc] Value of the Lat1 for the LCC grid type. + :type lat_1: float + + :param lat_2: [lcc] Value of the Lat2 for the LCC grid type. + :type lat_2: float + + :param lon_0: [lcc, mercator] Value of the Lon0 for the LCC grid type. + :type lon_0: float + + :param lat_0: [lcc] Value of the Lat0 for the LCC grid type. + :type lat_0: float + + :param nx: [lcc, mercator] Number of cells on the x dimension. + :type nx: int + + :param ny: [lcc, mercator] Number of cells on the y dimension. + :type ny: int + + :param inc_x: [lcc, mercator] Increment between x dimensions cell centroids (metres). + :type inc_x: int + + :param inc_y: [lcc, mercator] Increment between y dimensions cell centroids (metres). + :type inc_y: int + + :param x_0: [lcc, mercator] Value of the X0 for the LCC grid type. 
+ :type x_0: float + + :param y_0: [lcc, mercator] Value of the Y0 for the LCC grid type. + :type y_0: float + + :return: Grid object. It will return a GlobalGrid, RotatedGrid or LccGrid depending on the type. + :rtype: Grid + """ + + st_time = gettime() + settings.write_log('Selecting grid', level=1) + + # Creating a different object depending on the grid type + if grid_type == 'global': + from hermesv3_gr.modules.grids.grid_global import GlobalGrid + grid = GlobalGrid(grid_type, vertical_description_path, timestep_num, temporal_path, inc_lat, inc_lon) + + elif grid_type == 'rotated': + from hermesv3_gr.modules.grids.grid_rotated import RotatedGrid + grid = RotatedGrid(grid_type, vertical_description_path, timestep_num, temporal_path, + centre_lat, centre_lon, west_boundary, south_boundary, inc_rlat, inc_rlon) + + elif grid_type == 'lcc': + from hermesv3_gr.modules.grids.grid_lcc import LccGrid + grid = LccGrid(grid_type, vertical_description_path, timestep_num, temporal_path, lat_1, lat_2, lon_0, + lat_0, nx, ny, inc_x, inc_y, x_0, y_0) + + elif grid_type == 'mercator': + from hermesv3_gr.modules.grids.grid_mercator import MercatorGrid + grid = MercatorGrid(grid_type, vertical_description_path, timestep_num, temporal_path, lat_ts, lon_0, + nx, ny, inc_x, inc_y, x_0, y_0) + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise NotImplementedError("The grid type {0} is not implemented.".format(grid_type) + + " Use 'global', 'rotated' or 'lcc'.") + sys.exit(1) + + settings.write_time('Grid', 'select_grid', gettime() - st_time, level=3) + + return grid + + @staticmethod + def set_vertical_levels(vertical_description_path): + """ + Extracts the vertical levels. + + :param vertical_description_path: path to the file that contain the vertical description of the required output + file. + :type vertical_description_path: str + + :return: Vertical levels. 
+ :rtype: list of int + """ + import pandas as pd + + st_time = gettime() + settings.write_log('\t\tSetting vertical levels', level=3) + + df = pd.read_csv(vertical_description_path, sep=';') + + heights = df.height_magl.values + + settings.write_time('Grid', 'set_vertical_levels', gettime() - st_time, level=3) + + return heights + + def write_coords_netcdf(self): + """ + Writes the temporal file with the coordinates of the output needed to generate the weight matrix. + If it is already well created it will only add the cell_area parameter. + """ + # TODO Not to write two NetCDF. Open one and modify it. + from hermesv3_gr.tools.netcdf_tools import write_netcdf + + st_time = gettime() + settings.write_log('\twrite_coords_netcdf', level=3) + + if not self.chech_coords_file(): + # Writes an auxiliary empty NetCDF only with the coordinates and an empty variable. + write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + RegularLatLon=True) + + # Calculates the cell area of the auxiliary NetCDF file + self.cell_area = self.get_cell_area() + + # Re-writes the NetCDF adding the cell area + write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + cell_area=self.cell_area, boundary_latitudes=self.boundary_latitudes, + boundary_longitudes=self.boundary_longitudes, RegularLatLon=True) + else: + self.cell_area = self.get_cell_area() + + settings.write_time('Grid', 'write_coords_netcdf', gettime() - st_time, level=3) + + def get_cell_area(self): + """ + Calculates the cell area of the grid. + + :return: Area of each cell of the grid. 
+ :rtype: numpy.array + """ + from cdo import Cdo + from netCDF4 import Dataset + + st_time = gettime() + settings.write_log('\t\tGetting cell area from {0}'.format(self.coords_netcdf_file), level=3) + + # Initialises the CDO + cdo = Cdo() + # Creates a temporal file 's' with the cell area + s = cdo.gridarea(input=self.coords_netcdf_file) + # Get the cell area of the temporal file + nc_aux = Dataset(s, mode='r') + cell_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + settings.write_time('Grid', 'get_cell_area', gettime() - st_time, level=3) + + return cell_area + + @staticmethod + def create_regular_grid_1d_array(center, inc, boundary): + """ + Creates a regular grid giving the center, boundary and increment. + + :param center: Center of the coordinates. + :type center: float + + :param inc: Resolution: Increment between cells. + :type inc: float + + :param boundary: Limit of the coordinates: Distance between the first cell and the center. + :type boundary: float + + :return: 1D array with the coordinates. + :rtype: numpy.array + """ + + st_time = gettime() + + # Calculates first center point. + origin = center - abs(boundary) + # Calculates the quantity of cells. + n = (abs(boundary) / inc) * 2 + # Calculates all the values + values = np.arange(origin + inc, origin + (n * inc) - inc + inc / 2, inc, dtype=np.float) + + settings.write_time('Grid', 'create_regular_grid_1d_array', gettime() - st_time, level=3) + + return values + + @staticmethod + def create_bounds(coords, inc, number_vertices=2, inverse=False): + """ + Calculates the vertices coordinates. + + :param coords: Coordinates in degrees (latitude or longitude) + :type coords: numpy.array + + :param inc: Increment between center values. + :type inc: float + + :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the + boundaries (by default 2). + :type number_vertices: int + + :param inverse: For some grid latitudes. 
+ :type inverse: bool + + :return: Array with as many elements as vertices for each value of coords. + :rtype: numpy.array + """ + st_time = gettime() + settings.write_log('\t\t\tCreating boundaries.', level=3) + + # Creates new arrays moving the centers half increment less and more. + coords_left = coords - inc / 2 + coords_right = coords + inc / 2 + + # Defining the number of corners needed. 2 to regular grids and 4 for irregular ones. + if number_vertices == 2: + # Creates an array of N arrays of 2 elements to store the floor and the ceil values for each cell + bound_coords = np.dstack((coords_left, coords_right)) + bound_coords = bound_coords.reshape((len(coords), number_vertices)) + elif number_vertices == 4: + # Creates an array of N arrays of 4 elements to store the corner values for each cell + # It can be stored in clockwise starting form the left-top element, or in inverse mode. + if inverse: + bound_coords = np.dstack((coords_left, coords_left, coords_right, coords_right)) + + else: + bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + if settings.rank == 0: + raise ValueError('ERROR: The number of vertices of the boundaries must be 2 or 4.') + settings.write_log('ERROR: Check the .err file to get more info.') + sys.exit(1) + + settings.write_time('Grid', 'create_bounds', gettime() - st_time, level=3) + + return bound_coords + + def get_coordinates_2d(self): + """ + Returns the coordinates but in a 2D format. + + A regular grid only needs two 1D arrays (latitudes and longitudes) to define a grid. + This method is to convert this two 1D arrays into 2D arrays replicating the info of each value. + + :return: Tuple with 2 fields, the first the 2D latitude coordinate, and the second for the 2D longitude + coordinate. 
+ :rtype: tuple + """ + st_time = gettime() + settings.write_log('\t\tGetting 2D coordinates from ESMPy Grid', level=3) + + lat = self.esmf_grid.get_coords(1, ESMF.StaggerLoc.CENTER).T + lon = self.esmf_grid.get_coords(0, ESMF.StaggerLoc.CENTER).T + + settings.write_time('Grid', 'get_coordinates_2d', gettime() - st_time, level=3) + + return lat, lon + + def is_shapefile(self): + return os.path.exists(self.shapefile_path) + + def to_shapefile(self, full_grid=True): + import geopandas as gpd + import pandas as pd + import numpy as np + import pyproj + from shapely.geometry import Polygon, Point + + st_time = gettime() + # settings.write_log('\t\tGetting grid shapefile', level=3) + + if full_grid: + self.shapefile_path = os.path.join(self.temporal_path, 'shapefile') + else: + self.shapefile_path = os.path.join(self.temporal_path, 'shapefiles_n{0}'.format(settings.size)) + + if settings.rank == 0: + if not os.path.exists(self.shapefile_path): + os.makedirs(self.shapefile_path) + if full_grid: + self.shapefile_path = os.path.join(self.shapefile_path, 'grid_shapefile.shp') + else: + self.shapefile_path = os.path.join(self.shapefile_path, 'grid_shapefile_{0}.shp'.format(settings.rank)) + + done = self.is_shapefile() + + if not done: + settings.write_log('\t\tGrid shapefile not done. 
Lets try to create it.', level=3) + # Create Shapefile + + # Use the meters coordiantes to create the shapefile + + y = self.boundary_latitudes + x = self.boundary_longitudes + # sys.exit() + + if self.grid_type == 'global': + x = x.reshape((x.shape[1], x.shape[2])) + y = y.reshape((y.shape[1], y.shape[2])) + + + # x_aux = np.empty((x.shape[0], y.shape[0], 4)) + # x_aux[:, :, 0] = x[:, np.newaxis, 0] + # x_aux[:, :, 1] = x[:, np.newaxis, 1] + # x_aux[:, :, 2] = x[:, np.newaxis, 1] + # x_aux[:, :, 3] = x[:, np.newaxis, 0] + aux_shape = (y.shape[0], x.shape[0], 4) + x_aux = np.empty(aux_shape) + x_aux[:, :, 0] = x[np.newaxis, :, 0] + x_aux[:, :, 1] = x[np.newaxis, :, 1] + x_aux[:, :, 2] = x[np.newaxis, :, 1] + x_aux[:, :, 3] = x[np.newaxis, :, 0] + + x = x_aux + # print x + del x_aux + + # y_aux = np.empty((x.shape[0], y.shape[0], 4)) + # y_aux[:, :, 0] = y[np.newaxis, :, 0] + # y_aux[:, :, 1] = y[np.newaxis, :, 0] + # y_aux[:, :, 2] = y[np.newaxis, :, 1] + # y_aux[:, :, 3] = y[np.newaxis, :, 1] + + y_aux = np.empty(aux_shape) + y_aux[:, :, 0] = y[:, np.newaxis, 0] + y_aux[:, :, 1] = y[:, np.newaxis, 0] + y_aux[:, :, 2] = y[:, np.newaxis, 1] + y_aux[:, :, 3] = y[:, np.newaxis, 1] + + # print y_aux + y = y_aux + del y_aux + + # exit() + + if not full_grid: + y = y[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound, :] + x = x[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound, :] + + aux_b_lats = y.reshape((y.shape[0] * y.shape[1], y.shape[2])) + aux_b_lons = x.reshape((x.shape[0] * x.shape[1], x.shape[2])) + + # The regular lat-lon projection has only 2 (laterals) points for each cell instead of 4 (corners) + # if aux_b_lats.shape[1] == 2: + # aux_b = np.empty((aux_b_lats.shape[0], 4)) + # aux_b[:, 0] = aux_b_lats[:, 0] + # aux_b[:, 1] = aux_b_lats[:, 0] + # aux_b[:, 2] = aux_b_lats[:, 1] + # aux_b[:, 3] = aux_b_lats[:, 1] + # aux_b_lats = aux_b + # + # if aux_b_lons.shape[1] == 2: + # aux_b = 
np.empty((aux_b_lons.shape[0], 4)) + # aux_b[:, 0] = aux_b_lons[:, 0] + # aux_b[:, 1] = aux_b_lons[:, 1] + # aux_b[:, 2] = aux_b_lons[:, 1] + # aux_b[:, 3] = aux_b_lons[:, 0] + # aux_b_lons = aux_b + + # Create one dataframe with 8 columns, 4 points with two coordinates each one + df_lats = pd.DataFrame(aux_b_lats, columns=['b_lat_1', 'b_lat_2', 'b_lat_3', 'b_lat_4']) + df_lons = pd.DataFrame(aux_b_lons, columns=['b_lon_1', 'b_lon_2', 'b_lon_3', 'b_lon_4']) + df = pd.concat([df_lats, df_lons], axis=1) + + # Substituate 8 columns by 4 with the two coordinates + df['p1'] = zip(df.b_lon_1, df.b_lat_1) + del df['b_lat_1'], df['b_lon_1'] + df['p2'] = zip(df.b_lon_2, df.b_lat_2) + del df['b_lat_2'], df['b_lon_2'] + df['p3'] = zip(df.b_lon_3, df.b_lat_3) + del df['b_lat_3'], df['b_lon_3'] + df['p4'] = zip(df.b_lon_4, df.b_lat_4) + del df['b_lat_4'], df['b_lon_4'] + + # Make a list of list of tuples + # [[(point_1.1), (point_1.2), (point_1.3), (point_1.4)], + # [(point_2.1), (point_2.2), (point_2.3), (point_2.4)], ...] + list_points = df.as_matrix() + del df['p1'], df['p2'], df['p3'], df['p4'] + + + # List of polygons from the list of points + geometry = [Polygon(list(points)) for points in list_points] + # geometry = [] + # for point in list_points: + # print point + # geometry.append(Polygon(list(point))) + # print geometry[0] + # sys.exit() + # print len(geometry), len(df), + + gdf = gpd.GeoDataFrame(df, crs={'init': 'epsg:4326'}, geometry=geometry) + gdf = gdf.to_crs(self.crs) + + gdf['FID'] = gdf.index + + gdf.to_file(self.shapefile_path) + else: + settings.write_log('\t\tGrid shapefile already done. Lets try to read it.', level=3) + gdf = gpd.read_file(self.shapefile_path) + + settings.write_time('Grid', 'to_shapefile', gettime() - st_time, level=1) + + return gdf + + def chech_coords_file(self): + """ + Checks if the auxiliary coordinates file is created well. + + :return: True: if it is well created. 
+ :rtype: bool + """ + # TODO better check by partition size + return os.path.exists(self.coords_netcdf_file) + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/grids/grid_global.py b/hermesv3_gr/modules/grids/grid_global.py new file mode 100644 index 0000000..28e563f --- /dev/null +++ b/hermesv3_gr/modules/grids/grid_global.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +import sys +from mpi4py import MPI + +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings +from grid import Grid + + +class GlobalGrid(Grid): + """ + Global grid object that contains all the information to do a global output. + + :param grid_type: Type of the output grid [global, rotated, lcc, mercator]. + :type grid_type: str + + :param vertical_description_path: Path to the file that contains the vertical description. + :type vertical_description_path: str + + :param timestep_num: Number of timesteps. + :type timestep_num: int + + :param temporal_path: Path to the temporal folder. + :type temporal_path: str + + :param inc_lat: Increment between latitude centroids. + :type inc_lat: float + + :param inc_lon: Increment between longitude centroids. 
+ :type inc_lon: float + + :param center_longitude: Location of the longitude of the center cell. + Default = 0 + :type center_longitude: float + """ + + def __init__(self, grid_type, vertical_description_path, timestep_num, temporal_path, inc_lat, inc_lon, + center_longitude=float(0)): + import ESMF + + st_time = gettime() + settings.write_log('\tCreating Global grid.', level=2) + + # Initialize the class using parent + super(GlobalGrid, self).__init__(grid_type, vertical_description_path, temporal_path) + + self.center_lat = float(0) + self.center_lon = center_longitude + self.inc_lat = inc_lat + self.inc_lon = inc_lon + + self.crs = {'init': 'epsg:4326'} + self.create_coords() + + if not os.path.exists(self.coords_netcdf_file): + if settings.rank == 0: + super(GlobalGrid, self).write_coords_netcdf() + settings.comm.Barrier() + + self.esmf_grid = super(GlobalGrid, self).create_esmf_grid_from_file(self.coords_netcdf_file) + + self.x_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] + self.x_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] + self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] + self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] + + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) + + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + + settings.write_time('GlobalGrid', 'Init', gettime() - st_time, level=1) + + return None + + def create_coords(self): + """ + Create the coordinates for a global domain. 
+ """ + import numpy as np + + st_time = gettime() + settings.write_log('\t\tCreating global coordinates', level=3) + + self.center_latitudes = self.create_regular_grid_1d_array(self.center_lat, self.inc_lat, -90) + self.boundary_latitudes = self.create_bounds(self.center_latitudes, self.inc_lat) + + # ===== Longitudes ===== + self.center_longitudes = self.create_regular_grid_1d_array(self.center_lon, self.inc_lon, -180) + if len(self.center_longitudes)//2 < settings.size: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError("ERROR: Maximum number of processors exceeded. " + + "It has to be less or equal than {0}.".format(len(self.center_longitudes)//2)) + sys.exit(1) + self.boundary_longitudes = self.create_bounds(self.center_longitudes, self.inc_lon) + + # Creating special cells with half cell on le left and right border + lat_origin = self.center_lat - abs(-90) + lon_origin = self.center_lon - abs(-180) + n_lat = (abs(-90) / self.inc_lat) * 2 + n_lon = (abs(-180) / self.inc_lon) * 2 + self.center_latitudes = np.concatenate([ + [lat_origin + self.inc_lat / 2 - self.inc_lat / 4], self.center_latitudes, + [lat_origin + (n_lat * self.inc_lat) - self.inc_lat / 2 + self.inc_lat / 4]]) + + self.center_longitudes = np.concatenate([ + [lon_origin + self.inc_lon / 2 - self.inc_lon / 4], self.center_longitudes, + [lon_origin + (n_lon * self.inc_lon) - self.inc_lon / 2 + self.inc_lon / 4]]) + + self.boundary_latitudes = np.concatenate([ + [[lat_origin, lat_origin + self.inc_lat / 2]], self.boundary_latitudes, + [[lat_origin + (n_lat * self.inc_lat) - self.inc_lat / 2, lat_origin + (n_lat * self.inc_lat)]]]) + + self.boundary_longitudes = np.concatenate([ + [[lon_origin, lon_origin + self.inc_lon / 2]], self.boundary_longitudes, + [[lon_origin + (n_lon * self.inc_lon) - self.inc_lon / 2, lon_origin + (n_lon * self.inc_lon)]]],) + + self.boundary_latitudes = self.boundary_latitudes.reshape((1,) + 
self.boundary_latitudes.shape) + self.boundary_longitudes = self.boundary_longitudes.reshape((1,) + self.boundary_longitudes.shape) + + settings.write_time('GlobalGrid', 'create_coords', gettime() - st_time, level=2) + + +if __name__ == '__main__': + pass + diff --git a/hermesv3_gr/modules/grids/grid_lcc.py b/hermesv3_gr/modules/grids/grid_lcc.py new file mode 100644 index 0000000..ca553b6 --- /dev/null +++ b/hermesv3_gr/modules/grids/grid_lcc.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +import sys +from mpi4py import MPI +from timeit import default_timer as gettime + + +import hermesv3_gr.config.settings as settings +from grid import Grid + + +class LccGrid(Grid): + """ + Lambert Conformal Conic (LCC) grid object that contains all the information to do a lcc output. + + :param grid_type: Type of the output grid [global, rotated, lcc, mercator]. + :type grid_type: str + + :param vertical_description_path: Path to the file that contains the vertical description. + :type vertical_description_path: str + + :param timestep_num: Number of timesteps. + :type timestep_num: int + + :param temporal_path: Path to the temporal folder. + :type temporal_path: str + + :param lat_1: Value of the Lat1 for the LCC grid type. 
+ :type lat_1: float + + :param lat_2: Value of the Lat2 for the LCC grid type. + :type lat_2: float + + :param lon_0: Value of the Lon0 for the LCC grid type. + :type lon_0: float + + :param lat_0: Value of the Lat0 for the LCC grid type. + :type lat_0: float + + :param nx: Number of cells on the x dimension. + :type nx: int + + :param ny: Number of cells on the y dimension. + :type ny: int + + :param inc_x: Increment between x dimensions cell centroids (metres). + :type inc_x: int + + :param inc_y: Increment between y dimensions cell centroids (metres). + :type inc_y: int + + :param x_0: Value of the X0 for the LCC grid type. + :type x_0: float + + :param y_0: Value of the Y0 for the LCC grid type. + :type y_0: float + + :param earth_radius: Radius of the Earth (metres). + Default = 6370000.000 + :type earth_radius: float + """ + + def __init__(self, grid_type, vertical_description_path, timestep_num, temporal_path, lat_1, lat_2, lon_0, lat_0, + nx, ny, inc_x, inc_y, x_0, y_0, earth_radius=6370000.000): + import ESMF + st_time = gettime() + settings.write_log('\tCreating Lambert Conformal Conic (LCC) grid.', level=2) + + # Initialises with parent class + super(LccGrid, self).__init__(grid_type, vertical_description_path, temporal_path) + + # Setting parameters + self.lat_1 = lat_1 + self.lat_2 = lat_2 + self.lon_0 = lon_0 + self.lat_0 = lat_0 + self.nx = nx + self.ny = ny + self.inc_x = inc_x + self.inc_y = inc_y + self.x_0 = x_0 + (inc_x / 2) + self.y_0 = y_0 + (inc_y / 2) + self.earth_radius = earth_radius + + # UTM coordinates + self.x = None + self.y = None + + # Creating coordinates + self.crs = "+proj=lcc +lat_1={0} +lat_2={1} +lat_0={2} +lon_0={3} +x_0={4} +y_0={5} +datum=WGS84 +units=m".format( + self.lat_1, self.lat_2, self.lat_0, self.lon_0, 0, 0) + self.create_coords() + + if not os.path.exists(self.coords_netcdf_file): + if settings.rank == 0: + # super(LccGrid, self).write_coords_netcdf() + self.write_coords_netcdf() + settings.comm.Barrier() + + 
self.esmf_grid = super(LccGrid, self).create_esmf_grid_from_file(self.coords_netcdf_file, sphere=False) + # + self.x_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] + self.x_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] + self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] + self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] + + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) + # print 'Rank {0} _3_\n'.format(settings.rank) + settings.comm.Barrier() + # print 'Rank {0} _4_\n'.format(settings.rank) + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + + settings.write_time('LccGrid', 'Init', gettime() - st_time, level=1) + + return None + + def write_coords_netcdf(self): + """ + Writes the temporal file with the coordinates of the output needed to generate the weight matrix. + If it is already well created it will only add the cell_area parameter. + """ + from hermesv3_gr.tools.netcdf_tools import write_netcdf + + st_time = gettime() + settings.write_log('\tWriting {0} file.'.format(self.coords_netcdf_file), level=3) + + if not self.chech_coords_file(): + # Writes an auxiliary empty NetCDF only with the coordinates and an empty variable. 
+ write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + LambertConformalConic=True, lcc_x=self.x, lcc_y=self.y, + lat_1_2="{0}, {1}".format(self.lat_1, self.lat_2), lon_0=self.lon_0, lat_0=self.lat_0) + + # Calculates the cell area of the auxiliary NetCDF file + self.cell_area = self.get_cell_area() + + # Re-writes the NetCDF adding the cell area + write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + cell_area=self.cell_area, + LambertConformalConic=True, lcc_x=self.x, lcc_y=self.y, + lat_1_2="{0}, {1}".format(self.lat_1, self.lat_2), lon_0=self.lon_0, lat_0=self.lat_0) + else: + self.cell_area = self.get_cell_area() + + settings.write_time('LccGrid', 'write_coords_netcdf', gettime() - st_time, level=3) + + def create_coords(self): + """ + Create the coordinates for a lambert conformal conic domain. + """ + import numpy as np + from pyproj import Proj + + st_time = gettime() + settings.write_log('\t\tCreating lcc coordinates', level=3) + + # Creates a regular grid in metres (Two 1D arrays) + self.x = np.arange(self.x_0, self.x_0 + self.inc_x * self.nx, self.inc_x, dtype=np.float) + if len(self.x)//2 < settings.size: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError("ERROR: Maximum number of processors exceeded. 
" + + "It has to be less or equal than {0}.".format(len(self.x)//2)) + sys.exit(1) + self.y = np.arange(self.y_0, self.y_0 + self.inc_y * self.ny, self.inc_y, dtype=np.float) + + # 1D to 2D + x = np.array([self.x] * len(self.y)) + y = np.array([self.y] * len(self.x)).T + + # Create UTM bounds + y_b = super(LccGrid, self).create_bounds(y, self.inc_y, number_vertices=4, inverse=True) + x_b = super(LccGrid, self).create_bounds(x, self.inc_x, number_vertices=4) + + # Creates the LCC projection + projection = Proj( + proj='lcc', + ellps='WGS84', + R=self.earth_radius, + lat_1=self.lat_1, + lat_2=self.lat_2, + lon_0=self.lon_0, + lat_0=self.lat_0, + to_meter=1, + x_0=0, + y_0=0, + a=self.earth_radius, + k_0=1.0) + + # UTM to LCC + self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) + self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) + + settings.write_time('LccGrid', 'create_coords', gettime() - st_time, level=2) + + +if __name__ == '__main__': + pass + diff --git a/hermesv3_gr/modules/grids/grid_mercator.py b/hermesv3_gr/modules/grids/grid_mercator.py new file mode 100644 index 0000000..d3fc6e1 --- /dev/null +++ b/hermesv3_gr/modules/grids/grid_mercator.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ + +import os +import sys +from mpi4py import MPI +from timeit import default_timer as gettime + + +import hermesv3_gr.config.settings as settings +from grid import Grid + + +class MercatorGrid(Grid): + """ + Mercator grid object that contains all the information to do a mercator output. + + :param grid_type: Type of the output grid [global, rotated, lcc, mercator]. + :type grid_type: str + + :param vertical_description_path: Path to the file that contains the vertical description. + :type vertical_description_path: str + + :param timestep_num: Number of timesteps. + :type timestep_num: int + + :param temporal_path: Path to the temporal folder. + :type temporal_path: str + + :param lat_1: Value of the Lat1 for the LCC grid type. + :type lat_1: float + + :param lat_2: Value of the Lat2 for the LCC grid type. + :type lat_2: float + + :param lon_0: Value of the Lon0 for the LCC grid type. + :type lon_0: float + + :param lat_0: Value of the Lat0 for the LCC grid type. + :type lat_0: float + + :param nx: Number of cells on the x dimension. + :type nx: int + + :param ny: Number of cells on the y dimension. + :type ny: int + + :param inc_x: Increment between x dimensions cell centroids (metres). + :type inc_x: int + + :param inc_y: Increment between y dimensions cell centroids (metres). + :type inc_y: int + + :param x_0: Value of the X0 for the LCC grid type. + :type x_0: float + + :param y_0: Value of the Y0 for the LCC grid type. + :type y_0: float + + :param earth_radius: Radius of the Earth (metres). 
+ Default = 6370000.000 + :type earth_radius: float + """ + + def __init__(self, grid_type, vertical_description_path, timestep_num, temporal_path, lat_ts, lon_0, + nx, ny, inc_x, inc_y, x_0, y_0, earth_radius=6370000.000): + import ESMF + st_time = gettime() + settings.write_log('\tCreating Mercator grid.', level=2) + + # Initialises with parent class + super(MercatorGrid, self).__init__(grid_type, vertical_description_path, temporal_path) + + # Setting parameters + self.lat_ts = lat_ts + self.lon_0 = lon_0 + self.nx = nx + self.ny = ny + self.inc_x = inc_x + self.inc_y = inc_y + self.x_0 = x_0 + (inc_x / 2) + self.y_0 = y_0 + (inc_y / 2) + self.earth_radius = earth_radius + + # UTM coordinates + self.x = None + self.y = None + + # Creating coordinates + self.crs = "+proj=merc +a={2} +b={2} +lat_ts={0} +lon_0={1}".format(self.lat_ts, self.lon_0, earth_radius) + + self.create_coords() + + if not os.path.exists(self.coords_netcdf_file): + if settings.rank == 0: + self.write_coords_netcdf() + settings.comm.Barrier() + + self.esmf_grid = super(MercatorGrid, self).create_esmf_grid_from_file(self.coords_netcdf_file, sphere=False) + # + self.x_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] + self.x_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] + self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] + self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] + + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) + # print 'Rank {0} _3_\n'.format(settings.rank) + settings.comm.Barrier() + # print 'Rank {0} _4_\n'.format(settings.rank) + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + + settings.write_time('MercatorGrid', 'Init', gettime() - st_time, level=1) + + def write_coords_netcdf(self): + """ + Writes the temporal file with the 
coordinates of the output needed to generate the weight matrix. + If it is already well created it will only add the cell_area parameter. + """ + from hermesv3_gr.tools.netcdf_tools import write_netcdf + + st_time = gettime() + + if not self.chech_coords_file(): + # Writes an auxiliary empty NetCDF only with the coordinates and an empty variable. + write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + Mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) + + # Calculates the cell area of the auxiliary NetCDF file + self.cell_area = self.get_cell_area() + + # Re-writes the NetCDF adding the cell area + write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [ + {'name': 'var_aux', + 'units': '', + 'data': 0} + ], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + cell_area=self.cell_area, + Mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) + else: + self.cell_area = self.get_cell_area() + + settings.write_time('MercatorGrid', 'write_coords_netcdf', gettime() - st_time, level=3) + + def create_coords(self): + """ + Create the coordinates for a lambert conformal conic domain. + """ + import numpy as np + from pyproj import Proj + + st_time = gettime() + + # Creates a regular grid in metres (Two 1D arrays) + self.x = np.arange(self.x_0, self.x_0 + self.inc_x * self.nx, self.inc_x, dtype=np.float) + if len(self.x)//2 < settings.size: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError("ERROR: Maximum number of processors exceeded. 
" + + "It has to be less or equal than {0}.".format(len(self.x)//2)) + sys.exit(1) + self.y = np.arange(self.y_0, self.y_0 + self.inc_y * self.ny, self.inc_y, dtype=np.float) + + # 1D to 2D + x = np.array([self.x] * len(self.y)) + y = np.array([self.y] * len(self.x)).T + + # Create UTM bounds + y_b = super(MercatorGrid, self).create_bounds(y, self.inc_y, number_vertices=4, inverse=True) + x_b = super(MercatorGrid, self).create_bounds(x, self.inc_x, number_vertices=4) + + # Creates the LCC projection + projection = Proj(self.crs) + + # UTM to Mercator + self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) + self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) + + settings.write_time('MercatorGrid', 'create_coords', gettime() - st_time, level=3) + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/grids/grid_rotated.py b/hermesv3_gr/modules/grids/grid_rotated.py new file mode 100644 index 0000000..f945612 --- /dev/null +++ b/hermesv3_gr/modules/grids/grid_rotated.py @@ -0,0 +1,235 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ +from timeit import default_timer as gettime +import sys +import os +from mpi4py import MPI + +import hermesv3_gr.config.settings as settings +from grid import Grid + + +class RotatedGrid(Grid): + # TODO Rotated options description + """ + :param grid_type: Type of the output grid [global, rotated, lcc, mercator]. + :type grid_type: str + + :param vertical_description_path: Path to the file that contains the vertical description. + :type vertical_description_path: str + + + :param timestep_num: Number of timesteps. + :type timestep_num: int + """ + + def __init__(self, grid_type, vertical_description_path, timestep_num, temporal_path, centre_lat, centre_lon, + west_boundary, south_boundary, inc_rlat, inc_rlon): + import ESMF + + st_time = gettime() + settings.write_log('\tCreating Rotated grid.', level=2) + + # Initialises with parent class + super(RotatedGrid, self).__init__(grid_type, vertical_description_path, temporal_path) + + # Setting parameters + self.new_pole_longitude_degrees = -180 + centre_lon + self.new_pole_latitude_degrees = centre_lat # 90 - centre_lat + self.centre_lat = centre_lat + self.centre_lon = centre_lon + self.west_boundary = west_boundary # + inc_rlon #/ 2 + self.south_boundary = south_boundary # + inc_rlat #/ 2 + self.inc_rlat = inc_rlat + self.inc_rlon = inc_rlon + self.n_lat = int((abs(south_boundary) / inc_rlat) * 2 + 1) + self.n_lon = int((abs(west_boundary) / inc_rlon) * 2 + 1) + + # Rotated coordinates + self.rlat = None + self.rlon = None + + # Create coordinates + self.crs = {'init': 'epsg:4326'} + self.create_coords() + + if not os.path.exists(self.coords_netcdf_file): + if settings.rank == 0: + # super(RotatedGrid, self).write_coords_netcdf() + self.write_coords_netcdf() + settings.comm.Barrier() + + # self.write_coords_netcdf() + + self.esmf_grid = super(RotatedGrid, self).create_esmf_grid_from_file(self.coords_netcdf_file, sphere=False) + + self.x_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] + 
self.x_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] + self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] + self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] + + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) + + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + + settings.write_time('RotatedGrid', 'Init', gettime() - st_time, level=1) + + return None + + def create_coords(self): + """ + Create the coordinates for a rotated domain. + """ + from hermesv3_gr.tools.coordinates_tools import create_regular_rotated + import numpy as np + + st_time = gettime() + settings.write_log('\t\tCreating rotated coordinates.', level=3) + + # Create rotated coordinates + (self.rlat, self.rlon, br_lats_single, br_lons_single) = create_regular_rotated( + self.south_boundary, self.west_boundary, self.inc_rlat, self.inc_rlon, self.n_lat, self.n_lon) + if len(self.rlon)//2 < settings.size: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError("ERROR: Maximum number of processors exceeded. 
" + + "It has to be less or equal than {0}.".format(len(self.rlon)//2)) + sys.exit(1) + # 1D to 2D + c_lats = np.array([self.rlat] * len(self.rlon)).T + c_lons = np.array([self.rlon] * len(self.rlat)) + + # Create rotated boundary coordinates + b_lats = super(RotatedGrid, self).create_bounds(c_lats, self.inc_rlat, number_vertices=4, inverse=True) + b_lons = super(RotatedGrid, self).create_bounds(c_lons, self.inc_rlon, number_vertices=4) + + # Rotated to Lat-Lon + self.boundary_longitudes, self.boundary_latitudes = self.rotated2latlon(b_lons, b_lats) + self.center_longitudes, self.center_latitudes = self.rotated2latlon(c_lons, c_lats) + + settings.write_time('RotatedGrid', 'create_coords', gettime() - st_time, level=2) + + def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180): + """ + Calculates the unrotated coordinates using the rotated ones. + + :param lon_deg: Rotated longitude coordinate. + :type lon_deg: numpy.array + + :param lat_deg: Rotated latitude coordinate. + :type lat_deg: numpy.array + + :param lon_min: Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360) + :type lon_min: float + + :return: Unrotated coordinates. Longitudes, Latitudes + :rtype: tuple(numpy.array, numpy.array) + """ + import numpy as np + import math + + st_time = gettime() + settings.write_log('\t\t\tTransforming rotated coordinates to latitude, longitude coordinates.', level=3) + + # TODO Document this function + degrees_to_radians = math.pi / 180. + # radians_to_degrees = 180. / math.pi + + # Positive east to negative east + # self.new_pole_longitude_degrees -= 180 + + tph0 = self.new_pole_latitude_degrees * degrees_to_radians + tlm = lon_deg * degrees_to_radians + tph = lat_deg * degrees_to_radians + tlm0d = self.new_pole_longitude_degrees + ctph0 = np.cos(tph0) + stph0 = np.sin(tph0) + + stlm = np.sin(tlm) + ctlm = np.cos(tlm) + stph = np.sin(tph) + ctph = np.cos(tph) + + # Latitude + sph = (ctph0 * stph) + (stph0 * ctph * ctlm) + # if sph > 1.: + # sph = 1. 
+ # if sph < -1.: + # sph = -1. + # print type(sph) + sph[sph > 1.] = 1. + sph[sph < -1.] = -1. + + aph = np.arcsin(sph) + aphd = aph / degrees_to_radians + + # Longitude + anum = ctph * stlm + denom = (ctlm * ctph - stph0 * sph) / ctph0 + relm = np.arctan2(anum, denom) - math.pi + almd = relm / degrees_to_radians + tlm0d + + # if almd < min_lon: + # almd += 360 + # elif almd > max_lon: + # almd -= 360 + almd[almd > (lon_min + 360)] -= 360 + almd[almd < lon_min] += 360 + + settings.write_time('RotatedGrid', 'rotated2latlon', gettime() - st_time, level=3) + + return almd, aphd + + def write_coords_netcdf(self): + """ + Writes the temporal file with the coordinates of the output needed to generate the weight matrix. + If it is already well created it will only add the cell_area parameter. + """ + from hermesv3_gr.modules.writing.writer import Writer + + st_time = gettime() + settings.write_log('\tWriting {0} file.'.format(self.coords_netcdf_file), level=3) + + if not self.chech_coords_file(): + # Writes an auxiliary empty NetCDF only with the coordinates and an empty variable. 
+ Writer.write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + Rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, + north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) + + # Calculates the cell area of the auxiliary NetCDF file + self.cell_area = self.get_cell_area() + + # Re-writes the NetCDF adding the cell area + Writer.write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, + [{'name': 'var_aux', 'units': '', 'data': 0}], + boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + cell_area=self.cell_area, + Rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, + north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) + else: + self.cell_area = self.get_cell_area() + + settings.write_time('RotatedGrid', 'write_coords_netcdf', gettime() - st_time, level=3) + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/masking/__init__.py b/hermesv3_gr/modules/masking/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/masking/masking.py b/hermesv3_gr/modules/masking/masking.py new file mode 100644 index 0000000..d8c7d24 --- /dev/null +++ b/hermesv3_gr/modules/masking/masking.py @@ -0,0 +1,263 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings +import os +import sys +from warnings import warn as warning + + +class Masking(object): + def __init__(self, world_info, factors_mask_values, regrid_mask_values, grid, world_mask_file=None): + from timezonefinder import TimezoneFinder + + st_time = gettime() + settings.write_log('\t\tCreating mask.', level=2) + + self.adding = None + self.world_info = world_info + self.country_codes = self.get_country_codes() + self.world_mask_file = world_mask_file + self.factors_mask_values = self.parse_factor_values(factors_mask_values) + self.regrid_mask_values = self.parse_masking_values(regrid_mask_values) + self.regrid_mask = None + self.scale_mask = None + self.tf = TimezoneFinder() + + self.grid = grid + + settings.write_time('Masking', 'Init', gettime() - st_time, level=3) + + return None + + def get_country_codes(self): + import pandas as pd + + st_time = gettime() + # settings.write_log('\t\t\tGetting country codes.', level=3) + + # df = pd.read_csv(self.world_info, sep=';', index_col=False, names=["country", "country_code"]) + df = pd.read_csv(self.world_info, sep=';') + del df['time_zone'], df['time_zone_code'] + df = df.drop_duplicates().dropna() + df = df.set_index('country_code_alpha') + countries_dict = df.to_dict() + countries_dict = countries_dict['country_code'] + + settings.write_time('Masking', 'get_country_codes', gettime() - st_time, level=3) + return countries_dict + + @staticmethod + def partlst(lst, n): + import itertools + """Partition @lst in @n balanced parts, in given order""" + parts, rest = 
divmod(len(lst), n) + lstiter = iter(lst) + for j in xrange(n): + plen = len(lst) / n + (1 if rest > 0 else 0) + rest -= 1 + yield list(itertools.islice(lstiter, plen)) + + def create_country_iso(self, in_nc): + import numpy as np + from hermesv3_gr.tools.netcdf_tools import extract_vars + from hermesv3_gr.modules.writing.writer import Writer + + st_time = gettime() + settings.write_log('\t\t\tCreating {0} file.'.format(self.world_mask_file), level=2) + # output_path = os.path.join(output_dir, 'iso.nc') + + lat_o, lon_o = extract_vars(in_nc, ['lat', 'lon']) + lon = np.array([lon_o['data']] * len(lat_o['data'])) + lat = np.array([lat_o['data']] * len(lon_o['data'])).T + + dst_var = [] + num = 0 + points = np.array(zip(lat.flatten(), lon.flatten())) + + points_list = list(self.partlst(points, settings.size)) + + for lat_aux, lon_aux in points_list[settings.rank]: + num += 1 + + settings.write_log("\t\t\t\tlat:{0}, lon:{1} ({2}/{3})".format( + lat_aux, lon_aux, num, len(points_list[settings.rank])), level=3) + + tz = self.find_timezone(lat_aux, lon_aux) + tz_id = self.get_iso_code_from_tz(tz) + dst_var.append(tz_id) + dst_var = np.array(dst_var) + dst_var = settings.comm.gather(dst_var, root=0) + + if settings.rank == 0: + dst_var = np.concatenate(dst_var) + dst_var = dst_var.reshape((1,) + lat.shape) + data = [{ + 'name': 'timezone_id', + 'units': '', + 'data': dst_var, + }] + Writer.write_netcdf(self.world_mask_file, lat, lon, data, RegularLatLon=True) + settings.comm.Barrier() + + settings.write_time('Masking', 'create_country_iso', gettime() - st_time, level=3) + + return True + + def find_timezone(self, latitude, longitude): + + st_time = gettime() + + if longitude < -180: + longitude += 360 + elif longitude > +180: + longitude -= 360 + + tz = self.tf.timezone_at(lng=longitude, lat=latitude) + + settings.write_time('Masking', 'find_timezone', gettime() - st_time, level=3) + + return tz + + def get_iso_code_from_tz(self, tz): + import pandas as pd + + st_time = 
gettime() + + zero_values = [None, ] + if tz in zero_values: + return 0 + + df = pd.read_csv(self.world_info, sep=';') + code = df.country_code[df.time_zone == tz].values + + settings.write_time('Masking', 'get_iso_code_from_tz', gettime() - st_time, level=3) + + return code[0] + + def parse_factor_values(self, values): + import re + + st_time = gettime() + + if type(values) != str: + return None + values = list(map(str, re.split(' , |, | ,|,', values))) + scale_dict = {} + for element in values: + element = list(map(str, re.split(' | ', element))) + scale_dict[int(self.country_codes[element[0]])] = element[1] + + settings.write_log('\t\t\tApplying scaling factors for {0}.'.format(values), level=3) + settings.write_time('Masking', 'parse_factor_values', gettime() - st_time, level=3) + + return scale_dict + + def parse_masking_values(self, values): + """ + + :param values: + :return: + :rtype: list + """ + import re + + st_time = gettime() + + if type(values) != str: + return None + values = list(map(str, re.split(' , |, | ,|,| ', values))) + if values[0] == '+': + self.adding = True + elif values[0] == '-': + self.adding = False + else: + if len(values) > 0: + settings.write_log('WARNING: Check the .err file to get more info. Ignoring mask') + if settings.rank == 0: + warning("WARNING: The list of masking does not start with '+' or '-'. 
Ignoring mask.") + return None + code_list = [] + for country in values[1:]: + code_list.append(int(self.country_codes[country])) + + if self.adding: + settings.write_log("\t\t\tCreating mask to do {0} countries.".format(values[1:]), level=3) + else: + settings.write_log("\t\t\tCreating mask to avoid {0} countries.".format(values[1:]), level=3) + settings.write_time('Masking', 'parse_masking_values', gettime() - st_time, level=3) + + return code_list + + def check_regrid_mask(self, input_file): + + if self.regrid_mask_values is not None: + if not os.path.exists(self.world_mask_file): + self.create_country_iso(input_file) + self.regrid_mask = self.custom_regrid_mask() + if self.factors_mask_values is not None: + if not os.path.exists(self.world_mask_file): + self.create_country_iso(input_file) + self.scale_mask = self.custom_scale_mask() + + def custom_regrid_mask(self): + import numpy as np + from netCDF4 import Dataset + + st_time = gettime() + + netcdf = Dataset(self.world_mask_file, mode='r') + values = netcdf.variables['timezone_id'][:] + netcdf.close() + + if self.adding: + mask = np.zeros(values.shape) + for code in self.regrid_mask_values: + mask[values == code] = 1 + else: + mask = np.ones(values.shape) + for code in self.regrid_mask_values: + mask[values == code] = 0 + + settings.write_time('Masking', 'custom_regrid_mask', gettime() - st_time, level=3) + + return mask + + def custom_scale_mask(self): + import numpy as np + from hermesv3_gr.tools.netcdf_tools import extract_vars + + st_time = gettime() + + [values] = extract_vars(self.world_mask_file, ['timezone_id']) + + values = values['data'] + mask = np.ones(values.shape) + for code, factor in self.factors_mask_values.iteritems(): + mask[values == code] = factor + + settings.write_time('Masking', 'custom_scale_mask', gettime() - st_time, level=3) + + return mask + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/regrid/__init__.py b/hermesv3_gr/modules/regrid/__init__.py new file 
mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/regrid/regrid.py b/hermesv3_gr/modules/regrid/regrid.py new file mode 100644 index 0000000..b8c8273 --- /dev/null +++ b/hermesv3_gr/modules/regrid/regrid.py @@ -0,0 +1,281 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +import os +import sys +import numpy as np +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings + + +class Regrid(object): + """ + Regrid class that contains all the needed information to do the regrid. + + :param input_file: Path to the input emission file. + :type input_file: str + + :param pollutants: Pollutants available of the input file. + :type pollutants: list + + :param weight_matrix_file: Path to the weight matrix. + :type weight_matrix_file: str + + :param grid: Grid object. + :type grid: Grid + + :param masking: Masking object. 
+ Default = None + :type masking: Masking + """ + def __init__(self, pollutant_dicts, weight_matrix_file, grid, masking=None): + st_time = gettime() + settings.write_log('\t\t\tInitializing Regrid.', level=3) + + self.grid = grid + self.pollutant_dicts = pollutant_dicts + self.weight_matrix_file = weight_matrix_file + self.masking = masking + + if not self.is_created_weight_matrix(erase=False): + settings.write_log("\t\t\tWeight matrix {0} is not created. ".format(weight_matrix_file) + + "Trying to create it", level=1) + settings.comm.Barrier() + self.create_weight_matrix() + + settings.write_time('Regrid', 'Init', round(gettime() - st_time), level=3) + + return None + + def create_weight_matrix(self): + """ + This function is not used because all the child classes have to implement it. + """ + pass + # implemented on inner class + + def apply_weights(self, values): + """ + Calculates the regridded values using the ESMF algorithm for a 3D array. + + :param values: Input values to regrid + :type values: numpy.array + + :return: Values already regridded. 
+ :rtype: numpy.array + """ + from netCDF4 import Dataset + + st_time = gettime() + + # Read weight matrix + nc_weights = Dataset(self.weight_matrix_file, mode='r') + + src_indices = nc_weights.variables['src_indices'][:] + max_index = nc_weights.variables['dst_indices'][:].max() + 1 + dst_indices_counts = nc_weights.variables['dst_indices_count'][:] + weights = nc_weights.variables['weights'][:] + + nc_weights.close() + + # Do masking + if self.masking.regrid_mask is not None: + values = np.multiply(values, self.masking.regrid_mask, dtype=settings.precision) + # Do scalling + if self.masking.scale_mask is not None: + values = np.multiply(values, self.masking.scale_mask, dtype=settings.precision) + values = values.reshape(values.shape[1], values.shape[2] * values.shape[3]) + + # Expand src values + src_aux = np.take(values, src_indices, axis=1) + + # Apply weights + dst_field_aux = np.multiply(src_aux, weights, dtype=settings.precision) + + # Reduce dst values + dst_field = self.reduce_dst_field(dst_field_aux, dst_indices_counts, max_index) + + settings.write_time('Regrid', 'apply_weights', gettime() - st_time, level=3) + + return dst_field + + @staticmethod + def reduce_dst_field(dst_field_extended, dst_indices, max_index): + """ + Reduces the values of the regridded data. + eg: + dst_field_extended = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + goes to: 0 0 0 1 1 1 2 2 2 2 + + dst_indices = [3, 6, 10] + + result = [0+1+2, 3+4+5+, 6+7+8+9] + result = [3, 12, 30] + + :param dst_field_extended: Array with as many elements as interconnections between src and dst with the dst values + to be gathered. 
+ :type dst_field_extended: numpy.array + + :param dst_indices: Array with the last element index to + :type dst_indices: numpy.array + + :param max_index: + :type max_index: int + + :return: + :rtype: numpy.array + """ + st_time = gettime() + + # Create new + dst_field = np.zeros((dst_field_extended.shape[0], max_index), dtype=settings.precision) + # dst_field = np.zeros((dst_field_extended.shape[0], self.grid.shape[-1] * self.grid.shape[-2]), dtype=settings.precision) + + previous = 0 + count = 0 + for i in dst_indices: + try: + dst_field[:, count] = dst_field_extended[:, previous:i].sum(axis=1, dtype=settings.precision) + except: + pass + count += 1 + previous = i + + settings.write_time('Regrid', 'reduce_dst_field', gettime() - st_time, level=3) + + return dst_field + + # @staticmethod + # def reduce_dst_field_test1(dst_field_aux, dst_indices_counts, max_values): + # import numexpr as ne + # import numpy as np + # + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # dst_indices_counts_2 = np.cumsum(dst_indices_counts) + # + # dst_field = np.zeros((dst_field_aux.shape[0], max_values), dtype=settings.precision) + # previous = 0 + # count = 0 + # a = 0 + # # print dst_indices_counts_2 + # for i in dst_indices_counts_2: + # # print count + # # dst_field[:, count] = ne.evaluate('sum(dst_field_aux[:, {0}:{1}], 0)'.format(previous, i, 0), out= a) + # dst_field[:, count] = ne.evaluate ('sum(u)', {'u': dst_field_aux[:, previous:i]}) + # + # count += 1 + # previous = i + # + # if settings.log_level_3: + # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(gettime() - st_time, 2)) + # + # return dst_field + # + # @staticmethod + # def reduce_dst_field_test2(dst_field_aux, dst_indices, max_values): + # import numpy as np + # + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # # print dst_field_aux[0: 10], len(dst_field_aux), dst_field_aux.shape + # # print dst_indices[0: 10], 
len(dst_indices), dst_indices.shape + # print '1' + # unique, counts = np.unique(dst_indices, return_counts=True) + # print '2' + # counts_2 = np.cumsum(counts) + # print '3' + # + # # print unique[0: 10], len(unique), unique.shape + # # print counts[0: 10], len(counts), counts.shape, type(counts) + # + # dst_field_aux_2 = np.array_split(dst_field_aux, counts_2, axis=1) + # print '4' + # + # print dst_field_aux_2[0: 10], len(dst_field_aux_2), dst_field_aux_2.shape + # + # exit() + # + # dst_indices_counts_2 = np.cumsum(dst_indices_counts) + # print 'max values', max_values + # print dst_indices_counts[0:10], len(dst_indices_counts) + # print dst_indices_counts_2[0:10], len(dst_indices_counts_2) + # exit() + # dst_field = np.zeros((dst_field_aux.shape[0], max_values), dtype=settings.precision) + # previous = 0 + # count = 0 + # # print dst_indices_counts_2 + # for i in dst_indices_counts_2: + # # print count + # dst_field[:, count] = dst_field_aux[:, previous:i].sum(axis=1, dtype=settings.precision) + # + # count += 1 + # previous = i + # + # if settings.log_level_3: + # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(gettime() - st_time, 2)) + # + # return dst_field + + def is_created_weight_matrix(self, erase=False): + """ + Checks if the weight matrix is created + + :return: Boolean that indicates if the weight matrix is already created. + :rtype: bool + """ + if erase and settings.rank == 0: + if os.path.exists(self.weight_matrix_file): + os.remove(self.weight_matrix_file) + + return os.path.exists(self.weight_matrix_file) + + # def start_regridding(self, values): + # """ + # Start the regridding process for the emission. + # It will create the weight matrix if it is not already created. + # + # :param values: Input values to regrid. + # :type values: numpy.ndarray + # + # :return: Values already regridded. 
+ # :rtype: numpy.ndarray + # """ + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # values = self.apply_weights(values) + # + # if settings.log_level_3: + # print 'TIME -> Regrid.start_regridding: {0} s'.format(round(gettime() - st_time, 2)) + # + # return values + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/regrid/regrid_conservative.py b/hermesv3_gr/modules/regrid/regrid_conservative.py new file mode 100644 index 0000000..ee29353 --- /dev/null +++ b/hermesv3_gr/modules/regrid/regrid_conservative.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +import os +import sys +import numpy as np +from timeit import default_timer as gettime +import ESMF + +import hermesv3_gr.config.settings as settings +from regrid import Regrid + +# from mpi4py import MPI +# icomm = MPI.COMM_WORLD +# comm = icomm.Split(color=0, key=0) +# nprocs = comm.Get_size() +# rank = comm.Get_rank() + + +class ConservativeRegrid(Regrid): + """ + ConservativeRegrid class that contains all the needed information to do the conservative regrid. + + :param input_file: Path to the input emission file. + :type input_file: str + + :param pollutants: Pollutants available of the input file. 
+ :type pollutants: list + + :param weight_matrix_file: Path to the weight matrix that will be created if it is not created yet. + :type weight_matrix_file: str + + :param grid: ... + :type grid: Grid + + :param masking: ... + :type masking: Masking + """ + def __init__(self, pollutant_dicts, weight_matrix_file, grid, masking=None): + st_time = gettime() + settings.write_log('\t\tInitializing Conservative regrid.', level=2) + + super(ConservativeRegrid, self).__init__(pollutant_dicts, weight_matrix_file, grid, masking=masking) + + settings.write_time('ConservativeRegrid', 'Init', gettime() - st_time, level=2) + + return None + + def create_weight_matrix(self): + """ + Calls to ESMF_RegridWeightGen to generate the weight matrix. + """ + + st_time = gettime() + + src_grid = self.grid.create_esmf_grid_from_file(self.pollutant_dicts[0]['path']) + src_field = ESMF.Field(src_grid, name='my input field') + src_field.read(filename=self.pollutant_dicts[0]['path'], variable=self.pollutant_dicts[0]['name'], timeslice=0) + + dst_grid = self.grid.esmf_grid + dst_field = ESMF.Field(dst_grid, name='my outut field') + regrid = ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.CONSERVE, )#src_mask_values=self.masking.regrid_mask) + # regrid = ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.BILINEAR, )#src_mask_values=self.masking.regrid_mask) + + settings.write_time('ConservativeRegrid', 'create_weight_matrix', gettime() - st_time, level=1) + + def start_regridding(self, gfas=False, vertical=None): + from hermesv3_gr.tools.netcdf_tools import extract_vars + + st_time = gettime() + + weights = self.read_weight_matrix() + + dst_field_list = [] + num =1 + for pollutant_single_dict in self.pollutant_dicts: + settings.write_log('\t\tPollutant {0} ({1}/{2})'.format( + pollutant_single_dict['name'], num, len(self.pollutant_dicts)), level=3) + num += 1 + + [values] = 
extract_vars(pollutant_single_dict['path'], [pollutant_single_dict['name']]) + values = values['data'] + if gfas: + values = vertical.do_vertical_interpolation_allocation(values, vertical.altitude) + # Do masking + if self.masking.regrid_mask is not None: + values = np.multiply(values, self.masking.regrid_mask, dtype=settings.precision) + # Do scalling + if self.masking.scale_mask is not None: + values = np.multiply(values, self.masking.scale_mask, dtype=settings.precision) + if gfas: + values = values.reshape((values.shape[-3], values.shape[-2] * values.shape[-1],)) + else: + values = values.reshape((1, values.shape[-2] * values.shape[-1],)) + + unique, counts = np.unique(weights['row'], return_counts=True) + new_dst_indices = np.cumsum(counts) + + # Expand src values + src_aux = np.take(values, weights['col'], axis=1) + + # Apply weights + dst_field_aux = np.multiply(src_aux, weights['S'], dtype=settings.precision) + + # Reduce dst values + dst_field = self.reduce_dst_field(dst_field_aux, new_dst_indices, self.grid.shape[-1] * self.grid.shape[-2]) + + if gfas: + dst_field = vertical.do_vertical_interpolation(dst_field) + dst_field = dst_field.reshape((self.grid.shape[-3], self.grid.shape[-2], self.grid.shape[-1],)) + else: + dst_field = dst_field.reshape((self.grid.shape[-2], self.grid.shape[-1],)) + + dst_field_list.append({'data': dst_field, 'name': pollutant_single_dict['name']}) + + settings.write_time('ConservativeRegrid', 'start_regridding', gettime() - st_time, level=3) + return dst_field_list + + # def start_carles_regridding_1(self): + # from hermesv3_gr.tools.netcdf_tools import extract_vars + # + # weights = self.read_weight_matrix() + # print 'Weights Rank {3} col: {0} row: {1} S: {2}'.format(weights['col'].shape, weights['row'].shape, + # weights['S'].shape, settings.rank) + # + # dst_field_list = [] + # for pollutant_single_dict in self.pollutant_dicts: + # if settings.log_level_2: + # print '\t\t {0}'.format(pollutant_single_dict['name']) + # # 
dst_field = np.zeros(((self.grid.x_upper_bound - self.grid.x_lower_bound)*(self.grid.y_upper_bound - self.grid.y_lower_bound),)) + # dst_field = np.zeros((weights['max'],)) + # + # [values] = extract_vars(pollutant_single_dict['path'], [pollutant_single_dict['name']]) + # values = values['data'].reshape((values['data'].shape[-1]*values['data'].shape[-2],)) + # # print 'VALUES SUM {0}'.format(values.sum()) + # # print dst_field.shape, weights['row'].max() + # # print values.shape + # # exit() + # # print 'Weights Rank {0} dst_field: {1} '.format(rank, dst_field.shape) + # # if rank == 1: + # if True: + # for i in xrange(len(weights['S'])): + # try: + # dst_field[weights['row'][i] - 1] += weights['S'][i] * values[weights['col'][i] - 1] + # # print '{0}/{1} {2}%'.format(i+1, len(weights['S']), round((i+1)*100/len(weights['S'])),2) + # # print values[weights['col'][i] - 1], weights['col'][i] - 1 + # # print dst_field[weights['row'][i] - 1], weights['S'][i], values[weights['col'][i] - 1] + # except: + # print values[weights['col'][i] - 1] + # print weights['S'][i] + # print dst_field[weights['row'][i] - 1] + # dst_field = dst_field.reshape(len(self.grid.center_latitudes),len(self.grid.center_longitudes)) + # dst_field = dst_field[self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] + # + # print 'RESHAPE: {0}'.format((self.grid.x_upper_bound - self.grid.x_lower_bound, self.grid.y_upper_bound - self.grid.y_lower_bound)) + # dst_field_list.append({'data': dst_field, 'name': pollutant_single_dict['name']}) + # + # # dst_field_list.append({'data': dst_field.reshape((self.grid.x_upper_bound - self.grid.x_lower_bound, self.grid.y_upper_bound - self.grid.y_lower_bound)), 'name': pollutant_single_dict['name']}) + # # dst_field_list.append({'data': dst_field.reshape((self.grid.y_upper_bound - self.grid.y_lower_bound,self.grid.x_upper_bound - self.grid.x_lower_bound,)), 'name': pollutant_single_dict['name']}) + # return dst_field_list 
+ + def read_weight_matrix(self): + from netCDF4 import Dataset + dict_aux = {} + nc = Dataset(self.weight_matrix_file, mode='r') + + dict_aux['col'] = nc.variables['col'][:] + dict_aux['row'] = nc.variables['row'][:] + dict_aux['S'] = nc.variables['S'][:] + nc.close() + dict_aux['max'] = dict_aux['row'].max() + + dict_aux['col'] -= 1 + dict_aux['row'] -= 1 + + if settings.size != 1: + inc = dict_aux['row'][:-1] - dict_aux['row'][1:] + index = np.where(inc > inc.max() * 0.5)[0] + index = np.concatenate([[0], index]) + + try: + if settings.rank != 0: + dict_aux['col'] = dict_aux['col'][index[settings.rank] +1: index[settings.rank + 1] + 1] + dict_aux['row'] = dict_aux['row'][index[settings.rank] +1: index[settings.rank + 1] + 1] + dict_aux['S'] = dict_aux['S'][index[settings.rank] +1: index[settings.rank + 1] + 1] + else: + dict_aux['col'] = dict_aux['col'][: index[settings.rank + 1] + 1] + dict_aux['row'] = dict_aux['row'][: index[settings.rank + 1] + 1] + dict_aux['S'] = dict_aux['S'][: index[settings.rank + 1] + 1] + except IndexError: + dict_aux['col'] = dict_aux['col'][index[settings.rank] + 1:] + dict_aux['row'] = dict_aux['row'][index[settings.rank] + 1:] + dict_aux['S'] = dict_aux['S'][index[settings.rank] + 1:] + + return dict_aux + + def wait_to_weightmatrix(self): + import time + + find = False + + while not find: + if os.path.exists(self.weight_matrix_file): + pre_size = 0 + post_size = 1 + print "I'm {0}".format(settings.rank), 'Writing Weight Matrix {0}'.format(self.weight_matrix_file) + # find = True + while pre_size != post_size: + print "I'm {0}".format(settings.rank), pre_size, post_size + pre_size = post_size + post_size = os.path.getsize(self.weight_matrix_file) + time.sleep(1) + find = True + print "I'm {0}".format(settings.rank), 'FINISHED' + else: + time.sleep(5) + print "I'm {0}".format(settings.rank), 'Waiting Weight Matrix' + + def re_order_weight_matrix(self): + """ + Takes the ESMF Weight Matrix and re-order it in our way to re-write it. 
+ + It will re-order and re-name the variables: + + dst_indices = ordered (row) + src_indices = ordered (col) + weights = ordered (S) + + dst_indices_count: + dst_indices = [0, 0, 0, 1, 1, 1, 2, 2, 2, 2] + dst_indices_count = [ 3, 6, 10] + + """ + from netCDF4 import Dataset + import numpy as np + + if settings.log_level_3: + st_time = gettime() + + # Read ESMF Weight matrix NetCDF + nc_weights = Dataset(self.weight_matrix_file, mode='r') + + row = nc_weights.variables['row'][:] + col = nc_weights.variables['col'][:] + s = nc_weights.variables['S'][:] + + nc_weights.close() + + # Re-order and re-name values + indices = np.argsort(row) + dst_indices = np.array(row)[indices] - 1 + weights = np.array(s, dtype=settings.precision)[indices] + src_indices = np.array(col)[indices] - 1 + + # Short indices + # dst_indices = [0, 0, 0, 1, 1, 1, 2, 2, 2, 2] + # new_dst_indices = [3, 6, 10] + unique, counts = np.unique(dst_indices, return_counts=True) + new_dst_indices = np.cumsum(counts) + + # Create new weight matrix NetCDF + nc_weights = Dataset(self.weight_matrix_file, mode='w') + + nc_weights.createDimension('dim', len(indices)) + nc_weights.createDimension('dim2', new_dst_indices.shape[0]) + + nc_dst_indices = nc_weights.createVariable('dst_indices', 'i', ('dim',), zlib=True) + nc_dst_indices[:] = dst_indices + + nc_dst_indices_count = nc_weights.createVariable('dst_indices_count', 'i', ('dim2',), zlib=True) + nc_dst_indices_count[:] = new_dst_indices + + nc_src_indices = nc_weights.createVariable('src_indices', 'i', ('dim',), zlib=True) + nc_src_indices[:] = src_indices + + weights_var = nc_weights.createVariable('weights', 'd', ('dim',), zlib=True) + weights_var[:] = weights + + nc_weights.close() + + if settings.log_level_3: + print 'TIME -> ConservativeRegrid.re_order_weight_matrix: {0} s'.format(round(gettime() - st_time, 2)) + + def apply_weights_test(self, values): + """ + Calculates the regridded values using the ESMF algorithm for a 3D array specifically for a 
conservative regrid. + + :param values: Input values to regrid. + :type values: numpy.ndarray + + :return: Values already regridded. + :rtype: numpy.ndarray + """ + from netCDF4 import Dataset + if settings.log_level_3: + st_time = gettime() + dst_field = super(ConservativeRegrid, self).apply_weights(values) + + nc_weights = Dataset(self.weight_matrix_file, mode='r') + + n_b = nc_weights.dimensions['n_b'].size + frac_b = nc_weights.variables['frac_b'][:] + + nc_weights.close() + + # ! Adjust destination field by fraction + # do i=1, n_b + # if (frac_b(i) .ne. 0.0) then + # dst_field(i)=dst_field(i)/frac_b(i) + # endif + # enddo + for i in xrange(n_b): + if frac_b[i] != 0: + dst_field[:, i] = dst_field[:, i] / frac_b[i] + if settings.log_level_3: + print 'TIME -> ConservativeRegrid.apply_weights: {0} s'.format(round(gettime() - st_time, 2)) + return dst_field + + def apply_weights(self, values): + """ + Calculates the regridded values using the ESMF algorithm for a 3D array specifically for a conservative regrid. + + :param values: Input values to regrid. + :type values: numpy.array + + :return: Values already regridded. + :rtype: numpy.array + """ + + dst_field = super(ConservativeRegrid, self).apply_weights(values) + + return dst_field + + +if __name__ == '__main__': + pass + diff --git a/hermesv3_gr/modules/speciation/__init__.py b/hermesv3_gr/modules/speciation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/speciation/speciation.py b/hermesv3_gr/modules/speciation/speciation.py new file mode 100644 index 0000000..d145c34 --- /dev/null +++ b/hermesv3_gr/modules/speciation/speciation.py @@ -0,0 +1,232 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. 
class Speciation(object):
    """
    Speciation class that contains all the needed information to do the speciation.

    :param speciation_id: ID of the speciation profile that have to be in the speciation profile file.
    :type speciation_id: str

    :param speciation_profile_path: Path to the file that contains all the speciation profiles.
    :type speciation_profile_path: str

    :param molecular_weights_path: Path to the file that contains all the needed molecular weights.
    :type molecular_weights_path: str
    """
    def __init__(self, speciation_id, speciation_profile_path, molecular_weights_path):
        st_time = gettime()
        settings.write_log('\t\tInitializing Speciation.', level=2)

        self.id = speciation_id
        self.speciation_profile = self.get_speciation_profile(speciation_profile_path)
        self.molecular_weights_path = molecular_weights_path
        self.molecular_weights = self.extract_molecular_weights(molecular_weights_path)

        settings.write_time('Speciation', 'Init', gettime() - st_time, level=2)

    def get_speciation_profile(self, speciation_profile_path):
        """
        Extract the speciation profile of 'self.id' from the profiles file.

        :param speciation_profile_path: Path to the CSV (';' separated) with the speciation profiles.
        :type speciation_profile_path: str

        :return: List of dictionaries. Each dictionary has the keys 'name', 'formula', 'units' and 'long_name'.
        :rtype: list
        """
        import pandas as pd

        st_time = gettime()
        settings.write_log("\t\t\tGetting speciation profile id '{0}' from {1} .".format(
            self.id, speciation_profile_path), level=3)

        df = pd.read_csv(speciation_profile_path, sep=';')

        try:
            formulas_dict = df.loc[df[df.ID == self.id].index[0]].to_dict()
        except IndexError:
            settings.write_log('ERROR: Check the .err file to get more info.')
            if settings.rank == 0:
                raise AttributeError('ERROR: Speciation profile ID {0} is not in the {1} file.'.format(
                    self.id, speciation_profile_path))
            sys.exit(1)
        formulas_dict.pop('ID', None)

        # The 'units' and 'short_description' rows carry the per-species metadata.
        units_dict = df.loc[df[df.ID == 'units'].index[0]].to_dict()
        units_dict.pop('ID', None)
        long_name_dict = df.loc[df[df.ID == 'short_description'].index[0]].to_dict()
        long_name_dict.pop('ID', None)

        profile_list = []
        for key in formulas_dict:
            profile_list.append({
                'name': key,
                'formula': formulas_dict[key],
                'units': units_dict[key],
                'long_name': long_name_dict[key]
            })

        settings.write_time('Speciation', 'get_speciation_profile', gettime() - st_time, level=3)
        return profile_list

    @staticmethod
    def extract_molecular_weights(molecular_weights_path):
        """
        Extract the molecular weights for each pollutant as a dictionary with the name of the pollutant as key and the
        molecular weight as value.

        :param molecular_weights_path: Path to the CSV that contains all the molecular weights.
        :type molecular_weights_path: str

        :return: Dictionary with the name of the pollutant as key and the molecular weight as value.
        :rtype: dict
        """
        import pandas as pd

        st_time = gettime()

        df = pd.read_csv(molecular_weights_path, sep=';')
        dict_aux = {element.Specie: element.MW for _, element in df.iterrows()}

        settings.write_time('Speciation', 'extract_molecular_weights', gettime() - st_time, level=3)
        return dict_aux

    def do_speciation(self, emission_list):
        """
        Manage all the process to speciate the emissions.

        Each input emission is first divided by its molecular weight, then each
        output species is computed by evaluating its profile formula over the
        input pollutant arrays.

        :param emission_list: List of emissions to speciate.
        :type emission_list: list

        :return: List of emissions already speciated.
        :rtype: list
        """
        import numpy as np

        st_time = gettime()
        settings.write_log("\tSpeciating", level=2)

        input_pollutants = []
        # Namespace for eval(): pollutant name -> converted data array.
        # (Replaces the previous exec()-based variable injection, which is unsafe
        # and harder to reason about.)
        pollutant_values = {}
        # Apply conversion factor to the input pollutants
        for emission in emission_list:
            try:
                emission['data'] = np.array(emission['data'] / self.molecular_weights[emission['name']],
                                            dtype=settings.precision)
            except KeyError:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    raise KeyError('ERROR: {0} pollutant is not in the molecular weights file {1} .'.format(
                        emission['name'], self.molecular_weights_path))
                sys.exit(1)
            pollutant_values[emission['name']] = np.array(emission['data'], dtype=settings.precision)
            emission['units'] = ''
            input_pollutants.append(emission['name'])

        del emission_list

        speciated_emissions = []
        num = 0

        for pollutant in self.speciation_profile:
            formula = str(pollutant['formula'])
            # Track which input pollutants are consumed by some formula.
            used_poll = [in_p for in_p in input_pollutants if in_p in formula]
            for poll_rem in used_poll:
                input_pollutants.remove(poll_rem)
            num += 1
            if formula != 'nan':
                settings.write_log("\t\tPollutant {0} using the formula {0}={3} ({1}/{2})".format(
                    pollutant['name'], num, len(self.speciation_profile), formula), level=3)

                dict_aux = {'name': pollutant['name'],
                            'units': pollutant['units'],
                            'long_name': pollutant['long_name']}
                # Use equality, not identity: 'is' on literals relies on interning.
                if formula == '0':
                    dict_aux.update({'data': 0})
                else:
                    try:
                        dict_aux.update({'data': np.array(eval(formula, {'np': np}, pollutant_values),
                                                          dtype=settings.precision)})
                    except NameError as e:
                        settings.write_log('ERROR: Check the .err file to get more info.')
                        if settings.rank == 0:
                            raise AttributeError(
                                "Error in speciation profile {0}: ".format(self.id) +
                                "The output specie {0} cannot be calculated ".format(pollutant['name']) +
                                "with the expression {0} because{1}".format(formula, str(e)))
                        else:
                            sys.exit(1)
                speciated_emissions.append(dict_aux)
            else:
                settings.write_log("\t\tPollutant {0} does not have formula. Ignoring. ({1}/{2})".format(
                    pollutant['name'], num, len(self.speciation_profile)), level=3)
        if len(input_pollutants) > 0:
            settings.write_log("WARNING: The input pollutants {0} do not appear in the speciation profile {1}.".format(
                input_pollutants, self.id))
            if settings.rank == 0:
                warning("WARNING: The input pollutants {0} do not appear in the speciation profile {1}.".format(
                    input_pollutants, self.id))
        settings.write_time('Speciation', 'do_speciation', gettime() - st_time, level=3)

        return speciated_emissions

    def get_long_name(self, name):
        """
        Return the long name of the given output species ('' if not found).

        :param name: Name of the output species.
        :type name: str

        :rtype: str
        """
        st_time = gettime()
        value = ''
        for pollutant in self.speciation_profile:
            if pollutant['name'] == name:
                value = pollutant['long_name']

        settings.write_time('Speciation', 'get_long_name', gettime() - st_time, level=3)
        return value

    def get_units(self, name):
        """
        Return the units of the given output species ('' if not found).

        :param name: Name of the output species.
        :type name: str

        :rtype: str
        """
        st_time = gettime()
        value = ''
        for pollutant in self.speciation_profile:
            if pollutant['name'] == name:
                value = pollutant['units']

        settings.write_time('Speciation', 'get_units', gettime() - st_time, level=3)
        return value
class TemporalDistribution(object):
    """
    TemporalDistribution class that contains all the information for the temporal disaggregation.

    :param starting_date: Date of the first timestep.
    :type starting_date: datetime.datetime

    :param timestep_type: Relation between timesteps: 'hourly', 'daily', 'monthly' or 'yearly'.
    :type timestep_type: str

    :param timestep_num: Quantity of timesteps.
    :type timestep_num: int

    :param timestep_freq: Quantity of timestep_type between timesteps.
        eg: If timestep_type = hourly; timestep_freq = 2; The difference between time of each timestep is 2 hours.
    :type timestep_freq: int

    :param monthly_profile_path: Path to the file that contains all the monthly profiles.
    :type monthly_profile_path: str

    :param month_profile_id: ID of the monthly profile to use (or path to a gridded profile NetCDF).
    :type month_profile_id: str

    :param daily_profile_path: Path to the file that contains all the daily profiles.
    :type daily_profile_path: str

    :param daily_profile_id: ID of the daily profile to use.
    :type daily_profile_id: str

    :param hourly_profile_path: Path to the file that contains all the hourly profiles.
    :type hourly_profile_path: str

    :param hourly_profile_id: ID of the hourly profile to use (or a
        'weekday:<id>, saturday:<id>, sunday:<id>' mapping).
    :type hourly_profile_id: str

    :param world_info_path: Path to the file that contains the necessary information to do the NetCDF of timezones.
    :type world_info_path: str

    :param auxiliar_files_dir: Path to the directory where will be all the needed auxiliar files like the NetCDF of
        timezones.
    :type auxiliar_files_dir: str
    """

    def __init__(self, starting_date, timestep_type, timestep_num, timestep_freq, monthly_profile_path,
                 month_profile_id, daily_profile_path, daily_profile_id, hourly_profile_path, hourly_profile_id,
                 world_info_path, auxiliar_files_dir, grid):
        from timezonefinder import TimezoneFinder
        import pandas as pd

        st_time = gettime()
        settings.write_log('\t\tInitializing Temporal.', level=2)

        self.grid = grid
        self.starting_date = starting_date
        self.timestep_type = timestep_type
        self.timestep_num = timestep_num
        self.timestep_freq = timestep_freq
        self.ending_date = self.calculate_ending_date()

        if month_profile_id is not None:
            if len(month_profile_id) > 4:
                # Longer than a 4-char profile ID: interpreted as the path to a gridded profile.
                if os.path.exists(month_profile_id):
                    self.monthly_profile = self.read_gridded_profile(month_profile_id, 'Fmonth')
                    # Flatten the spatial dimensions: (month, y, x) -> (month, cell).
                    self.monthly_profile = self.monthly_profile.reshape(
                        (self.monthly_profile.shape[0],
                         self.monthly_profile.shape[1] * self.monthly_profile.shape[2]))
                    self.monthly_profile_path = month_profile_id
                else:
                    settings.write_log('ERROR: Check the .err file to get more info.')
                    if settings.rank == 0:
                        raise IOError('ERROR: Gridded monthly profile file not found: {0}'.format(month_profile_id))
                    sys.exit(1)
            else:
                self.monthly_profile = self.get_temporal_monthly_profile(monthly_profile_path, month_profile_id)
        else:
            self.monthly_profile = None
            settings.write_log("\t\t\tNo temporal monthly profile is set.", level=3)

        self.daily_profile_id = daily_profile_id
        self.daily_profile_path = daily_profile_path
        if daily_profile_id is not None:
            settings.write_log("\t\t\tGetting temporal daily profile id '{0}' from {1} .".format(
                daily_profile_id, daily_profile_path), level=3)
        else:
            settings.write_log("\t\t\tNo temporal daily profile is set.", level=3)

        self.hourly_profile_path = hourly_profile_path
        # A single 4-character ID selects one hourly profile; any other string is
        # kept verbatim and parsed later as a weekday/saturday/sunday mapping.
        # (Fixed: the length was compared with 'is', which tests identity, not equality.)
        if hourly_profile_id is not None and len(hourly_profile_id) == 4:
            self.hourly_profile = self.get_temporal_hourly_profile(hourly_profile_id)
            settings.write_log("\t\t\tGetting temporal hourly profile id '{0}' from {1} .".format(
                hourly_profile_id, hourly_profile_path), level=3)
        else:
            if hourly_profile_id is None:
                settings.write_log("\t\t\tNo temporal hourly profile is set.", level=3)
            else:
                settings.write_log("\t\t\tGetting temporal hourly profile ids {0} from {1} .".format(
                    hourly_profile_id, hourly_profile_path), level=3)
            self.hourly_profile = hourly_profile_id

        self.world_info = world_info_path
        self.netcdf_timezones = os.path.join(auxiliar_files_dir, 'timezones.nc')
        self.hours_since = []

        self.world_info_df = pd.read_csv(self.world_info, sep=';')
        self.tf = TimezoneFinder()

        if not self.is_created_netcdf_timezones():
            settings.write_log("\t\tTimezones netCDF is not created. Lets try to create it.", level=1)
            self.create_netcdf_timezones(grid)
        self.timezones_array = self.calculate_timezones()

        settings.write_time('TemporalDistribution', 'Init', gettime() - st_time, level=2)

    def calculate_ending_date(self):
        """
        Calculate the date of the last timestep.

        :return: Date of the last timestep
        :rtype: datetime.datetime
        """
        from datetime import timedelta

        st_time = gettime()

        if self.timestep_type == 'hourly':
            end_date = self.starting_date + (self.timestep_num - 1) * timedelta(hours=self.timestep_freq)
        elif self.timestep_type == 'daily':
            end_date = self.starting_date + (self.timestep_num - 1) * timedelta(hours=self.timestep_freq * 24)
        elif self.timestep_type == 'monthly':
            # Carry months over into years so month always stays in 1..12
            # (the previous month + delta could exceed 12 and raise ValueError).
            total_months = (self.starting_date.month - 1) + (self.timestep_num - 1) * self.timestep_freq
            end_date = self.starting_date.replace(year=self.starting_date.year + total_months // 12,
                                                  month=total_months % 12 + 1)
        elif self.timestep_type == 'yearly':
            delta_year = (self.timestep_num - 1) * self.timestep_freq
            end_date = self.starting_date.replace(year=self.starting_date.year + delta_year)
        else:
            end_date = self.starting_date

        settings.write_time('TemporalDistribution', 'calculate_ending_date', gettime() - st_time, level=3)
        return end_date

    def calculate_timedelta(self, date):
        """
        Calculate the difference of time to the next timestep.

        :param date: Date of the current timestep.
        :type date: datetime.datetime

        :return: Difference of time to the next timestep.
        :rtype: datetime.timedelta
        """
        from datetime import timedelta
        from calendar import monthrange, isleap

        st_time = gettime()

        if self.timestep_type == 'hourly':
            delta = timedelta(hours=self.timestep_freq)
        elif self.timestep_type == 'daily':
            delta = timedelta(hours=self.timestep_freq * 24)
        elif self.timestep_type == 'monthly':
            # NOTE(review): monthly/yearly deltas ignore timestep_freq — confirm intended.
            delta = timedelta(hours=monthrange(date.year, date.month)[1] * 24)
        elif self.timestep_type == 'yearly':
            delta = timedelta(hours=(366 if isleap(date.year) else 365) * 24)
        else:
            delta = None

        # Fixed: this timer was previously mislabelled as 'calculate_ending_date'.
        settings.write_time('TemporalDistribution', 'calculate_timedelta', gettime() - st_time, level=3)
        return delta

    def get_tz_from_id(self, tz_id):
        """
        Extract the timezone (string format) for the given id (int).

        :param tz_id: ID of the timezone.
        :type tz_id: int

        :return: Timezone
        :rtype: str
        """
        tz = self.world_info_df.time_zone[self.world_info_df.time_zone_code == tz_id].values
        return tz[0]

    def get_id_from_tz(self, tz):
        """
        Extract the id (int) for the given timezone (string format).

        :param tz: Timezone of the ID.
        :type tz: str

        :return: ID
        :rtype: int
        """
        tz_id = self.world_info_df.time_zone_code[self.world_info_df.time_zone == tz].values

        try:
            tz_id = tz_id[0]
        except IndexError:
            settings.write_log('ERROR: Check the .err file to get more info.')
            if settings.rank == 0:
                raise ValueError("ERROR: The timezone '{0}' is not in the {1} file. ".format(tz, self.world_info) +
                                 "Please add it as a new line with an unique time_zone_code " +
                                 "and the corresponding country and country_code.")
            sys.exit(1)

        return tz_id

    @staticmethod
    def parse_tz(tz):
        """
        Parse the timezone (string format). It is needed because some libraries have more timezones than others and it
        tries to simplify setting the strange ones into the nearest common one.

        :param tz: Not parsed timezone.
        :type tz: str

        :return: Parsed timezone
        :rtype: str
        """
        # Map of uncommon timezone names to the nearest common equivalent.
        tz_dict = {
            'America/Punta_Arenas': 'America/Santiago',
            'Europe/Astrakhan': 'Europe/Moscow',
            'Asia/Atyrau': 'Asia/Aqtau',
            'Asia/Barnaul': 'Asia/Almaty',
            'Europe/Saratov': 'Europe/Moscow',
            'Europe/Ulyanovsk': 'Europe/Moscow',
            'Europe/Kirov': 'Europe/Moscow',
            'Asia/Tomsk': 'Asia/Novokuznetsk',
            'America/Fort_Nelson': 'America/Vancouver',
            'Asia/Famagusta': 'Asia/Nicosia',
        }
        return tz_dict.get(tz, tz)

    def find_closest_timezone(self, latitude, longitude):
        """
        Find the closest timezone for the given coordinates.

        :param latitude: Latitude coordinate to find timezone.
        :type latitude: float

        :param longitude: Longitude coordinate to find the timezone.
        :type longitude: float

        :return: Nearest timezone of the given coordinates.
        :rtype: str
        """
        st_time = gettime()

        # Widen the search radius degree by degree until a timezone is found.
        dg = 0
        tz = None
        while tz is None:
            tz = self.tf.closest_timezone_at(lng=longitude, lat=latitude, delta_degree=dg)
            dg += 1

        settings.write_time('TemporalDistribution', 'find_closest_timezone', gettime() - st_time, level=3)
        return tz

    def is_created_netcdf_timezones(self):
        """
        Check if the NetCDF of timezones is created.

        :return: True if it is already created.
        :rtype: bool
        """
        return os.path.exists(self.netcdf_timezones)

    def create_netcdf_timezones(self, grid):
        """
        Create a NetCDF with the timezone ID of each cell of the given grid.

        Every rank computes the timezones of its own portion of the grid and
        rank 0 gathers the pieces and writes the complete file.

        :param grid: Grid object with the coordinates.
        :type grid: Grid

        :return: True if it is created.
        :rtype: bool
        """
        from hermesv3_gr.tools.netcdf_tools import write_netcdf

        st_time = gettime()
        settings.write_log("\t\tCreating {0} file.".format(self.netcdf_timezones), level=2)

        lat, lon = grid.get_coordinates_2d()
        total_lat = settings.comm.gather(lat, root=0)
        total_lon = settings.comm.gather(lon, root=0)

        dst_var = []
        num = 0
        points = list(zip(lat.flatten(), lon.flatten()))
        for lat_aux, lon_aux in points:
            num += 1
            settings.write_log("\t\t\tlat:{0}, lon:{1} ({2}/{3})".format(
                lat_aux, lon_aux, num, len(points)), level=3)
            tz = self.find_closest_timezone(lat_aux, lon_aux)
            dst_var.append(self.get_id_from_tz(tz))

        dst_var = np.array(dst_var).reshape((1,) + lat.shape)
        dst_var = settings.comm.gather(dst_var, root=0)
        if settings.rank == 0:
            total_lat = np.concatenate(total_lat, axis=1)
            total_lon = np.concatenate(total_lon, axis=1)
            dst_var = np.concatenate(dst_var, axis=2)
            data = [{'name': 'timezone_id', 'units': '', 'data': dst_var}]
            write_netcdf(self.netcdf_timezones, total_lat, total_lon, data, RegularLatLon=True)
        settings.comm.Barrier()

        settings.write_time('TemporalDistribution', 'create_netcdf_timezones', gettime() - st_time, level=2)
        return True

    def read_gridded_profile(self, path, value):
        """
        Read the portion of a gridded temporal profile that belongs to this rank.

        :param path: Path to the gridded profile NetCDF file.
        :type path: str

        :param value: Name of the variable to read.
        :type value: str

        :return: Gridded profile values (non-positive values are replaced by 1).
        :rtype: numpy.array
        """
        from netCDF4 import Dataset

        st_time = gettime()
        settings.write_log('\t\t\tGetting gridded temporal monthly profile from {0} .'.format(path), level=3)

        nc_in = Dataset(path)
        profile = nc_in.variables[value][:, self.grid.x_lower_bound:self.grid.x_upper_bound,
                                         self.grid.y_lower_bound:self.grid.y_upper_bound]
        nc_in.close()

        # Avoid null or negative factors.
        profile[profile <= 0] = 1

        settings.write_time('TemporalDistribution', 'read_gridded_profile', gettime() - st_time, level=3)
        return profile

    def calculate_timezones(self):
        """
        Extract the timezone IDs from the NetCDF and convert them to timezone names (str).

        :return: Array with the timezone of each cell.
        :rtype: numpy.chararray
        """
        from netCDF4 import Dataset

        st_time = gettime()

        nc_in = Dataset(self.netcdf_timezones)
        timezones = nc_in.variables['timezone_id'][:, self.grid.x_lower_bound:self.grid.x_upper_bound,
                                                   self.grid.y_lower_bound:self.grid.y_upper_bound].astype(int)
        nc_in.close()

        tz_list = np.chararray(timezones.shape, itemsize=32)
        for id_aux in range(timezones.min(), timezones.max() + 1):
            try:
                tz_list[timezones == id_aux] = self.get_tz_from_id(id_aux)
            except IndexError:
                # Not every ID in [min, max] exists in the world info file.
                pass
        settings.write_time('TemporalDistribution', 'calculate_timezones', gettime() - st_time, level=3)
        return tz_list

    def calculate_2d_temporal_factors(self, date):
        """
        Calculate the temporal factor to correct the input data of the given date for each cell.

        :param date: Date of the current timestep.
        :type date: datetime.datetime

        :return: 2D array with the factors to correct the input data to the date of this timestep.
        :rtype: numpy.array
        """
        import pytz
        import pandas as pd

        st_time = gettime()

        df = pd.DataFrame(self.timezones_array.flatten(), columns=['tz'])
        df['i'] = df.index
        df['utc'] = pd.to_datetime(date)
        # Convert the UTC timestep to the local time of each cell; fall back to
        # the parsed (simplified) timezone name when the library does not know it.
        try:
            df['local'] = df.groupby('tz')['utc'].apply(
                lambda x: pd.to_datetime(x).dt.tz_localize(pytz.utc).dt.tz_convert(x.name).dt.tz_localize(None))
        except Exception:
            df['local'] = df.groupby('tz')['utc'].apply(
                lambda x: pd.to_datetime(x).dt.tz_localize(pytz.utc).dt.tz_convert(
                    self.parse_tz(x.name)).dt.tz_localize(None))
        df.set_index('local', inplace=True)

        df['month'] = df.index.month
        df['day'] = df.index.weekday
        df['hour'] = df.index.hour

        # Hourly factor: a single profile (dict) or a weekday/saturday/sunday mapping.
        if self.hourly_profile is not None:
            if isinstance(self.hourly_profile, dict):
                df['hour_factor'] = df['hour'].map(self.hourly_profile)
            else:
                profile_ids = self.parse_hourly_profile_id()
                weekday_profile = self.get_temporal_hourly_profile(profile_ids['weekday'])
                saturday_profile = self.get_temporal_hourly_profile(profile_ids['saturday'])
                sunday_profile = self.get_temporal_hourly_profile(profile_ids['sunday'])
                df['weekday'] = df['hour'].map(weekday_profile)
                df['saturday'] = df['hour'].map(saturday_profile)
                df['sunday'] = df['hour'].map(sunday_profile)

                del df['tz'], df['utc']
                df['hour_factor'] = 0
                # Weekdays are 0..4, Saturday is 5, Sunday is 6.
                df.loc[df['day'] <= 4, 'hour_factor'] = df['weekday'][df['day'] <= 4].values
                df.loc[df['day'] == 5, 'hour_factor'] = df['saturday'][df['day'] == 5].values
                df.loc[df['day'] == 6, 'hour_factor'] = df['sunday'][df['day'] == 6].values

                del df['weekday'], df['saturday'], df['sunday']
        else:
            df['hour_factor'] = 1
        del df['hour']

        # Daily factor.
        if self.daily_profile_id is not None:
            daily_profile = self.get_temporal_daily_profile(date)
            df['day_factor'] = df['day'].map(daily_profile)
        else:
            df['day_factor'] = 1
        del df['day']

        # Monthly factor: scalar profile (dict) or gridded profile (ndarray).
        if self.monthly_profile is None:
            df['month_factor'] = 1
        elif isinstance(self.monthly_profile, dict):
            df['month_factor'] = df['month'].map(self.monthly_profile)
        elif isinstance(self.monthly_profile, np.ndarray):
            for m, df_aux in df.groupby('month'):
                try:
                    df.loc[df['month'] == m, 'month_factor'] = \
                        self.monthly_profile[m - 1, df.loc[df['month'] == m, 'i'].values]
                except IndexError:
                    settings.write_log('ERROR: Check the .err file to get more info.')
                    if settings.rank == 0:
                        raise IOError("The gridded temporal profile {0} ".format(self.monthly_profile_path) +
                                      "is not on the output grid resolution.")
                    sys.exit(1)
        else:
            df['month_factor'] = 1
        del df['month']

        df['factor'] = df['month_factor'] * df['day_factor'] * df['hour_factor']

        # TODO make timezones_array 2D
        factors = np.array(df['factor'].values).reshape(
            (self.timezones_array.shape[1], self.timezones_array.shape[2]))
        del df

        settings.write_time('TemporalDistribution', 'calculate_2d_temporal_factors', gettime() - st_time, level=3)
        return factors

    def calculate_3d_temporal_factors(self):
        """
        Calculate the temporal factors for every timestep and cell.

        Also fills 'self.hours_since' with the offset, in whole hours, of every
        timestep with respect to the starting date.

        :return: 3D array (timestep, lat, lon) with the factors to correct the input data.
        :rtype: numpy.array
        """
        st_time = gettime()
        settings.write_log("\tCalculating temporal factors.", level=2)

        factors = []
        date_aux = self.starting_date
        count = 0

        while date_aux <= self.ending_date:
            count += 1
            settings.write_log("\t\t{0} temporal factor ({1}/{2}).".format(
                date_aux.strftime('%Y/%m/%d %H:%M:%S'), count, self.timestep_num), level=3)

            factors.append(self.calculate_2d_temporal_factors(date_aux))

            d = date_aux - self.starting_date
            # Integer division so the offsets stay whole hours. 3600 seconds per hour.
            self.hours_since.append(d.seconds // 3600 + d.days * 24)
            date_aux = date_aux + self.calculate_timedelta(date_aux)

        factors = np.array(factors)

        settings.write_time('TemporalDistribution', 'calculate_3d_temporal_factors', gettime() - st_time, level=3)
        return factors

    def parse_hourly_profile_id(self):
        """
        Parse the hourly profile ID string into a dictionary with the profile ID
        for "weekday", "saturday" and "sunday".

        :return: Dictionary with one profile ID per day type.
        :rtype: dict
        """
        import re

        dict_aux = {}
        for element in re.split(' , | ,|, |,| ', self.hourly_profile):
            key_value = re.split(':| :| :| : |=| =| =| = ', element)
            dict_aux[key_value[0]] = key_value[1]

        return dict_aux

    def get_temporal_hourly_profile(self, profile_id, date=None):
        """
        Extract the hourly profile of the given ID in a dictionary format.
        The hour (0 to 23) is the key (int) and the value (float) is the factor.

        :param profile_id: ID of the hourly profile to use.
        :type profile_id: str

        :param date: Date of the timestep to simulate. Not necessary for a single ID.
        :type date: datetime.datetime

        :return: Hourly profile where the hour (0 to 23) is the key (int) and the value (float) is the factor.
        :rtype: dict
        """
        import pandas as pd

        st_time = gettime()

        if date is None:
            df = pd.read_csv(self.hourly_profile_path)
            try:
                profile = df.loc[df[df.TP_H == profile_id].index[0]].to_dict()
            except IndexError:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    raise AttributeError('ERROR: Hourly profile ID {0} is not in the {1} file.'.format(
                        profile_id, self.hourly_profile_path))
                sys.exit(1)
            profile.pop('TP_H', None)
            profile = {int(k): float(v) for k, v in profile.items()}
        else:
            # Date-dependent hourly profiles are not implemented.
            profile = None

        settings.write_time('TemporalDistribution', 'get_temporal_hourly_profile', gettime() - st_time, level=3)
        return profile

    def get_temporal_daily_profile(self, date):
        """
        Extract the daily profile of the given ID in a dictionary format.
        The weekday (0 to 6) is the key (int) and the value (float) is the factor,
        rebalanced so that the factors of the full month stay consistent.

        :param date: Date of the timestep to simulate.
        :type date: datetime.datetime

        :return: Daily profile where the weekday (0 to 6) is the key (int) and the value (float) is the factor.
        :rtype: dict
        """
        import pandas as pd

        st_time = gettime()

        if self.daily_profile_id is not None:
            df = pd.read_csv(self.daily_profile_path)
            try:
                profile = df.loc[df[df.TP_D == self.daily_profile_id].index[0]].to_dict()
            except IndexError:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    raise AttributeError('ERROR: Daily profile ID {0} is not in the {1} file.'.format(
                        self.daily_profile_id, self.daily_profile_path))
                sys.exit(1)
            profile.pop('TP_D', None)
            profile_aux = {int(k): float(v) for k, v in profile.items()}
            rebalance_factor = self.calculate_rebalance_factor(profile_aux, date)
            profile = {int(k): float(v) + rebalance_factor for k, v in profile.items()}
        else:
            profile = None

        settings.write_time('TemporalDistribution', 'get_temporal_daily_profile', gettime() - st_time, level=3)
        return profile

    def calculate_rebalance_factor(self, profile, date):
        """
        Calculate the factor necessary to make the full month data consistent. This is needed for the months where the
        sum of the daily factor of each day of the month does not sum as the number of days of the month.

        :param profile: Daily profile.
        :type profile: dict

        :param date: Date of the timestep to simulate.
        :type date: datetime.datetime

        :return: Rebalance factor to be added to the daily factor.
        :rtype: float
        """
        st_time = gettime()

        weekdays = self.calculate_weekdays(date)
        rebalance_factor = self.calculate_weekday_factor_full_month(profile, weekdays)

        settings.write_time('TemporalDistribution', 'calculate_rebalance_factor', gettime() - st_time, level=3)
        return rebalance_factor
+ + :param profile: + :param weekdays: + :return: + """ + st_time = gettime() + + weekdays_factors = 0 + num_days = 0 + for day in xrange(7): + weekdays_factors += profile[day] * weekdays[day] + num_days += weekdays[day] + + settings.write_time('TemporalDistribution', 'calculate_weekday_factor_full_month', gettime() - st_time, level=3) + + return (num_days - weekdays_factors) / num_days + + @staticmethod + def calculate_weekdays(date): + from calendar import monthrange, weekday, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY + + st_time = gettime() + + weekdays = [MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY] + days = [weekday(date.year, date.month, d + 1) for d in xrange(monthrange(date.year, date.month)[1])] + weekdays_dict = {} + count = 0 + for day in weekdays: + weekdays_dict[count] = days.count(day) + + count += 1 + + settings.write_time('TemporalDistribution', 'calculate_weekdays', gettime() - st_time, level=3) + return weekdays_dict + + @staticmethod + def get_temporal_monthly_profile(profile_path, profile_id): + """ + Extracts the monthly profile of the given ID in a dictionary format. + The month (1 to 12) is the key (int) and the value (float) is the factor. + + :param profile_path: Path to the file that contains all the monthly profiles. + :type profile_path: str + + :param profile_id: ID of the monthly profile to use. + :type profile_id: str + + :return: Monthly profile where the month (1 to 12) is the key (int) and the value (float) is the factor. 
+ :rtype: dict + """ + import pandas as pd + + st_time = gettime() + + settings.write_log("\t\t\tGetting temporal monthly profile id '{0}' from {1} .".format( + profile_id, profile_path), level=3) + + if profile_id is not None: + df = pd.read_csv(profile_path) + try: + profile = df.loc[df[df.TP_M == profile_id].index[0]].to_dict() + except IndexError: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError('ERROR: Monthly profile ID {0} is not in the {1} file.'.format(profile_id, profile_path)) + sys.exit(1) + profile.pop('TP_M', None) + profile = {int(k): float(v) for k, v in profile.items()} + else: + profile = None + + settings.write_time('TemporalDistribution', 'get_temporal_monthly_profile', gettime() - st_time, level=2) + + return profile + + @staticmethod + def calculate_delta_hours(st_date, time_step_type, time_step_num, time_step_freq): + from datetime import timedelta + from calendar import monthrange + + st_time = gettime() + + settings.write_log('Calculating time array of {0} time steps starting from {1}.'.format( + time_step_num, st_date.strftime('%Y/%m/%d %H:%M:%S'))) + + if time_step_type == 'hourly': + end_date = st_date + (time_step_num - 1) * timedelta(hours=time_step_freq) + elif time_step_type == 'daily': + end_date = st_date + (time_step_num - 1) * timedelta(hours=time_step_freq * 24) + elif time_step_type == 'monthly': + delta_year = (time_step_num - 1) * time_step_freq // 12 + delta_month = (time_step_num - 1) * time_step_freq % 12 + end_date = st_date.replace(year=st_date.year + delta_year, + month=st_date.month + delta_month) + elif time_step_type == 'yearly': + delta_year = (time_step_num - 1) * time_step_freq + end_date = st_date.replace(year=st_date.year + delta_year) + else: + end_date = st_date + + date_aux = st_date + hours_since = [] + while date_aux <= end_date: + d = date_aux - st_date + hours_since.append(d.seconds / 3600 + d.days * 24) # 3600 seconds per hour + + if 
time_step_type == 'hourly': + delta = timedelta(hours=time_step_freq) + elif time_step_type == 'daily': + delta = timedelta(hours=time_step_freq * 24) + elif time_step_type == 'monthly': + days = monthrange(date_aux.year, date_aux.month)[1] + delta = timedelta(hours=days * 24) + elif time_step_type == 'yearly': + if isleap(date_aux.year): + delta = timedelta(hours=366 * 24) + else: + delta = timedelta(hours=365 * 24) + else: + delta = None + + date_aux = date_aux + delta + + settings.write_time('TemporalDistribution', 'calculate_delta_hours', gettime() - st_time, level=2) + + return hours_since + + # # @profile + # def do_temporal(self, data, grid): + # """ + # Manages all the steps to get the temporal disaggregation. + # + # :param data: list of emission to disaggregate. + # :type data: list + # + # :param grid: Destination grid. + # :type grid: Grid + # + # :return: Same data but already temporally disaggregated. + # :rtype: list + # """ + # import copy + # + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # # if not self.is_created_netcdf_timezones(): + # # print "The timezones NetCDF is not created. Lets try to create it." 
+ # # self.create_netcdf_timezones(grid) + # # self.timezones_array = self.calculate_timezones() + # + # data_empty = [] + # for element in data: + # # print element + # dict_aux = element.copy() + # # dict_aux['data'] = [] + # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) + # data_empty.append(dict_aux) + # data_to_fill = copy.deepcopy(data_empty) + # + # date_aux = self.starting_date + # count = 0 + # while date_aux <= self.ending_date: + # count += 1 + # if settings.log_level_2: + # print '\t\tDoing {0}/{1} time step.'.format(count, self.timestep_num) + # temporal_data = self.calculate_time_step(data, data_empty, date_aux) + # + # # Copy data to final + # for i in xrange(len(data_to_fill)): + # if temporal_data[i]['data'] is 0: + # data_to_fill[i]['data'] = 0 + # else: + # if data_to_fill[i]['data'] is None: + # data_to_fill[i]['data'] = temporal_data[i]['data'].reshape( + # (1,) + temporal_data[i]['data'].shape) + # else: + # data_to_fill[i]['data'] = np.append(data_to_fill[i]['data'], + # temporal_data[i]['data'].reshape( + # (1,) + temporal_data[i]['data'].shape), + # axis=0) + # + # # factors = self.calculate_2d_temporal_factors(date_aux) + # # for i in xrange(len(data_empty)): + # # if data[i]['data'] is not 0: + # # aux_data = data[i]['data'] * factors + # # if count == 1: + # # data_empty[i]['data'] = aux_data.reshape((1,) + aux_data.shape) + # # else: + # # data_empty[i]['data'] = np.append(data_empty[i]['data'], aux_data.reshape((1,) + aux_data.shape), axis=0) + # # else: + # # data_empty[i]['data'] = data[i]['data'] + # + # d = date_aux - self.starting_date + # self.hours_since.append(d.seconds / 3600 + d.days * 24) # 3600 seconds per hour + # date_aux = date_aux + self.calculate_timedelta(date_aux) + # + # if settings.log_level_3: + # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(gettime() - st_time, 2)) + # + # return data_to_fill + # + # # @profile + # def do_temporal_and_write(self, data, grid, nc_out): + 
# """ + # Manages all the steps to get the temporal disaggregation. + # + # :param data: list of emission to disaggregate. + # :type data: list + # + # :param grid: Destination grid. + # :type grid: Grid + # + # :return: Same data but already temporally disaggregated. + # :rtype: list + # """ + # from hermesv3_gr.tools.netcdf_tools import fill_netcdf + # import copy + # + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # if not self.is_created_netcdf_timezones(): + # print "The timezones NetCDF is not created. Lets try to create it." + # self.create_netcdf_timezones(grid) + # # self.timezones_array = self.calculate_timezones() + # + # data_empty = [] + # for element in data: + # # print element + # dict_aux = element.copy() + # # dict_aux['data'] = [] + # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) + # data_empty.append(dict_aux) + # data_to_fill = copy.deepcopy(data_empty) + # + # date_aux = self.starting_date + # count = 0 + # while date_aux <= self.ending_date: + # + # if settings.log_level_2: + # print '\t\tDoing {0}/{1} time step.'.format(count + 1, self.timestep_num) + # temporal_data = self.calculate_time_step(data, data_empty, date_aux) + # + # fill_netcdf(count, nc_out, temporal_data) + # + # # Copy data to final + # if False: + # # for i in xrange(len(data_to_fill)): + # if temporal_data[i]['data'] is 0: + # data_to_fill[i]['data'] = 0 + # else: + # if data_to_fill[i]['data'] is None: + # data_to_fill[i]['data'] = temporal_data[i]['data'].reshape( + # (1,) + temporal_data[i]['data'].shape) + # else: + # data_to_fill[i]['data'] = np.append(data_to_fill[i]['data'], + # temporal_data[i]['data'].reshape( + # (1,) + temporal_data[i]['data'].shape), + # axis=0) + # + # # factors = self.calculate_2d_temporal_factors(date_aux) + # # for i in xrange(len(data_empty)): + # # if data[i]['data'] is not 0: + # # aux_data = data[i]['data'] * factors + # # if count == 1: + # # data_empty[i]['data'] = 
aux_data.reshape((1,) + aux_data.shape) + # # else: + # # data_empty[i]['data'] = np.append(data_empty[i]['data'], aux_data.reshape((1,) + aux_data.shape), axis=0) + # # else: + # # data_empty[i]['data'] = data[i]['data'] + # + # d = date_aux - self.starting_date + # self.hours_since.append(d.seconds / 3600 + d.days * 24) # 3600 seconds per hour + # date_aux = date_aux + self.calculate_timedelta(date_aux) + # count += 1 + # + # if settings.log_level_3: + # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(gettime() - st_time, 2)) + # + # # @profile + # def do_time_step(self, date, data, grid): + # """ + # Manages all the steps to get the temporal disaggregation. + # + # :param data: list of emission to disaggregate. + # :type data: list + # + # :param grid: Destination grid. + # :type grid: Grid + # + # :return: Same data but already temporally disaggregated. + # :rtype: list + # """ + # from hermesv3_gr.tools.netcdf_tools import fill_netcdf + # import copy + # + # if settings.log_level_3: + # st_time = gettime() + # else: + # st_time = None + # + # if not self.is_created_netcdf_timezones(): + # print "The timezones NetCDF is not created. Lets try to create it." 
+ # self.create_netcdf_timezones(grid) + # # self.timezones_array = self.calculate_timezones() + # + # data_empty = [] + # for element in data: + # # print element + # dict_aux = element.copy() + # # dict_aux['data'] = [] + # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) + # data_empty.append(dict_aux) + # # data_to_fill = copy.deepcopy(data_empty) + # + # temporal_data = self.calculate_time_step(data, data_empty, date) + # + # if settings.log_level_3: + # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(gettime() - st_time, 2)) + # return temporal_data + # + # def calculate_time_step(self, data, data_empty, date): + # factors = self.calculate_2d_temporal_factors(date) + # for i in xrange(len(data_empty)): + # if data[i]['data'] is not 0: + # # print "data[i]['data'].shape, factors.shape", data[i]['data'].shape, factors.shape + # data_empty[i]['data'] = data[i]['data'] * factors + # # if data_empty[i]['data'] is None: + # # data_empty[i]['data'] = aux_data.reshape((1,) + aux_data.shape) + # # else: + # # data_empty[i]['data'] = np.append(data_empty[i]['data'], aux_data.reshape((1,) + aux_data.shape), axis=0) + # else: + # data_empty[i]['data'] = data[i]['data'] + # return data_empty + # + # @staticmethod + # def make_empty_copy(data): + # temporal_data = [] + # for element in data: + # # print element + # dict_aux = element.copy() + # # dict_aux['data'] = [] + # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) + # temporal_data.append(dict_aux) + # + # return temporal_data + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/vertical/__init__.py b/hermesv3_gr/modules/vertical/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/modules/vertical/vertical.py b/hermesv3_gr/modules/vertical/vertical.py new file mode 100644 index 0000000..bd7f3f8 --- /dev/null +++ b/hermesv3_gr/modules/vertical/vertical.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python + +# Copyright 2018 
Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +import sys +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings + + +class VerticalDistribution(object): + """ + VerticalDistribution class that contains all the information to do the vertical distribution. + + :param vertical_id: ID of the vertical profile that appears in the vertical profile file. + :type vertical_id: str + + :param vertical_profile_path: Path to the file that contains all the vertical profiles. + :type vertical_profile_path: str + + :param vertical_output_profile: path to the file that contain the vertical description of the required output + file. + :type vertical_output_profile: str + """ + def __init__(self, vertical_id, vertical_profile_path, vertical_output_profile): + st_time = gettime() + settings.write_log('\t\tInitializing Vertical.', level=2) + + self.id = vertical_id + + self.output_heights = vertical_output_profile + self.vertical_profile = self.get_vertical_profile(vertical_profile_path) + + settings.write_time('VerticalDistribution', 'Init', gettime() - st_time, level=2) + + def get_vertical_profile(self, path): + """ + Extracts the vertical v_profile from the vertical v_profile file. + + :param path: Path to the file that contains all the vertical profiles. 
+        :type path: str
+
+        :return: List of tuples of two values. The first value of the tuple is the height of the layer and the second
+            value is the quantity (%) of pollutant that goes into this layer.
+        :rtype: list of tuple
+        """
+        import pandas as pd
+        import re
+
+        st_time = gettime()
+        settings.write_log("\t\t\tGetting vertical profile id '{0}' from {1} .".format(self.id, path), level=3)
+
+        df = pd.read_csv(path, sep=';')
+        try:
+            v_profile = df.loc[df[df.ID == self.id].index[0]].to_dict()
+        except IndexError:
+            settings.write_log('ERROR: Check the .err file to get more info.')
+            if settings.rank == 0:
+                raise AttributeError('ERROR: Vertical profile ID {0} is not in the {1} file.'.format(self.id, path))
+            sys.exit(1)
+        v_profile.pop('ID', None)
+        v_profile['layers'] = list(map(int, re.split(', |,|; |;| ', v_profile['layers'])))
+        v_profile['weights'] = list(map(float, re.split(', |,|; |;| ', v_profile['weights'])))
+
+        if len(v_profile['layers']) != len(v_profile['weights']):
+            settings.write_log('ERROR: Check the .err file to get more info.')
+            if settings.rank == 0:
+                raise AttributeError("ERROR: The number of layers and numbers os weight have to have the same length." +
+                                     " The v_profile '{0}' of the '{1}' file doesn't match.".format(self.id, path))
+            sys.exit(1)
+        else:
+            return_value = zip(v_profile['layers'], v_profile['weights'])
+
+        settings.write_time('VerticalDistribution', 'get_vertical_profile', gettime() - st_time, level=3)
+
+        return return_value
+
+    @staticmethod
+    def get_vertical_output_profile(path):
+        """
+        Extracts the vertical description of the desired output.
+
+        :param path: Path to the file that contains the output vertical description.
+        :type path: str
+
+        :return: Heights of the output vertical layers.
+ :rtype: list + """ + import pandas as pd + + st_time = gettime() + settings.write_log('Calculating vertical levels from {0} .'.format(path)) + + df = pd.read_csv(path, sep=';') + + heights = df.height_magl.values + + settings.write_time('VerticalDistribution', 'get_vertical_output_profile', gettime() - st_time, level=3) + + return heights + + @staticmethod + def get_weights(prev_layer, layer, in_weight, output_vertical_profile): + """ + Calculates the weights for the given layer. + + :param prev_layer: Altitude of the low layer. 0 if it's the first. + :type prev_layer: float + + :param layer: Altitude of the current layer. + :type layer: float + + :param in_weight: Weights + :param output_vertical_profile: + :return: + """ + st_time = gettime() + + output_vertical_profile_aux = [s for s in output_vertical_profile if s >= prev_layer] + output_vertical_profile_aux = [s for s in output_vertical_profile_aux if s < layer] + + output_vertical_profile_aux = [prev_layer] + output_vertical_profile_aux + [layer] + + index = len([s for s in output_vertical_profile if s < prev_layer]) + origin_diff_factor = in_weight / (layer - prev_layer) + weight_list = [] + for i in xrange(len(output_vertical_profile_aux) - 1): + weight = (abs(output_vertical_profile_aux[i] - output_vertical_profile_aux[i + 1])) * origin_diff_factor + weight_list.append({'index': index, 'weight': weight}) + index += 1 + + settings.write_time('VerticalDistribution', 'get_weights', gettime() - st_time, level=3) + + return weight_list + + def calculate_weights(self): + """ + Calculates the weights for all the vertical layers. + + :return: Weights that goes to each layer. 
+ :rtype: list of float + """ + import numpy as np + + st_time = gettime() + settings.write_log("\t\tCalculating vertical weights.", level=3) + + weights = np.zeros(len(self.output_heights)) + prev_layer = 0 + for layer, weight in self.vertical_profile: + if weight != float(0): + for element in self.get_weights(prev_layer, layer, weight, self.output_heights): + weights[element['index']] += element['weight'] + + prev_layer = layer + + settings.write_time('VerticalDistribution', 'calculate_weights', gettime() - st_time, level=3) + + return weights + + @staticmethod + # @profile + def apply_weights(data, weights): + """ + Calculates the vertical distribution using the given data and weights. + + :param data: Emissions to be vertically distributed. + :type data: numpy.ndarray + + :param weights: Weights of each layer. + :type weights: list of float + + :return: Emissions already vertically distributed. + :rtype: numpy.ndarray + """ + import numpy as np + + st_time = gettime() + + data_aux = np.multiply(weights.reshape(weights.shape + (1, 1)), data, dtype=settings.precision) + + settings.write_time('VerticalDistribution', 'apply_weights', gettime() - st_time, level=3) + + return data_aux + + @staticmethod + def apply_weights_level(data, weight): + import numpy as np + + st_time = gettime() + + for emi in data: + if emi['data'] is not 0: + emi['data'] = emi['data'] * weight + + settings.write_time('VerticalDistribution', 'apply_weights_level', gettime() - st_time, level=3) + + return data + + +if __name__ == '__main__': + pass diff --git a/hermesv3_gr/modules/vertical/vertical_gfas.py b/hermesv3_gr/modules/vertical/vertical_gfas.py new file mode 100644 index 0000000..14c6fc0 --- /dev/null +++ b/hermesv3_gr/modules/vertical/vertical_gfas.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. 
+# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +import sys +from timeit import default_timer as gettime + +import hermesv3_gr.config.settings as settings +from vertical import VerticalDistribution + + +class GfasVerticalDistribution(VerticalDistribution): + """ + Class that contains all the needed information to vertically distribute the fire emissions. + + :param vertical_output_profile: Path to the file that contains the vertical description of the desired output. + :type vertical_output_profile: str + + :param approach: Approach to take into account. + :type approach: str + """ + def __init__(self, vertical_output_profile, approach, altitude): + st_time = gettime() + + self.altitude = altitude + self.output_heights = vertical_output_profile + self.approach = approach + + settings.write_time('GfasVerticalDistribution', 'Init', gettime() - st_time, level=3) + + return None + + @staticmethod + def calculate_widths(heights_list): + """ + Calculates the width of each vertical level. + + :param heights_list: List of the top altitude in meters of each level. + :type heights_list: list + + :return: List with the width of each vertical level. 
+        :rtype: list
+        """
+        st_time = gettime()
+
+        widths = []
+        for i in xrange(len(heights_list)):
+            if i == 0:
+                widths.append(heights_list[i])
+            else:
+                widths.append(heights_list[i] - heights_list[i - 1])
+
+        settings.write_time('GfasVerticalDistribution', 'calculate_widths', gettime() - st_time, level=3)
+        return widths
+
+    def get_weights(self, heights_list):
+        """
+        Calculates the proportion (%) of emission to put on each layer.
+
+        :param heights_list: List of the top altitude in meters of each involved level.
+        :type heights_list: list
+
+        :return: List of the weight to apply to each layer.
+        :rtype: list
+        """
+        st_time = gettime()
+
+        weights = []
+        width_list = self.calculate_widths(heights_list)
+        if self.approach == 'uniform':
+            max_percent = 1.
+        elif self.approach == '50_top':
+            max_percent = 0.5
+            width_list = width_list[0:-1]
+        else:
+            max_percent = 1.
+
+        for width in width_list:
+            weights.append((width * max_percent) / sum(width_list))
+        if self.approach == '50_top':
+            if len(heights_list) == 1:
+                weights.append(1.)
+            else:
+                weights.append(0.5)
+
+        settings.write_time('GfasVerticalDistribution', 'get_weights', gettime() - st_time, level=3)
+        return weights
+
+    def apply_approach(self, top_fires):
+        """
+        Scatters the emissions vertically.
+
+        :param top_fires: 4D array (time, level, latitude, longitude) with all the emission on each top layer.
+        :type top_fires: numpy.array
+
+        :return: 4D array (time, level, latitude, longitude) with all the emission distributed on all the involved
+            layers.
+ :rtype: numpy.array + """ + import numpy as np + + st_time = gettime() + + fires = np.zeros(top_fires.shape) + for i in xrange(len(self.output_heights)): + if top_fires[i].sum() != 0: + weight_list = self.get_weights(self.output_heights[0: i + 1]) + for i_weight in xrange(len(weight_list)): + fires[i_weight] += top_fires[i] * weight_list[i_weight] + + settings.write_time('GfasVerticalDistribution', 'apply_approach', gettime() - st_time, level=3) + return fires + + def do_vertical_interpolation_allocation(self, values, altitude): + """ + Allocates the fire emissions on their top level. + + :param values: 2D array with the fire emissions + :type values: numpy.ndarray + + :param altitude: 2D array with the altitude of the fires. + :type altitude: numpy.ndarray + + :return: Emissions already allocated on the top altitude of each fire. + :rtype: numpy.ndarray + """ + import numpy as np + + st_time = gettime() + + fire_list = [] + aux_var = values + for height in self.output_heights: + aux_data = np.zeros(aux_var.shape) + ma = np.ma.masked_less_equal(altitude, height) + aux_data[ma.mask] += aux_var[ma.mask] + aux_var -= aux_data + fire_list.append(aux_data) + fire_list = np.array(fire_list).reshape((len(fire_list), values.shape[1], values.shape[2])) + + settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation_allocation', gettime() - st_time, + level=3) + return fire_list + + def do_vertical_interpolation(self, values): + """ + Manages all the process to do the vertical distribution. + + :param values: Emissions to be vertically distributed. + :type values: numpy.ndarray + + :return: Emissions already vertically distributed. 
+        :rtype: numpy.ndarray
+        """
+        st_time = gettime()
+
+        fire_list = self.apply_approach(values)
+
+        settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation', gettime() - st_time, level=3)
+        return fire_list
+
+
+if __name__ == '__main__':
+    pass
diff --git a/hermesv3_gr/modules/writing/__init__.py b/hermesv3_gr/modules/writing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py
new file mode 100644
index 0000000..08e54e0
--- /dev/null
+++ b/hermesv3_gr/modules/writing/writer.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Earth Sciences Department, BSC-CNS
+#
+# This file is part of HERMESv3_GR.
+#
+# HERMESv3_GR is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# HERMESv3_GR is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with HERMESv3_GR. If not, see <http://www.gnu.org/licenses/>.
+ + +from timeit import default_timer as gettime + +from hermesv3_gr.config import settings +import os +import sys + + +class Writer(object): + + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): + + self.path = path + self.grid = grid + self.compress = compress + self.parallel = parallel + + self.variables_attributes = None + self.levels = levels + self.date = date + self.hours = hours + + self.global_attributes = None + + self.global_attributes_path = global_attributes_path + + return None + + def write(self, inventory_list): + st_time = gettime() + settings.write_log('') + settings.write_log("Writing netCDF output file {0} .".format(self.path)) + + self.set_variable_attributes(inventory_list) + if self.parallel: + if settings.rank == 0: + self.create_parallel_netcdf() + settings.comm.Barrier() + self.write_parallel_netcdf(inventory_list) + else: + self.write_serial_netcdf(inventory_list) + + settings.write_time('Writer', 'write', gettime() - st_time) + return True + + def create_parallel_netcdf(self): + """ + Implemented on inner class. + """ + return None + + def write_parallel_netcdf(self, emission_list): + """ + Implemented on inner class. + """ + return None + + def write_serial_netcdf(self, emission_list): + """ + Implemented on inner class. 
+ """ + return None + + def set_variable_attributes(self, inventory_list): + st_time = gettime() + empty_dict = {} + for ei in inventory_list: + for emi in ei.emissions: + if not emi['name'] in empty_dict: + dict_aux = emi.copy() + dict_aux['data'] = None + empty_dict[emi['name']] = dict_aux + + self.variables_attributes = empty_dict.values() + + settings.write_time('Writer', 'set_variable_attributes', gettime() - st_time, level=3) + + return True + + def calculate_data_by_var(self, variable, inventory_list, shape): + # TODO Documentation + """ + + :param variable: + :param inventory_list: + :param shape: + :return: + """ + from timeit import default_timer as gettime + import numpy as np + + st_time = gettime() + settings.write_log("\t\t\t\tGetting data for '{0}' pollutant.".format(variable), level=3) + + data = None + + for ei in inventory_list: + for emission in ei.emissions: + if emission['name'] == variable: + if emission['data'] is not 0: + vertical_time = gettime() + if ei.source_type == 'area': + if ei.vertical_factors is not None: + aux_data = emission['data'][np.newaxis, :, :] * ei.vertical_factors[:, np.newaxis, np.newaxis] + else: + if len(emission['data'].shape) != 3: + aux_data = np.zeros((shape[1], shape[2], shape[3])) + aux_data[0, :, :] = emission['data'] + else: + aux_data = emission['data'] + elif ei.source_type == 'point': + aux_data = np.zeros((shape[1], shape[2] * shape[3])) + aux_data[ei.location['layer'], ei.location['FID']] = emission['data'] + aux_data = aux_data.reshape((shape[1], shape[2], shape[3])) + + settings.write_time('VerticalDistribution', 'calculate_data_by_var', gettime() - vertical_time, + level=2) + del emission['data'] + + temporal_time = gettime() + if data is None: + data = np.zeros(shape) + if ei.temporal_factors is not None: + data += aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] + else: + data += aux_data[np.newaxis, :, :, :] + settings.write_time('TemporalDistribution', 
'calculate_data_by_var', gettime() - temporal_time, + level=2) + # Unit changes + data = self.unit_change(variable, data) + if data is not None: + data[data < 0] = 0 + settings.write_time('Writer', 'calculate_data_by_var', gettime() - st_time, level=3) + return data + + def unit_change(self, variable, data): + """ + Implement on inner class + """ + return None + + @staticmethod + def calculate_displacements(counts): + st_time = gettime() + + new_list = [0] + accum = 0 + for counter in counts[:-1]: + accum += counter + new_list.append(accum) + + settings.write_time('Writer', 'calculate_displacements', gettime() - st_time, level=3) + return new_list + + @staticmethod + def tuple_to_index(tuple_list, bidimensional=False): + from operator import mul + st_time = gettime() + + new_list = [] + for tuple in tuple_list: + if bidimensional: + new_list.append(tuple[-1] * tuple[-2]) + else: + new_list.append(reduce(mul, tuple)) + settings.write_time('Writer', 'tuple_to_index', gettime() - st_time, level=3) + return new_list + + @staticmethod + def get_writer(output_model, path, grid, levels, date, hours, global_attributes_path, compress, parallel): + """ + Choose between the different writers depending on the desired output model. + + :param output_model: Name of the output model. Only accepted 'MONARCH, CMAQ or WRF_CHEM. + :type output_model: str + + :param path: Path to the output file. + :type path: str + + :param grid: Grid object of the destination. + :type grid: Grid + + :param compress: Indicates if you want a compressed NetCDF. + :type compress: bool + + :param parallel: Indicates if you want to write the NetCDF in parallel. + :type parallel: bool + + :return: Writing object of the desired output model. 
+ :rtype: Writer + """ + from hermesv3_gr.modules.writing.writer_cmaq import WriterCmaq + from hermesv3_gr.modules.writing.writer_monarch import WriterMonarch + from hermesv3_gr.modules.writing.writer_wrf_chem import WriterWrfChem + + settings.write_log('Selecting writing output type for {0}.'.format(output_model)) + if output_model.lower() == 'monarch': + return WriterMonarch(path, grid, levels, date, hours, global_attributes_path, compress, parallel) + elif output_model.lower() == 'cmaq': + return WriterCmaq(path, grid, levels, date, hours, global_attributes_path, compress, parallel) + elif output_model.lower() == 'wrf_chem': + return WriterWrfChem(path, grid, levels, date, hours, global_attributes_path, compress, parallel) + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise AttributeError("The desired '{0}' output model is not available. ".format(output_model) + + "Only accepted 'MONARCH, CMAQ or WRF_CHEM.") + sys.exit(1) diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py new file mode 100644 index 0000000..2eefb26 --- /dev/null +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -0,0 +1,639 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
#!/usr/bin/env python

# Copyright 2018 Earth Sciences Department, BSC-CNS
#
# This file is part of HERMESv3_GR.
#
# HERMESv3_GR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HERMESv3_GR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HERMESv3_GR. If not, see <http://www.gnu.org/licenses/>.


from hermesv3_gr.modules.writing.writer import Writer
from timeit import default_timer as gettime
from hermesv3_gr.config import settings
import os
import sys
import numpy as np
from netCDF4 import Dataset
from mpi4py import MPI


class WriterCmaq(Writer):
    """
    Writer for the CMAQ air quality model: emits an IOAPI-like NetCDF file
    (fixed-width attributes, TFLAG time variable, IOAPI global attributes).
    """

    def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False):
        super(WriterCmaq, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel)

        # Order in which the IOAPI global attributes must be written.
        self.global_attributes_order = [
            'IOAPI_VERSION', 'EXEC_ID', 'FTYPE', 'CDATE', 'CTIME', 'WDATE', 'WTIME', 'SDATE', 'STIME', 'TSTEP',
            'NTHIK', 'NCOLS', 'NROWS', 'NLAYS', 'NVARS', 'GDTYP', 'P_ALP', 'P_BET', 'P_GAM', 'XCENT', 'YCENT',
            'XORIG', 'YORIG', 'XCELL', 'YCELL', 'VGTYP', 'VGTOP', 'VGLVLS', 'GDNAM', 'UPNAM', 'FILEDESC', 'HISTORY',
            'VAR-LIST']

    def unit_change(self, variable, data):
        """
        Convert the aggregated data of one pollutant from per-area rates to
        per-cell totals as expected by CMAQ (mole/s or g/s).

        :param variable: Name of the pollutant.
        :type variable: str

        :param data: 4D array with the aggregated data (may be None).
        :type data: numpy.array

        :return: Converted data (or None if 'data' was None).
        :rtype: numpy.array
        """
        from cf_units import Unit
        st_time = gettime()

        if data is not None:
            # Locate the declared units of this pollutant.
            units = None
            for var in self.variables_attributes:
                if var['name'] == variable:
                    units = var['units']
                    break

            if Unit(units).symbol == Unit('mol.s-1').symbol:
                data = data * 1000 * self.grid.cell_area
            elif Unit(units).symbol == Unit('g.s-1').symbol:
                data = data * 1000 * self.grid.cell_area
            else:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    # NOTE: the message now matches the units actually accepted above.
                    raise TypeError("The unit '{0}' of specie {1} is not defined correctly. ".format(units, variable) +
                                    "Should be 'mol.s-1' or 'g.s-1'")
                sys.exit(1)
        settings.write_time('WriterCmaq', 'unit_change', gettime() - st_time, level=3)
        return data

    @staticmethod
    def change_variable_attributes(emission_list):
        """
        Modifies the emission list to be consistent to use the output as input for CMAQ model.

        :param emission_list: List of emissions
        :type emission_list: list

        :return: Emission list ready for CMAQ and the list of variable names.
        :rtype: tuple
        """
        from cf_units import Unit

        var_list = []
        for emission in emission_list:
            # IOAPI requires fixed-width, blank-padded attributes
            # (80 chars for var_desc, 16 for long_name/units).
            emission['var_desc'] = "{:<80}".format(emission['long_name'])
            emission['long_name'] = "{:<16}".format(emission['name'])

            if Unit(emission['units']).symbol == Unit('mol.s-1').symbol:
                emission['units'] = "{:<16}".format('mole/s')
            elif Unit(emission['units']).symbol == Unit('g.s-1').symbol:
                emission['units'] = "{:<16}".format('g/s')
            var_list.append(emission['name'])
        return emission_list, var_list

    @staticmethod
    def create_tflag(st_date, hours_array, num_vars):
        """
        Creates the content of the CMAQ variable TFLAG

        :param st_date: Starting date
        :type st_date: datetime.datetime

        :param hours_array: Array with as elements as time steps. Each element has the delta hours from the starting
            date.
        :type hours_array: numpy.array

        :param num_vars: Number of variables that will contain the NetCDF.
        :type num_vars: int

        :return: Array with the content of TFLAG
        :rtype: numpy.array
        """
        from datetime import timedelta

        a = np.array([[[]]])

        for inc_hours in hours_array:
            date = st_date + timedelta(hours=inc_hours)
            # Per time step: (YYYYDDD, HHMMSS) repeated once per variable.
            b = np.array([[int(date.strftime('%Y%j'))], [int(date.strftime('%H%M%S'))]] * num_vars)
            a = np.append(a, b)

        a.shape = (len(hours_array), 2, num_vars)
        return a

    @staticmethod
    def str_var_list(var_list):
        """
        Transform a list to a string with the elements with 16 white spaces.

        :param var_list: List of variables.
        :type var_list: list

        :return: List transformed on string.
        :rtype: str
        """
        result = ""
        for var in var_list:
            result += "{:<16}".format(var)

        return result

    def read_global_attributes(self):
        """
        Read the IOAPI global attributes from the CSV given in the
        configuration, falling back to documented defaults for any attribute
        that is missing or not parseable.

        :return: Dictionary of global attributes.
        :rtype: dict
        """
        import pandas as pd
        from warnings import warn as warning
        float_atts = ['VGTOP']
        int_atts = ['FTYPE', 'NTHIK', 'VGTYP']
        str_atts = ['EXEC_ID', 'GDNAM']
        list_float_atts = ['VGLVLS']

        # Default values used when the attribute is absent from the CSV.
        atts_dict = {
            'EXEC_ID': "{:<80}".format('0.1alpha'),
            'FTYPE': np.int32(1),
            'NTHIK': np.int32(1),
            'VGTYP': np.int32(7),
            'VGTOP': np.float32(5000.),
            'VGLVLS': np.array([1., 0.], dtype=np.float32),
            'GDNAM': "{:<16}".format(''),
        }

        if self.global_attributes_path is not None:
            df = pd.read_csv(self.global_attributes_path)

            for att in atts_dict:
                try:
                    if att in int_atts:
                        atts_dict[att] = np.int32(df.loc[df['attribute'] == att, 'value'].item())
                    elif att in float_atts:
                        atts_dict[att] = np.float32(df.loc[df['attribute'] == att, 'value'].item())
                    elif att in str_atts:
                        atts_dict[att] = str(df.loc[df['attribute'] == att, 'value'].item())
                    elif att in list_float_atts:
                        atts_dict[att] = np.array(df.loc[df['attribute'] == att, 'value'].item().split(),
                                                  dtype=np.float32)
                except ValueError:
                    settings.write_log('WARNING: The global attribute {0} is not defined; Using default value {1}'.format(
                        att, atts_dict[att]))
                    if settings.rank == 0:
                        warning('WARNING: The global attribute {0} is not defined; Using default value {1}'.format(
                            att, atts_dict[att]))

        else:
            settings.write_log('WARNING: Check the .err file to get more information.')
            message = 'WARNING: No output attributes defined, check the output_attributes'
            message += ' parameter of the configuration file.\nUsing default values:'
            for key, value in atts_dict.items():
                message += '\n\t{0} = {1}'.format(key, value)
            if settings.rank == 0:
                warning(message)

        return atts_dict

    def create_global_attributes(self, var_list):
        """
        Create the IOAPI global attributes of the output file, combining the
        values read from the configuration CSV with the run metadata
        (dates, grid dimensions, projection parameters).

        :param var_list: List of variable names included in the output.
        :type var_list: list

        :return: Dictionary with the global attributes; the writing order is
            given by self.global_attributes_order.
        :rtype: dict
        """
        from datetime import datetime

        global_attributes = self.read_global_attributes()

        # TSTEP is coded HHMMSS following Models-3 conventions.
        if len(self.hours) > 1:
            tstep = (self.hours[1] - self.hours[0]) * 10000
        else:
            tstep = 1 * 10000

        now = datetime.now()
        global_attributes['IOAPI_VERSION'] = 'None: made only with NetCDF libraries'
        global_attributes['CDATE'] = np.int32(now.strftime('%Y%j'))
        global_attributes['CTIME'] = np.int32(now.strftime('%H%M%S'))
        global_attributes['WDATE'] = np.int32(now.strftime('%Y%j'))
        global_attributes['WTIME'] = np.int32(now.strftime('%H%M%S'))
        global_attributes['SDATE'] = np.int32(self.date.strftime('%Y%j'))
        global_attributes['STIME'] = np.int32(self.date.strftime('%H%M%S'))
        global_attributes['TSTEP'] = np.int32(tstep)
        global_attributes['NLAYS'] = np.int32(len(self.levels))
        global_attributes['NVARS'] = np.int32(len(var_list))
        global_attributes['UPNAM'] = "{:<16}".format('HERMESv3')
        global_attributes['FILEDESC'] = 'Emissions generated by HERMESv3_GR.'
        global_attributes['HISTORY'] = \
            'Code developed by Barcelona Supercomputing Center (BSC, https://www.bsc.es/).' + \
            'Developer: Carles Tena Medina (carles.tena@bsc.es)' + \
            'Reference: Guevara et al., 2018, GMD., in preparation.'
        global_attributes['VAR-LIST'] = self.str_var_list(var_list)

        # NOTE(review): only the 'lcc' grid type fills the projection
        # attributes (GDTYP, NCOLS, ...); other grid types would leave them
        # missing and fail later when writing the ordered attribute list --
        # confirm that CMAQ output is restricted to LCC grids.
        if self.grid.grid_type == 'lcc':
            global_attributes['GDTYP'] = np.int32(2)  # LAMGRD3: Lambert conformal conic
            global_attributes['NCOLS'] = np.int32(self.grid.nx)
            global_attributes['NROWS'] = np.int32(self.grid.ny)
            global_attributes['P_ALP'] = np.float(self.grid.lat_1)
            global_attributes['P_BET'] = np.float(self.grid.lat_2)
            global_attributes['P_GAM'] = np.float(self.grid.lon_0)
            global_attributes['XCENT'] = np.float(self.grid.lon_0)
            global_attributes['YCENT'] = np.float(self.grid.lat_0)
            # Origin is shifted from cell centre to cell corner.
            global_attributes['XORIG'] = np.float(self.grid.x_0) - np.float(self.grid.inc_x) / 2
            global_attributes['YORIG'] = np.float(self.grid.y_0) - np.float(self.grid.inc_y) / 2
            global_attributes['XCELL'] = np.float(self.grid.inc_x)
            global_attributes['YCELL'] = np.float(self.grid.inc_y)

        return global_attributes

    @staticmethod
    def create_cmaq_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None,
                           hours=None, regular_lat_lon=False, rotated=False, nx=None, ny=None, lat_1=None, lat_2=None,
                           lon_0=None, lat_0=None, x_0=None, y_0=None, inc_x=None, inc_y=None):
        # NOTE(review): legacy entry point. The calls below do not match the
        # current signatures of create_global_attributes (instance method
        # taking only var_list) nor write_serial/parallel_netcdf (instance
        # methods taking emission_list); calling this method would fail.
        # Kept unchanged for interface compatibility -- confirm it can be removed.
        data_list, var_list = WriterCmaq.change_variable_attributes(data_list)

        if settings.writing_serial:
            WriterCmaq.write_serial_netcdf(
                netcdf_path, center_latitudes, center_longitudes, data_list,
                levels=levels, date=date, hours=hours,
                global_attributes=WriterCmaq.create_global_attributes(date, nx, ny, len(levels), lat_1, lat_2, lon_0,
                                                                      lat_0, x_0, y_0, inc_x, inc_y, var_list),
                regular_lat_lon=regular_lat_lon,
                rotated=rotated, )
        else:
            WriterCmaq.write_parallel_netcdf(
                netcdf_path, center_latitudes, center_longitudes, data_list,
                levels=levels, date=date, hours=hours,
                global_attributes=WriterCmaq.create_global_attributes(date, nx, ny, len(levels), lat_1, lat_2, lon_0,
                                                                      lat_0, x_0, y_0, inc_x, inc_y, var_list),
                regular_lat_lon=regular_lat_lon,
                rotated=rotated, )

    @staticmethod
    def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None,
                     global_attributes=None, regular_lat_lon=False, rotated=False):
        """
        Write a complete CMAQ NetCDF in a single (serial) pass.
        Only Lambert Conformal Conic grids are supported.
        """
        if regular_lat_lon:
            settings.write_log('ERROR: Check the .err file to get more info.')
            if settings.rank == 0:
                raise TypeError('ERROR: Regular Lat Lon grid not implemented for CMAQ')
            sys.exit(1)

        elif rotated:
            settings.write_log('ERROR: Check the .err file to get more info.')
            if settings.rank == 0:
                raise TypeError('ERROR: Rotated grid not implemented for CMAQ')
            sys.exit(1)

        netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")

        # ===== Dimensions =====
        netcdf.createDimension('TSTEP', len(hours))
        netcdf.createDimension('DATE-TIME', 2)
        netcdf.createDimension('LAY', len(levels))
        netcdf.createDimension('VAR', len(data_list))
        netcdf.createDimension('ROW', center_latitudes.shape[0])
        netcdf.createDimension('COL', center_longitudes.shape[1])

        # ===== Variables =====
        tflag = netcdf.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME',))
        tflag.setncatts({'units': "{:<16}".format(''), 'long_name': "{:<16}".format('TFLAG'),
                         'var_desc': "{:<80}".format('Timestep-valid flags: (1) YYYYDDD or (2) HHMMSS')})
        tflag[:] = WriterCmaq.create_tflag(date, hours, len(data_list))

        # Rest of variables
        for variable in data_list:
            var = netcdf.createVariable(variable['name'], 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=True)
            var.units = variable['units']
            var.long_name = str(variable['long_name'])
            var.var_desc = str(variable['var_desc'])
            var[:] = variable['data']

        # ===== Global attributes =====
        global_attributes, order = global_attributes
        for attribute in order:
            netcdf.setncattr(attribute, global_attributes[attribute])

        netcdf.close()

    def create_parallel_netcdf(self):
        """
        Create the skeleton of the output NetCDF (dimensions, TFLAG, empty
        pollutant variables and global attributes); the data itself is
        appended later by write_parallel_netcdf.
        """
        st_time = gettime()
        settings.write_log("\tCreating parallel NetCDF file.", level=2)
        # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm,
        #                  info=MPI.Info())
        netcdf = Dataset(self.path, mode='w', format="NETCDF4")

        # ===== Dimensions =====
        settings.write_log("\t\tCreating NetCDF dimensions.", level=2)
        # netcdf.createDimension('TSTEP', len(self.hours))
        netcdf.createDimension('TSTEP', None)
        settings.write_log("\t\t\t'TSTEP' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), level=3)

        netcdf.createDimension('DATE-TIME', 2)
        settings.write_log("\t\t\t'DATE-TIME' dimension: {0}".format(2), level=3)

        netcdf.createDimension('LAY', len(self.levels))
        settings.write_log("\t\t\t'LAY' dimension: {0}".format(len(self.levels)), level=3)

        netcdf.createDimension('VAR', len(self.variables_attributes))
        settings.write_log("\t\t\t'VAR' dimension: {0}".format(len(self.variables_attributes)), level=3)

        netcdf.createDimension('ROW', self.grid.center_latitudes.shape[0])
        settings.write_log("\t\t\t'ROW' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3)

        netcdf.createDimension('COL', self.grid.center_longitudes.shape[1])
        settings.write_log("\t\t\t'COL' dimension: {0}".format(self.grid.center_longitudes.shape[1]), level=3)

        # ===== Variables =====
        settings.write_log("\t\tCreating NetCDF variables.", level=2)
        tflag = netcdf.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME',))
        tflag.setncatts({'units': "{:<16}".format(''), 'long_name': "{:<16}".format('TFLAG'),
                         'var_desc': "{:<80}".format('Timestep-valid flags: (1) YYYYDDD or (2) HHMMSS')})
        tflag[:] = self.create_tflag(self.date, self.hours, len(self.variables_attributes))
        settings.write_log("\t\t\t'TFLAG' variable created with size: {0}".format(tflag[:].shape), level=3)

        data_list, var_list = self.change_variable_attributes(self.variables_attributes)
        for variable in self.variables_attributes:
            var = netcdf.createVariable(variable['name'], 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=self.compress)
            var.units = variable['units']
            var.long_name = str(variable['long_name'])
            var.var_desc = str(variable['var_desc'])
            settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape) +
                               "\n\t\t\t\t'{0}' variable will be filled later.".format(variable['name']), level=3)

        # ===== Global attributes =====
        settings.write_log("\t\tCreating NetCDF metadata.", level=2)
        global_attributes = self.create_global_attributes(var_list)
        for attribute in self.global_attributes_order:
            netcdf.setncattr(attribute, global_attributes[attribute])

        netcdf.close()

        settings.write_time('WriterCmaq', 'create_parallel_netcdf', gettime() - st_time, level=3)

    def write_parallel_netcdf(self, emission_list):
        """
        Append the emission data to the (already created) NetCDF file, each
        rank writing its own slab collectively.

        :param emission_list: List of emission inventory objects.
        :type emission_list: list
        """
        st_time = gettime()
        settings.write_log("\tAppending data to parallel NetCDF file.", level=2)

        if settings.size > 1:
            netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm,
                             info=MPI.Info())
        else:
            netcdf = Dataset(self.path, mode='a', format="NETCDF4")
        settings.write_log("\t\tParallel NetCDF file ready to write.", level=2)
        for variable in self.variables_attributes:
            data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape)

            var = netcdf.variables[variable['name']]
            if settings.size > 1:
                var.set_collective(True)
            # Correcting NAN
            if data is None:
                data = 0
            var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound,
                self.grid.y_lower_bound:self.grid.y_upper_bound] = data
            settings.write_log("\t\t\t'{0}' variable filled".format(variable['name']))

        netcdf.close()
        settings.write_time('WriterCmaq', 'write_parallel_netcdf', gettime() - st_time, level=3)

    def write_serial_netcdf(self, emission_list):
        """
        Gather the data of every rank onto rank 0 and write the whole NetCDF
        from there. Two gathering strategies are kept (numpy Gather and
        flattened Gatherv); mpi_vector is the one enabled.

        :param emission_list: List of emission inventory objects.
        :type emission_list: list

        :return: True on success.
        :rtype: bool
        """
        st_time = gettime()

        mpi_numpy = False
        mpi_vector = True

        # Gathering the index
        if mpi_numpy or mpi_vector:
            rank_position = np.array([self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound,
                                      self.grid.y_upper_bound], dtype='i')
            full_position = None
            if settings.rank == 0:
                full_position = np.empty([settings.size, 4], dtype='i')
            settings.comm.Gather(rank_position, full_position, root=0)

        if settings.rank == 0:
            netcdf = Dataset(self.path, mode='w', format="NETCDF4")

            # ===== Dimensions =====
            settings.write_log("\tCreating NetCDF file.", level=2)
            settings.write_log("\t\tCreating NetCDF dimensions.", level=2)
            netcdf.createDimension('TSTEP', len(self.hours))
            settings.write_log("\t\t\t'TSTEP' dimension: {0}".format(len(self.hours)), level=3)
            netcdf.createDimension('DATE-TIME', 2)
            settings.write_log("\t\t\t'DATE-TIME' dimension: {0}".format(2), level=3)
            netcdf.createDimension('LAY', len(self.levels))
            settings.write_log("\t\t\t'LAY' dimension: {0}".format(len(self.levels)), level=3)
            netcdf.createDimension('VAR', len(self.variables_attributes))
            settings.write_log("\t\t\t'VAR' dimension: {0}".format(len(self.variables_attributes)), level=3)
            netcdf.createDimension('ROW', self.grid.center_latitudes.shape[0])
            settings.write_log("\t\t\t'ROW' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3)
            netcdf.createDimension('COL', self.grid.center_longitudes.shape[1])
            settings.write_log("\t\t\t'COL' dimension: {0}".format(self.grid.center_longitudes.shape[1]), level=3)

            # ===== Variables =====
            settings.write_log("\t\tCreating NetCDF variables.", level=2)
            tflag = netcdf.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME',))
            tflag.setncatts({'units': "{:<16}".format(''), 'long_name': "{:<16}".format('TFLAG'),
                             'var_desc': "{:<80}".format('Timestep-valid flags: (1) YYYYDDD or (2) HHMMSS')})
            tflag[:] = self.create_tflag(self.date, self.hours, len(self.variables_attributes))
            settings.write_log("\t\t\t'TFLAG' variable created with size: {0}".format(tflag[:].shape), level=3)

        full_shape = None
        data_list, var_list = self.change_variable_attributes(self.variables_attributes)
        for variable in data_list:
            if settings.size != 1:
                settings.write_log("\t\t\tGathering {0} data.".format(variable['name']), level=3)
            rank_data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape)
            if mpi_numpy or mpi_vector:
                if rank_data is not None:
                    root_shape = settings.comm.bcast(rank_data.shape, root=0)
                    if full_shape is None:
                        full_shape = settings.comm.allgather(rank_data.shape)
            if mpi_numpy:
                if settings.size != 1:
                    if settings.rank == 0:
                        recvbuf = np.empty((settings.size,) + rank_data.shape)
                    else:
                        recvbuf = None
                    # Pad ranks whose last dimension is one cell short.
                    if root_shape != rank_data.shape:
                        rank_data_aux = np.empty(root_shape)
                        rank_data_aux[:, :, :, :-1] = rank_data
                        rank_data = rank_data_aux
                    settings.comm.Gather(rank_data, recvbuf, root=0)
                else:
                    recvbuf = rank_data
            elif mpi_vector:
                if rank_data is not None:
                    counts_i = self.tuple_to_index(full_shape)
                    rank_buff = [rank_data, counts_i[settings.rank]]
                    if settings.rank == 0:
                        displacements = self.calculate_displacements(counts_i)
                        recvdata = np.empty(sum(counts_i), dtype=settings.precision)
                    else:
                        displacements = None
                        recvdata = None
                    if settings.precision == np.float32:
                        recvbuf = [recvdata, counts_i, displacements, MPI.FLOAT]
                    elif settings.precision == np.float64:
                        recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE]
                    else:
                        settings.write_log('ERROR: Check the .err file to get more info.')
                        if settings.rank == 0:
                            raise TypeError('ERROR: precision {0} unknown'.format(settings.precision))
                        sys.exit(1)

                    settings.comm.Gatherv(rank_buff, recvbuf, root=0)

            else:
                if settings.size != 1:
                    data = settings.comm.gather(rank_data, root=0)
                else:
                    data = rank_data

            if settings.rank == 0:
                if not (mpi_numpy or mpi_vector):
                    if settings.size != 1:
                        try:
                            data = np.concatenate(data, axis=3)
                        except Exception:
                            data = 0
                # NOTE(review): this reassignment clobbers the function-level
                # st_time, so the final write_time only measures the last
                # variable -- kept to preserve the original timing behavior.
                st_time = gettime()

                var = netcdf.createVariable(variable['name'], 'f', ('TSTEP', 'LAY', 'ROW', 'COL',),
                                            zlib=self.compress)
                var.units = variable['units']
                var.long_name = str(variable['long_name'])
                var.var_desc = str(variable['var_desc'])

                if mpi_numpy:
                    data = np.ones(var[:].shape, dtype=settings.precision) * 100
                    for i in range(settings.size):
                        try:
                            if i == 0:
                                var[:, :, :, :full_position[i][3]] = recvbuf[i]
                            elif i == settings.size - 1:
                                var[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1]
                            else:
                                var[:, :, :, full_position[i][2]:full_position[i][3]] = \
                                    recvbuf[i, :, :, :, : full_shape[i][-1]]
                        except Exception:
                            settings.write_log('ERROR: Check the .err file to get more info.')
                            if settings.rank == 0:
                                raise TypeError("ERROR on i {0} ".format(i) +
                                                "data shape: {0} ".format(data[:, :, :, full_position[i][2]:].shape) +
                                                "recvbuf shape {0}".format(recvbuf[i].shape))
                            sys.exit(1)

                elif mpi_vector:
                    if rank_data is not None:
                        # Reassemble the flattened Gatherv buffer into the
                        # (time, lay, row, col) slab of each rank.
                        data = np.empty(var[:].shape, dtype=settings.precision)
                        for i in range(settings.size):
                            if not i == settings.size - 1:
                                data[:, :, full_position[i][0]:full_position[i][1],
                                     full_position[i][2]:full_position[i][3]] = \
                                    np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i])
                            else:
                                data[:, :, full_position[i][0]:full_position[i][1],
                                     full_position[i][2]:full_position[i][3]] = \
                                    np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i])
                    else:
                        data = 0
                    var[:] = data
                else:
                    var[:] = data
                settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'],
                                                                                        var[:].shape), level=3)
        settings.write_log("\t\tCreating NetCDF metadata.", level=2)
        if settings.rank == 0:
            # ===== Global attributes =====
            global_attributes = self.create_global_attributes(var_list)
            for attribute in self.global_attributes_order:
                netcdf.setncattr(attribute, global_attributes[attribute])

            netcdf.close()
        settings.write_time('WriterCmaq', 'write_serial_netcdf', gettime() - st_time, level=3)
        return True
#!/usr/bin/env python

# Copyright 2018 Earth Sciences Department, BSC-CNS
#
# This file is part of HERMESv3_GR.
#
# HERMESv3_GR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HERMESv3_GR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HERMESv3_GR. If not, see <http://www.gnu.org/licenses/>.


from hermesv3_gr.modules.writing.writer import Writer
from timeit import default_timer as gettime
from hermesv3_gr.config import settings
import os
import sys
import numpy as np
from netCDF4 import Dataset
from mpi4py import MPI


class WriterMonarch(Writer):
    """
    Writer for the MONARCH model: emits a CF-style NetCDF with per-area
    emission rates (kg.s-1.m-2).
    """

    def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False):
        super(WriterMonarch, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress,
                                            parallel)

    def unit_change(self, variable, data):
        """
        Convert the aggregated data of one pollutant to the kg.s-1.m-2 rates
        expected by MONARCH (mol rates are scaled by 1000, kg rates are kept).

        :param variable: Name of the pollutant.
        :type variable: str

        :param data: 4D array with the aggregated data (may be None).
        :type data: numpy.array

        :return: Converted data (or None if 'data' was None).
        :rtype: numpy.array
        """
        from cf_units import Unit
        st_time = gettime()

        if data is not None:
            # Find the declared units of this pollutant among the collected attributes.
            units = next((attrs['units'] for attrs in self.variables_attributes
                          if attrs['name'] == variable), None)

            declared = Unit(units).symbol
            if declared == Unit('mol.s-1.m-2').symbol:
                data = data * 1000
            elif declared == Unit('kg.s-1.m-2').symbol:
                pass
            else:
                settings.write_log('ERROR: Check the .err file to get more info.')
                if settings.rank == 0:
                    raise TypeError("The unit '{0}' of specie {1} is not defined correctly. ".format(units, variable) +
                                    "Should be 'mol.s-1.m-2' or 'kg.s-1.m-2'")
                sys.exit(1)
        settings.write_time('WriterMonarch', 'unit_change', gettime() - st_time, level=3)
        return data
self.grid.center_longitudes.shape[1]) + settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[1]), level=3) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(self.grid.center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if self.grid.rlat is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(self.grid.rlat)) + settings.write_log("\t\t\t'rlat' dimension: {0}".format(len(self.grid.rlat)), level=3) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if self.grid.rlon is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' + sys.exit(1) + netcdf.createDimension('rlon', len(self.grid.rlon)) + settings.write_log("\t\t\t'rlon' dimension: {0}".format(len(self.grid.rlon)), level=3) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(self.grid.y)) + settings.write_log("\t\t\t'y' dimension: {0}".format(len(self.grid.y)), level=3) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(self.grid.x)) + settings.write_log("\t\t\t'x' dimension: {0}".format(len(self.grid.x)), level=3) + lon_dim = ('y', 'x', ) + + # Levels + if self.levels is not None: + netcdf.createDimension('lev', len(self.levels)) + settings.write_log("\t\t\t'lev' dimension: {0}".format(len(self.levels)), level=3) + + # Bounds + if self.grid.boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(self.grid.boundary_latitudes[0, 0])) + settings.write_log("\t\t\t'nv' dimension: {0}".format(len(self.grid.boundary_latitudes[0, 0])), level=3) + # sys.exit() + + # Time + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(self.hours)) + settings.write_log("\t\t\t'time' 
dimension: {0}".format(len(self.hours)), level=3) + + # ===== Variables ===== + settings.write_log("\t\tCreating NetCDF variables.", level=2) + # Time + if self.date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] + else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(self.date.year, self.date.month, self.date.day, + self.date.hour, self.date.minute, self.date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if settings.rank == 0: + time[:] = self.hours + settings.write_log("\t\t\t'time' variable created with size: {0}".format(time[:].shape), level=3) + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=self.compress) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if settings.rank == 0: + lats[:] = self.grid.center_latitudes + settings.write_log("\t\t\t'lat' variable created with size: {0}".format(lats[:].shape), level=3) + + if self.grid.boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=self.compress) + # print lat_bnds[:].shape, boundary_latitudes.shape + if settings.rank == 0: + lat_bnds[:] = self.grid.boundary_latitudes + settings.write_log("\t\t\t'lat_bnds' variable created with size: {0}".format(lat_bnds[:].shape), level=3) + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=self.compress) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = 
"longitude" + if settings.rank == 0: + lons[:] = self.grid.center_longitudes + settings.write_log("\t\t\t'lon' variable created with size: {0}".format(lons[:].shape), level=3) + + if self.grid.boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=self.compress) + # print lon_bnds[:].shape, boundary_longitudes.shape + if settings.rank == 0: + lon_bnds[:] = self.grid.boundary_longitudes + settings.write_log("\t\t\t'lon_bnds' variable created with size: {0}".format(lon_bnds[:].shape), level=3) + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=self.compress) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if settings.rank == 0: + rlat[:] = self.grid.rlat + settings.write_log("\t\t\t'rlat' variable created with size: {0}".format(rlat[:].shape), level=3) + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=self.compress) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + if settings.rank == 0: + rlon[:] = self.grid.rlon + settings.write_log("\t\t\t'rlon' variable created with size: {0}".format(rlon[:].shape), level=3) + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=self.compress) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if settings.rank == 0: + x[:] = self.grid.x + settings.write_log("\t\t\t'x' variable created with size: {0}".format(x[:].shape), level=3) + + y = netcdf.createVariable('y', 'd', ('y',), zlib=self.compress) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if settings.rank == 0: + y[:] = self.grid.y + settings.write_log("\t\t\t'y' variable created with 
size: {0}".format(y[:].shape), level=3) + + cell_area_dim = var_dim + # Levels + if self.levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=self.compress) + lev.units = Unit("m").symbol + lev.positive = 'up' + if settings.rank == 0: + lev[:] = self.levels + settings.write_log("\t\t\t'lev' variable created with size: {0}".format(lev[:].shape), level=3) + # print 'DATA LIIIIST {0}'.format(data_list) + # # All variables + if len(self.variables_attributes) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=self.compress) + if settings.rank == 0: + var[:] = 0 + + index = 0 + for variable in self.variables_attributes: + index += 1 + + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=self.compress) + + var.units = Unit(variable['units']).symbol + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + var.coordinates = "lat lon" + if self.grid.cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape) + + "\n\t\t\t\t'{0}' variable will be filled later.".format(variable['name']), level=3) + + settings.write_log("\t\tCreating NetCDF metadata.", level=2) + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' 
+ mapping.grid_north_pole_latitude = self.grid.new_pole_latitude_degrees + mapping.grid_north_pole_longitude = 90 - self.grid.new_pole_longitude_degrees + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = "{0}, {1}".format(self.grid.lat_1, self.grid.lat_2) + mapping.longitude_of_central_meridian = self.grid.lon_0 + mapping.latitude_of_projection_origin = self.grid.lat_0 + + # Cell area + if self.grid.cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + if self.global_attributes is not None: + netcdf.setncatts(self.global_attributes) + + netcdf.close() + + settings.write_time('WriterMonarch', 'create_parallel_netcdf', gettime() - st_time, level=3) + + def write_parallel_netcdf(self, emission_list): + + st_time = gettime() + + settings.write_log("\tAppending data to parallel NetCDF file.", level=2) + if settings.size > 1: + netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + else: + netcdf = Dataset(self.path, mode='a', format="NETCDF4") + settings.write_log("\t\tParallel NetCDF file ready to write.", level=2) + index = 0 + # print "Rank {0} 2".format(rank) + for variable in self.variables_attributes: + + data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) + st_time = gettime() + index += 1 + + var = netcdf.variables[variable['name']] + if settings.size > 1: + var.set_collective(True) + # Correcting NAN + if data is None: + data = 0 + var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = data + + 
settings.write_log("\t\t\t'{0}' variable filled".format(variable['name'])) + + if self.grid.cell_area is not None: + c_area = netcdf.variables['cell_area'] + c_area[self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = self.grid.cell_area + + netcdf.close() + settings.write_time('WriterMonarch', 'write_parallel_netcdf', gettime() - st_time, level=3) + + def write_serial_netcdf(self, emission_list,): + from cf_units import Unit, encode_time + + st_time = gettime() + + mpi_numpy = False + mpi_vector = True + + # Gathering the index + if mpi_numpy or mpi_vector: + rank_position = np.array([self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound, self.grid.y_upper_bound], dtype='i') + full_position = None + if settings.rank == 0: + full_position = np.empty([settings.size, 4], dtype='i') + settings.comm.Gather(rank_position, full_position, root=0) + + if settings.rank == 0: + + RegularLatLon = False + Rotated = False + LambertConformalConic = False + + if self.grid.grid_type == 'global': + RegularLatLon = True + elif self.grid.grid_type == 'rotated': + Rotated = True + elif self.grid.grid_type == 'lcc': + LambertConformalConic = True + settings.write_log("\tCreating NetCDF file.", level=2) + netcdf = Dataset(self.path, mode='w', format="NETCDF4") + + # ===== Dimensions ===== + settings.write_log("\t\tCreating NetCDF dimensions.", level=2) + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(self.grid.center_latitudes.shape) == 1: + settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + netcdf.createDimension('lat', self.grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(self.grid.center_latitudes.shape) == 2: + settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + netcdf.createDimension('lat', self.grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + 
settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError( + 'ERROR: Latitudes must be on a 1D or 2D array instead of {0} shape.'.format( + len(self.grid.center_latitudes.shape))) + sys.exit(1) + + # Longitude + if len(self.grid.center_longitudes.shape) == 1: + settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[0]), level=3) + netcdf.createDimension('lon', self.grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(self.grid.center_longitudes.shape) == 2: + settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[0]), level=3) + netcdf.createDimension('lon', self.grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError( + 'ERROR: Longitudes must be on a 1D or 2D array instead of {0} shape.'.format( + len(self.grid.center_longitudes.shape))) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if self.grid.rlat is None: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError('ERROR: For rotated grids is needed the rotated latitudes.') + sys.exit(1) + settings.write_log("\t\t'rlat' dimension: {0}".format(len(self.grid.rlat)), level=2) + netcdf.createDimension('rlat', len(self.grid.rlat)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if self.grid.rlon is None: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError('ERROR: For rotated grids is needed the rotated longitudes.') + sys.exit(1) + settings.write_log("\t\t\t'rlon' dimension: {0}".format(len(self.grid.rlon)), level=3) + netcdf.createDimension('rlon', len(self.grid.rlon)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + settings.write_log("\t\t\t'y' dimension: 
{0}".format(len(self.grid.y)), level=3) + netcdf.createDimension('y', len(self.grid.y)) + lat_dim = ('y', 'x', ) + settings.write_log("\t\t\t'x' dimension: {0}".format(len(self.grid.x)), level=3) + netcdf.createDimension('x', len(self.grid.x)) + lon_dim = ('y', 'x', ) + + # Levels + if self.levels is not None: + settings.write_log("\t\t\t'lev' dimension: {0}".format(len(self.levels)), level=3) + netcdf.createDimension('lev', len(self.levels)) + + # Bounds + if self.grid.boundary_latitudes is not None: + settings.write_log("\t\t\t'nv' dimension: {0}".format(len(self.grid.boundary_latitudes[0, 0])), level=3) + netcdf.createDimension('nv', len(self.grid.boundary_latitudes[0, 0])) + + # Time + settings.write_log("\t\t\t'time' dimension: {0}".format(len(self.hours)), level=3) + netcdf.createDimension('time', len(self.hours)) + + # ===== Variables ===== + settings.write_log("\t\tCreating NetCDF variables.", level=2) + # Time + if self.date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] 
+ else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + time.units = str(u.offset_by_time(encode_time( + self.date.year, self.date.month, self.date.day, self.date.hour, self.date.minute, self.date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = self.hours + settings.write_log("\t\t\t'time' variable created with size: {0}".format(time[:].shape), level=3) + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=self.compress) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + lats[:] = self.grid.center_latitudes + settings.write_log("\t\t\t'lat' variable created with size: {0}".format(lats[:].shape), level=3) + + if self.grid.boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=self.compress) + # print lat_bnds[:].shape, boundary_latitudes.shape + lat_bnds[:] = self.grid.boundary_latitudes + settings.write_log( + "\t\t\t'lat_bnds' variable created with size: {0}".format(lat_bnds[:].shape), level=3) + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=self.compress) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + lons[:] = self.grid.center_longitudes + settings.write_log("\t\t\t'lon' variable created with size: {0}".format(lons[:].shape), + level=3) + + if self.grid.boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=self.compress) + # print lon_bnds[:].shape, boundary_longitudes.shape + lon_bnds[:] = self.grid.boundary_longitudes + settings.write_log( + "\t\t\t'lon_bnds' variable created with size: {0}".format(lon_bnds[:].shape), level=3) + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), 
zlib=self.compress) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + rlat[:] = self.grid.rlat + settings.write_log("\t\t\t'rlat' variable created with size: {0}".format(rlat[:].shape), level=3) + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=self.compress) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + rlon[:] = self.grid.rlon + settings.write_log("\t\t\t'rlon' variable created with size: {0}".format(rlon[:].shape), level=3) + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=self.compress) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + x[:] = self.grid.x + settings.write_log("\t\t\t'x' variable created with size: {0}".format(x[:].shape), level=3) + + y = netcdf.createVariable('y', 'd', ('y',), zlib=self.compress) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + y[:] = self.grid.y + settings.write_log("\t\t\t'y' variable created with size: {0}".format(y[:].shape), level=3) + + cell_area_dim = var_dim + # Levels + if self.levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=self.compress) + lev.units = Unit("m").symbol + lev.positive = 'up' + lev[:] = self.levels + settings.write_log("\t\t\t'lev' variable created with size: {0}".format(lev[:].shape), level=3) + + if len(self.variables_attributes) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=self.compress) + var[:] = 0 + + full_shape = None + index = 0 + for variable in self.variables_attributes: + if settings.size != 1: + settings.write_log("\t\t\tGathering {0} data.".format(variable['name']), level=3) + rank_data = 
self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) + if mpi_numpy or mpi_vector: + if rank_data is not None: + root_shape = settings.comm.bcast(rank_data.shape, root=0) + if full_shape is None: + full_shape = settings.comm.allgather(rank_data.shape) + # print 'Rank {0} full_shape: {1}\n'.format(settings.rank, full_shape) + if mpi_numpy: + if settings.size != 1: + if settings.rank == 0: + recvbuf = np.empty((settings.size,) + rank_data.shape) + else: + recvbuf = None + if root_shape != rank_data.shape: + rank_data_aux = np.empty(root_shape) + rank_data_aux[:, :, :, :-1] = rank_data + rank_data = rank_data_aux + # print 'Rank {0} data.shape {1}'.format(settings.rank, rank_data.shape) + settings.comm.Gather(rank_data, recvbuf, root=0) + else: + recvbuf = rank_data + elif mpi_vector: + if rank_data is not None: + counts_i = self.tuple_to_index(full_shape) + rank_buff = [rank_data, counts_i[settings.rank]] + if settings.rank == 0: + displacements = self.calculate_displacements(counts_i) + recvdata = np.empty(sum(counts_i), dtype=settings.precision) + else: + displacements = None + recvdata = None + if settings.precision == np.float32: + recvbuf = [recvdata, counts_i, displacements, MPI.FLOAT] + elif settings.precision == np.float64: + recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE] + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError('ERROR: precision {0} unknown'.format(settings.precision)) + sys.exit(1) + + settings.comm.Gatherv(rank_buff, recvbuf, root=0) + + else: + if settings.size != 1: + data = settings.comm.gather(rank_data, root=0) + else: + data = rank_data + + if settings.rank == 0: + if not (mpi_numpy or mpi_vector): + if settings.size != 1: + try: + data = np.concatenate(data, axis=3) + except: + data = 0 + index += 1 + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=self.compress) + + var.units = Unit(variable['units']).symbol 
+ + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + + var.coordinates = "lat lon" + + if self.grid.cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + + if mpi_numpy: + data = np.ones(var[:].shape, dtype=settings.precision) * 100 + for i in xrange(settings.size): + try: + if i == 0: + var[:, :, :, :full_position[i][3]] = recvbuf[i] + elif i == settings.size - 1: + var[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1] + else: + var[:, :, :, full_position[i][2]:full_position[i][3]] = \ + recvbuf[i, :, :, :, : full_shape[i][-1]] + except: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError("ERROR on i {0} ".format(i) + + "data shape: {0} ".format(data[:, :, :, full_position[i][2]:].shape) + + "recvbuf shape {0}".format(recvbuf[i].shape)) + sys.exit(1) + + elif mpi_vector: + if rank_data is not None: + data = np.empty(var[:].shape, dtype=settings.precision) + for i in xrange(settings.size): + if not i == settings.size - 1: + data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) + else: + data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) + else: + data = 0 + var[:] = data + else: + var[:] = data + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape), + level=3) + settings.write_log("\t\tCreating NetCDF metadata.", level=2) + if settings.rank == 0: + 
# Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = 90 - self.grid.new_pole_latitude_degrees + mapping.grid_north_pole_longitude = self.grid.new_pole_longitude_degrees + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = "{0}, {1}".format(self.grid.lat_1, self.grid.lat_2) + mapping.longitude_of_central_meridian = self.grid.lon_0 + mapping.latitude_of_projection_origin = self.grid.lat_0 + + if self.grid.cell_area is not None: + cell_area = settings.comm.gather(self.grid.cell_area, root=0) + if settings.rank == 0: + # Cell area + if self.grid.cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + + cell_area = np.concatenate(cell_area, axis=1) + + c_area[:] = cell_area + + if settings.rank == 0: + if self.global_attributes is not None: + netcdf.setncatts(self.global_attributes) + if settings.rank == 0: + netcdf.close() + settings.write_time('WriterMonarch', 'write_serial_netcdf', gettime() - st_time, level=3) + diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py new file mode 100644 index 0000000..c9b4490 --- /dev/null +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -0,0 +1,444 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. 
+# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +from hermesv3_gr.modules.writing.writer import Writer +from timeit import default_timer as gettime +from hermesv3_gr.config import settings +import os +import sys +import numpy as np +from netCDF4 import Dataset +from mpi4py import MPI + + +class WriterWrfChem(Writer): + + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): + super(WriterWrfChem, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) + + self.global_attributes_order = [ + 'TITLE', 'START_DATE', 'WEST-EAST_GRID_DIMENSION', 'SOUTH-NORTH_GRID_DIMENSION', + 'BOTTOM-TOP_GRID_DIMENSION', 'DX', 'DY', 'GRIDTYPE', 'DIFF_OPT', 'KM_OPT', 'DAMP_OPT', 'DAMPCOEF', 'KHDIF', + 'KVDIF', 'MP_PHYSICS', 'RA_LW_PHYSICS', 'RA_SW_PHYSICS', 'SF_SFCLAY_PHYSICS', 'SF_SURFACE_PHYSICS', + 'BL_PBL_PHYSICS', 'CU_PHYSICS', 'SF_LAKE_PHYSICS', 'SURFACE_INPUT_SOURCE','SST_UPDATE', 'GRID_FDDA', + 'GFDDA_INTERVAL_M', 'GFDDA_END_H', 'GRID_SFDDA', 'SGFDDA_INTERVAL_M', 'SGFDDA_END_H', + 'WEST-EAST_PATCH_START_UNSTAG', 'WEST-EAST_PATCH_END_UNSTAG', 'WEST-EAST_PATCH_START_STAG', + 'WEST-EAST_PATCH_END_STAG', 'SOUTH-NORTH_PATCH_START_UNSTAG', 'SOUTH-NORTH_PATCH_END_UNSTAG', + 'SOUTH-NORTH_PATCH_START_STAG', 'SOUTH-NORTH_PATCH_END_STAG', 'BOTTOM-TOP_PATCH_START_UNSTAG', + 'BOTTOM-TOP_PATCH_END_UNSTAG', 'BOTTOM-TOP_PATCH_START_STAG', 
'BOTTOM-TOP_PATCH_END_STAG', 'GRID_ID', + 'PARENT_ID', 'I_PARENT_START', 'J_PARENT_START', 'PARENT_GRID_RATIO', 'DT', 'CEN_LAT', 'CEN_LON', + 'TRUELAT1', 'TRUELAT2', 'MOAD_CEN_LAT', 'STAND_LON', 'POLE_LAT', 'POLE_LON', 'GMT', 'JULYR', 'JULDAY', + 'MAP_PROJ', 'MMINLU', 'NUM_LAND_CAT', 'ISWATER', 'ISLAKE', 'ISICE', 'ISURBAN', 'ISOILWATER'] + + def unit_change(self, variable, data): + from cf_units import Unit + + if data is not None: + units = None + for var_name in self.variables_attributes: + if var_name == variable: + units = self.variables_attributes[var_name]['units'] + break + + if Unit(units).symbol == Unit('mol.h-1.km-2').symbol: + # 10e6 -> from m2 to km2 + # 10e3 -> from kmol to mol + # 3600n -> from s to h + data = data * 10e6 * 10e3 * 3600 + elif Unit(units).symbol == Unit('ug.s-1.m-2').symbol: + # 10e9 -> from kg to ug + data = data * 10e9 + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError("The unit '{0}' of specie {1} is not defined correctly.".format(units, variable) + + " Should be 'mol.h-1.km-2' or 'ug.s-1.m-2'") + sys.exit(1) + return data + + def change_variable_attributes(self): + from cf_units import Unit + + new_variable_dict = {} + for variable in self.variables_attributes: + if Unit(variable['units']).symbol == Unit('mol.h-1.km-2').symbol: + new_variable_dict[variable['name']] = { + 'FieldType': np.int32(104), + 'MemoryOrder': "XYZ", + 'description': "EMISSIONS", + 'units': "mol km^-2 hr^-1", + 'stagger': "", + 'coordinates': "XLONG XLAT" + } + elif Unit(variable['units']).symbol == Unit('ug.s-1.m-2').symbol: + new_variable_dict[variable['name']] = { + 'FieldType': np.int32(104), + 'MemoryOrder': "XYZ", + 'description': "EMISSIONS", + 'units': "ug/m3 m/s", + 'stagger': "", + 'coordinates': "XLONG XLAT" + } + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError("The unit '{0}' of specie {1} is not 
".format(variable['units'], variable['name']) + + "defined correctly. Should be 'mol.h-1.km-2' or 'ug.s-1.m-2'") + sys.exit(1) + self.variables_attributes = new_variable_dict + + def read_global_attributes(self): + import pandas as pd + from warnings import warn as warning + + float_atts = ['DAMPCOEF', 'KHDIF', 'KVDIF', 'CEN_LAT', 'CEN_LON', 'DT'] + int_atts = ['BOTTOM-TOP_GRID_DIMENSION', 'DIFF_OPT', 'KM_OPT', 'DAMP_OPT', + 'MP_PHYSICS', 'RA_LW_PHYSICS', 'RA_SW_PHYSICS', 'SF_SFCLAY_PHYSICS', 'SF_SURFACE_PHYSICS', + 'BL_PBL_PHYSICS', 'CU_PHYSICS', 'SF_LAKE_PHYSICS', 'SURFACE_INPUT_SOURCE', 'SST_UPDATE', + 'GRID_FDDA', 'GFDDA_INTERVAL_M', 'GFDDA_END_H', 'GRID_SFDDA', 'SGFDDA_INTERVAL_M', 'SGFDDA_END_H', + 'BOTTOM-TOP_PATCH_START_UNSTAG', 'BOTTOM-TOP_PATCH_END_UNSTAG', 'BOTTOM-TOP_PATCH_START_STAG', + 'BOTTOM-TOP_PATCH_END_STAG', 'GRID_ID', 'PARENT_ID', 'I_PARENT_START', 'J_PARENT_START', + 'PARENT_GRID_RATIO', 'NUM_LAND_CAT', 'ISWATER', 'ISLAKE', 'ISICE', 'ISURBAN', 'ISOILWATER', + 'HISTORY'] + str_atts = ['GRIDTYPE', 'MMINLU'] + if self.grid.grid_type == 'lcc': + lat_ts = np.float32(self.grid.lat_0) + elif self.grid.grid_type == 'mercator': + lat_ts = np.float32(self.grid.lat_ts) + + atts_dict = { + 'BOTTOM-TOP_GRID_DIMENSION': np.int32(45), + 'GRIDTYPE': 'C', + 'DIFF_OPT': np.int32(1), + 'KM_OPT': np.int32(4), + 'DAMP_OPT': np.int32(3), + 'DAMPCOEF': np.float32(0.2), + 'KHDIF': np.float32(0.), + 'KVDIF': np.float32(0.), + 'MP_PHYSICS': np.int32(6), + 'RA_LW_PHYSICS': np.int32(4), + 'RA_SW_PHYSICS': np.int32(4), + 'SF_SFCLAY_PHYSICS': np.int32(2), + 'SF_SURFACE_PHYSICS': np.int32(2), + 'BL_PBL_PHYSICS': np.int32(8), + 'CU_PHYSICS': np.int32(0), + 'SF_LAKE_PHYSICS': np.int32(0), + 'SURFACE_INPUT_SOURCE': np.int32(1), + 'SST_UPDATE': np.int32(0), + 'GRID_FDDA': np.int32(0), + 'GFDDA_INTERVAL_M': np.int32(0), + 'GFDDA_END_H': np.int32(0), + 'GRID_SFDDA': np.int32(0), + 'SGFDDA_INTERVAL_M': np.int32(0), + 'SGFDDA_END_H': np.int32(0), + 'BOTTOM-TOP_PATCH_START_UNSTAG': 
np.int32(1), + 'BOTTOM-TOP_PATCH_END_UNSTAG': np.int32(44), + 'BOTTOM-TOP_PATCH_START_STAG': np.int32(1), + 'BOTTOM-TOP_PATCH_END_STAG': np.int32(45), + 'GRID_ID': np.int32(1), + 'PARENT_ID': np.int32(0), + 'I_PARENT_START': np.int32(1), + 'J_PARENT_START': np.int32(1), + 'PARENT_GRID_RATIO': np.int32(1), + 'DT': np.float32(18.), + 'MMINLU': 'MODIFIED_IGBP_MODIS_NOAH', + 'NUM_LAND_CAT': np.int32(41), + 'ISWATER': np.int32(17), + 'ISLAKE': np.int32(-1), + 'ISICE': np.int32(15), + 'ISURBAN': np.int32(13), + 'ISOILWATER': np.int32(14), + 'CEN_LAT': lat_ts, + 'CEN_LON': np.float32(self.grid.lon_0) + } + + if self.global_attributes_path is not None: + df = pd.read_csv(self.global_attributes_path) + + for att in atts_dict.iterkeys(): + try: + if att in int_atts: + atts_dict[att] = np.int32(df.loc[df['attribute'] == att, 'value'].item()) + elif att in float_atts: + atts_dict[att] = np.float32(df.loc[df['attribute'] == att, 'value'].item()) + elif att in str_atts: + atts_dict[att] = str(df.loc[df['attribute'] == att, 'value'].item()) + except ValueError: + print 'A warning has occurred. Check the .err file to get more information.' + if settings.rank == 0: + warning('The global attribute {0} is not defined; Using default value {1}'.format(att, atts_dict[att])) + + else: + settings.write_log('WARNING: Check the .err file to get more information.') + message = 'WARNING: No output attributes defined, check the output_attributes' + message += ' parameter of the configuration file.\nUsing default values:' + for key, value in atts_dict.iteritems(): + message += '\n\t{0} = {1}'.format(key, value) + if settings.rank == 0: + warning(message) + + return atts_dict + + def create_global_attributes(self): + """ + Creates the global attributes that have to be filled. + """ + + global_attributes = self.read_global_attributes() + + global_attributes['TITLE'] = 'Emissions generated by HERMESv3_GR.' 
+ global_attributes['START_DATE'] = self.date.strftime("%Y-%m-%d_%H:%M:%S") + global_attributes['JULYR'] = np.int32(self.date.year) + global_attributes['JULDAY'] = np.int32(self.date.strftime("%j")) + global_attributes['GMT'] = np.float32(self.date.hour) + global_attributes['HISTORY'] = \ + 'Code developed by Barcelona Supercomputing Center (BSC, https://www.bsc.es/). ' + \ + 'Developer: Carles Tena Medina (carles.tena@bsc.es). ' + \ + 'Reference: Guevara et al., 2018, GMD., in preparation.' + + if self.grid.grid_type == 'lcc' or self.grid.grid_type == 'mercator': + global_attributes['WEST-EAST_GRID_DIMENSION'] = np.int32(self.grid.nx + 1) + global_attributes['SOUTH-NORTH_GRID_DIMENSION'] = np.int32(self.grid.ny + 1) + global_attributes['DX'] = np.float32(self.grid.inc_x) + global_attributes['DY'] = np.float32(self.grid.inc_y) + global_attributes['SURFACE_INPUT_SOURCE'] = np.int32(1) + global_attributes['WEST-EAST_PATCH_START_UNSTAG'] = np.int32(1) + global_attributes['WEST-EAST_PATCH_END_UNSTAG'] = np.int32(self.grid.nx) + global_attributes['WEST-EAST_PATCH_START_STAG'] = np.int32(1) + global_attributes['WEST-EAST_PATCH_END_STAG'] = np.int32(self.grid.nx + 1) + global_attributes['SOUTH-NORTH_PATCH_START_UNSTAG'] = np.int32(1) + global_attributes['SOUTH-NORTH_PATCH_END_UNSTAG'] = np.int32(self.grid.ny) + global_attributes['SOUTH-NORTH_PATCH_START_STAG'] = np.int32(1) + global_attributes['SOUTH-NORTH_PATCH_END_STAG'] = np.int32(self.grid.ny + 1) + + global_attributes['POLE_LAT'] = np.float32(90) + global_attributes['POLE_LON'] = np.float32(0) + + if self.grid.grid_type == 'lcc': + global_attributes['MAP_PROJ'] = np.int32(1) + global_attributes['TRUELAT1'] = np.float32(self.grid.lat_1) + global_attributes['TRUELAT2'] = np.float32(self.grid.lat_2) + global_attributes['MOAD_CEN_LAT'] = np.float32(self.grid.lat_0) + global_attributes['STAND_LON'] = np.float32(self.grid.lon_0) + elif self.grid.grid_type == 'mercator': + global_attributes['MAP_PROJ'] = np.int32(3) + 
global_attributes['TRUELAT1'] = np.float32(self.grid.lat_ts) + global_attributes['TRUELAT2'] = np.float32(0) + global_attributes['MOAD_CEN_LAT'] = np.float32(self.grid.lat_ts) + global_attributes['STAND_LON'] = np.float32(self.grid.lon_0) + + return global_attributes + + def create_times_var(self): + from datetime import timedelta + import netCDF4 + + aux_times_list = [] + + for hour in self.hours: + aux_date = self.date + timedelta(hours=hour) + aux_times_list.append(aux_date.strftime("%Y-%m-%d_%H:%M:%S")) + + str_out = netCDF4.stringtochar(np.array(aux_times_list)) + return str_out + + def create_parallel_netcdf(self): + st_time = gettime() + settings.write_log("\tCreating parallel NetCDF file.", level=2) + netcdf = Dataset(self.path, mode='w', format="NETCDF4") + + if settings.rank == 0: + # ===== Dimensions ===== + settings.write_log("\t\tCreating NetCDF dimensions.", level=2) + netcdf.createDimension('Time', None) + settings.write_log("\t\t\t'Time' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), level=3) + netcdf.createDimension('DateStrLen', 19) + settings.write_log("\t\t\t'DateStrLen' dimension: 19", level=3) + netcdf.createDimension('west_east', self.grid.center_longitudes.shape[1]) + settings.write_log("\t\t\t'west_east' dimension: {0}".format(len(self.hours)), level=3) + netcdf.createDimension('south_north', self.grid.center_latitudes.shape[0]) + settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + netcdf.createDimension('emissions_zdim', len(self.levels)) + settings.write_log("\t\t\t'emissions_zdim' dimension: {0}".format(len(self.levels)), level=3) + + # ===== Variables ===== + settings.write_log("\t\tCreating NetCDF variables.", level=2) + times = netcdf.createVariable('Times', 'S1', ('Time', 'DateStrLen', )) + times[:] = self.create_times_var() + settings.write_log("\t\t\t'Times' variable created with size: {0}".format(times[:].shape), level=3) + + index = 0 + 
self.change_variable_attributes() + for var_name in self.variables_attributes.iterkeys(): + index += 1 + var = netcdf.createVariable(var_name, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east',), + zlib=self.compress) + var.setncatts(self.variables_attributes[var_name]) + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(var_name, var[:].shape) + + "\n\t\t\t\t'{0}' variable will be filled later.".format(var_name), level=3) + + # ===== Global attributes ===== + settings.write_log("\t\tCreating NetCDF metadata.", level=2) + global_attributes = self.create_global_attributes() + for attribute in self.global_attributes_order: + netcdf.setncattr(attribute, global_attributes[attribute]) + + netcdf.close() + + settings.write_time('WriterCmaq', 'create_parallel_netcdf', gettime() - st_time, level=3) + + def write_parallel_netcdf(self, emission_list): + st_time = gettime() + settings.write_log("\tAppending data to parallel NetCDF file.", level=2) + + if settings.size > 1: + netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + else: + netcdf = Dataset(self.path, mode='a', format="NETCDF4") + settings.write_log("\t\tParallel NetCDF file ready to write.", level=2) + index = 0 + for var_name in self.variables_attributes.iterkeys(): + data = self.calculate_data_by_var(var_name, emission_list, self.grid.shape) + + index += 1 + + var = netcdf.variables[var_name] + if settings.size > 1: + var.set_collective(True) + # Correcting NAN + if data is None: + data = 0 + var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = data + settings.write_log("\t\t\t'{0}' variable filled".format(var_name)) + + netcdf.close() + settings.write_time('WriterCmaq', 'write_parallel_netcdf', gettime() - st_time, level=3) + + def write_serial_netcdf(self, emission_list): + st_time = gettime() + + # Gathering the index + rank_position = np.array( + 
[self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound, self.grid.y_upper_bound], + dtype='i') + full_position = None + if settings.rank == 0: + full_position = np.empty([settings.size, 4], dtype='i') + settings.comm.Gather(rank_position, full_position, root=0) + + if settings.rank == 0: + settings.write_log("\tCreating NetCDF file.", level=2) + netcdf = Dataset(self.path, mode='w', format="NETCDF4") + + # ===== Dimensions ===== + settings.write_log("\t\tCreating NetCDF dimensions.", level=2) + netcdf.createDimension('Time', None) + settings.write_log("\t\t\t'Time' dimension: UNLIMITED", level=3) + netcdf.createDimension('DateStrLen', 19) + settings.write_log("\t\t\t'DateStrLen' dimension: 19", level=3) + netcdf.createDimension('west_east', self.grid.center_longitudes.shape[1]) + settings.write_log("\t\t\t'west_east' dimension: {0}".format(len(self.hours)), level=3) + netcdf.createDimension('south_north', self.grid.center_latitudes.shape[0]) + settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + netcdf.createDimension('emissions_zdim', len(self.levels)) + settings.write_log("\t\t\t'emissions_zdim' dimension: {0}".format(len(self.levels)), level=3) + + # ===== Variables ===== + settings.write_log("\t\tCreating NetCDF variables.", level=2) + times = netcdf.createVariable('Times', 'S1', ('Time', 'DateStrLen', )) + times[:] = self.create_times_var() + settings.write_log("\t\t\t'Times' variable created with size: {0}".format(times[:].shape), level=3) + + full_shape = None + index = 0 + + self.change_variable_attributes() + + for var_name in self.variables_attributes.iterkeys(): + if settings.size != 1: + settings.write_log("\t\t\tGathering {0} data.".format(var_name), level=3) + rank_data = self.calculate_data_by_var(var_name, emission_list, self.grid.shape) + if rank_data is not None: + # root_shape = settings.comm.bcast(rank_data.shape, root=0) + if full_shape is None: + full_shape = 
settings.comm.allgather(rank_data.shape) + + counts_i = self.tuple_to_index(full_shape) + rank_buff = [rank_data, counts_i[settings.rank]] + if settings.rank == 0: + displacements = self.calculate_displacements(counts_i) + recvdata = np.empty(sum(counts_i), dtype=settings.precision) + else: + displacements = None + recvdata = None + if settings.precision == np.float32: + recvbuf = [recvdata, counts_i, displacements, MPI.FLOAT] + elif settings.precision == np.float64: + recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE] + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError('ERROR: precision {0} unknown'.format(settings.precision)) + sys.exit(1) + + settings.comm.Gatherv(rank_buff, recvbuf, root=0) + + if settings.rank == 0: + if settings.size != 1: + try: + data = np.concatenate(data, axis=3) + except: + data = 0 + st_time = gettime() + index += 1 + + var = netcdf.createVariable(var_name, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east',), zlib=self.compress) + var.setncatts(self.variables_attributes[var_name]) + + var_time = gettime() + + # data_list = []#np.empty(shape, dtype=np.float64) + + if rank_data is not None: + data = np.empty(var[:].shape, dtype=settings.precision) + for i in xrange(settings.size): + # print 'Resizeing {0}'.format(i) + if not i == settings.size - 1: + data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) + else: + data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) + else: + data = 0 + var[:] = data + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(var_name, var[:].shape), + level=3) + settings.write_log("\t\tCreating NetCDF metadata.", level=2) + if settings.rank == 0: + # ===== Global attributes ===== + 
global_attributes = self.create_global_attributes() + for attribute in self.global_attributes_order: + netcdf.setncattr(attribute, global_attributes[attribute]) + + netcdf.close() + settings.write_time('WriterWrfChem', 'write_serial_netcdf', gettime() - st_time, level=3) + return True + + + diff --git a/hermesv3_gr/tools/__init__.py b/hermesv3_gr/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hermesv3_gr/tools/coordinates_tools.py b/hermesv3_gr/tools/coordinates_tools.py new file mode 100644 index 0000000..4c1b767 --- /dev/null +++ b/hermesv3_gr/tools/coordinates_tools.py @@ -0,0 +1,489 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ +import os +import sys +# import numpy as np +# import math + + +# Global variables + + +def get_grid_area(filename): + # TODO Documentation + """ + + :param filename: + :return: + """ + from cdo import Cdo + from netCDF4 import Dataset + + cdo = Cdo() + s = cdo.gridarea(input=filename) + nc_aux = Dataset(s, mode='r') + grid_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + return grid_area + + +def latlon2rotated(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): + # TODO Documentation + """ + + :param lon_pole_deg: + :param lat_pole_deg: + :param lon_deg: + :param lat_deg: + :param lon_min: + :return: + """ + import math + + degrees_to_radians = math.pi / 180. + radians_to_degrees = 180. / math.pi + + lon_max = lon_min + 360 + + # stlm=sin(tlm) + sin_lat_pole_rad = math.sin(lat_pole_deg * degrees_to_radians) + # ctlm=cos(tlm) + cos_lat_pole_rad = math.cos(lat_pole_deg * degrees_to_radians) + # stph=sin(tph) + sin_lon_pole_rad = math.sin(lon_pole_deg * degrees_to_radians) + # ctph=cos(tph) + cos_lon_pole_rad = math.cos(lon_pole_deg * degrees_to_radians) + + # relm=(xlon-tlm0d)*dtr !distance from the centre lon (in rad) + distance_from_center_lon = (lon_deg - lon_pole_deg) * degrees_to_radians + # crlm=cos(relm) !cos of this distance + cos_distance_from_center_lon = math.cos(distance_from_center_lon) + # srlm=sin(relm) !sin of this distance + sin_distance_from_center_lon = math.sin(distance_from_center_lon) + # aph=xlat*dtr !lat in rad + lat_rad = lat_deg * degrees_to_radians + # cph=cos(aph) !cos of lat + cos_lat_rad = math.cos(lat_rad) + # sph=sin(aph) !sin of lat + sin_lat_rad = math.sin(lat_rad) + + # cc=cph*crlm !cos of lat times cos of lon distance + cycdx = cos_lat_rad * cos_distance_from_center_lon + # anum=cph*srlm !cos of lat times sin of lon distance + # denom=ctph0*cc+stph0*sph !cos of the centre lat times cc plus sin of the centre lat times sin of lat + # tlm=atan2(anum,denom) + rotated_lon = math.atan2(cos_lat_rad * 
sin_distance_from_center_lon, + cos_lat_pole_rad * cycdx + sin_lat_pole_rad * sin_lat_rad) + # tph=asin(ctph0*sph-stph0*cc) + sin_rotated_lat = cos_lat_pole_rad * sin_lat_rad - sin_lat_pole_rad * cycdx + if sin_rotated_lat > 1.: + sin_rotated_lat = 1. + if sin_rotated_lat < -1.: + sin_rotated_lat = -1. + + rotated_lat = math.asin(sin_rotated_lat) + + return rotated_lon * radians_to_degrees, rotated_lat * radians_to_degrees + + +def rotated2latlon(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): + # TODO Documentation + """ + + :param lon_pole_deg: + :param lat_pole_deg: + :param lon_deg: + :param lat_deg: + :param lon_min: + :return: + """ + import numpy as np + import math + + degrees_to_radians = math.pi / 180. + radians_to_degrees = 180. / math.pi + + # Positive east to negative east + lon_pole_deg -= 180 + + tph0 = lat_pole_deg * degrees_to_radians + tlm = lon_deg * degrees_to_radians + tph = lat_deg * degrees_to_radians + tlm0d = lon_pole_deg + ctph0 = np.cos(tph0) + stph0 = np.sin(tph0) + + stlm = np.sin(tlm) + ctlm = np.cos(tlm) + stph = np.sin(tph) + ctph = np.cos(tph) + + # Latitude + sph = (ctph0 * stph) + (stph0 * ctph * ctlm) + # if sph > 1.: + # sph = 1. + # if sph < -1.: + # sph = -1. + # print type(sph) + sph[sph > 1.] = 1. + sph[sph < -1.] = -1. 
+ + aph = np.arcsin(sph) + aphd = aph / degrees_to_radians + + # Longitude + anum = ctph * stlm + denom = (ctlm * ctph - stph0 * sph) / ctph0 + relm = np.arctan2(anum, denom) - math.pi + almd = relm / degrees_to_radians + tlm0d + + # if almd < min_lon: + # almd += 360 + # elif almd > max_lon: + # almd -= 360 + # TODO use lon_min + almd[almd > (lon_min + 360)] -= 360 + almd[almd < lon_min] += 360 + + return almd, aphd + + +def rotated2latlon_single(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): + # TODO Docuemtnation + """ + + :param lon_pole_deg: + :param lat_pole_deg: + :param lon_deg: + :param lat_deg: + :param lon_min: + :return: + """ + import math + + degrees_to_radians = math.pi / 180. + radians_to_degrees = 180. / math.pi + + # lon_max = lon_min + 360 + # + # sin_lat_pole_rad = math.sin(lat_pole_deg*degrees_to_radians) + # cos_lat_pole_rad = math.cos(lat_pole_deg*degrees_to_radians) # + # sin_lon_pole_rad = math.sin(lon_pole_deg*degrees_to_radians) # stph + # cos_lon_pole_rad = math.cos(lon_pole_deg*degrees_to_radians) # ctph + # + # + # # relm=(xlon-tlm0d)*dtr !distance from the centre lon (in rad) + # distance_from_center_lon = (lon_deg - lon_pole_deg)*degrees_to_radians + # # ctlm=cos(relm) !cos of this distance + # cos_distance_from_center_lon = math.cos(distance_from_center_lon) + # # stlm=sin(relm) !sin of this distance + # sin_distance_from_center_lon = math.sin(distance_from_center_lon) + # # aph=xlat*dtr !lat in rad + # lat_rad = lat_deg*degrees_to_radians + # # ctph=cos(aph) !cos of lat + # cos_lat_rad = math.cos(lat_rad) + # # stph=sin(aph) !sin of lat + # sin_lat_rad = math.sin(lat_rad) + # + # + # + # # sph=ctph0*stph+stph0*ctph*ctlm + # sin_rotated_lat = (cos_lat_pole_rad*sin_lat_rad) + (sin_distance_from_center_lon*cos_lat_rad*cos_distance_from_center_lon) + # # sph=min(sph,1.) + # # sph=max(sph,-1.) + # if sin_rotated_lat > 1.: + # sin_rotated_lat = 1. + # if sin_rotated_lat < -1.: + # sin_rotated_lat = -1. 
+ # # aph=asin(sph) + # real_latitude = math.asin(sin_rotated_lat) + # real_longitude = math.atan2(cos_lat_rad*sin_distance_from_center_lon, (cos_distance_from_center_lon*cos_lat_rad - sin_lat_pole_rad*sin_rotated_lat)/cos_lat_pole_rad) - math.pi + + # Positive east to negative east + lon_pole_deg -= 180 + + tph0 = lat_pole_deg * degrees_to_radians + tlm = lon_deg * degrees_to_radians + tph = lat_deg * degrees_to_radians + tlm0d = lon_pole_deg + ctph0 = math.cos(tph0) + stph0 = math.sin(tph0) + + stlm = math.sin(tlm) + ctlm = math.cos(tlm) + stph = math.sin(tph) + ctph = math.cos(tph) + + # Latitude + sph = (ctph0 * stph) + (stph0 * ctph * ctlm) + # if sph > 1.: + # sph = 1. + # if sph < -1.: + # sph = -1. + + aph = math.asin(sph) + aphd = aph / degrees_to_radians + + # Longitude + anum = ctph * stlm + denom = (ctlm * ctph - stph0 * sph) / ctph0 + relm = math.atan2(anum, denom) - math.pi + almd = relm / degrees_to_radians + tlm0d + + if almd > (lon_min + 360): + almd -= 360 + elif almd < lon_min: + almd += 360 + + return almd, aphd + + +def create_bounds(coords, number_vertices=2): + """ + Calculates the vertices coordinates. + + :param coords: Coordinates in degrees (latitude or longitude) + :type coords: numpy.ndarray + + :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. + (by default 2) + :type number_vertices: int + + :return: Array with as many elements as vertices for each value of coords. 
+ :rtype: numpy.ndarray + """ + import numpy as np + + interval = coords[1] - coords[0] + + coords_left = coords - interval / 2 + coords_right = coords + interval / 2 + if number_vertices == 2: + bound_coords = np.dstack((coords_left, coords_right)) + elif number_vertices == 4: + bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + raise ValueError('The number of vertices of the boudaries must be 2 or 4') + + return bound_coords + + +def create_bounds_esmpy(coords, spheric=False): + # TODO Documentation + """ + + :param coords: + :param spheric: + :return: + """ + import numpy as np + + interval = coords[1] - coords[0] + + bound_coords = coords - interval/2 + if not spheric: + bound_coords = np.append(bound_coords, [bound_coords[-1] + interval]) + + return bound_coords + + +def create_regular_rotated(lat_origin, lon_origin, lat_inc, lon_inc, n_lat, n_lon): + # TODO Documentation + """ + + :param lat_origin: + :param lon_origin: + :param lat_inc: + :param lon_inc: + :param n_lat: + :param n_lon: + :return: + """ + import numpy as np + + center_latitudes = np.arange(lat_origin, lat_origin + (n_lat*lat_inc), lat_inc, dtype=np.float) + center_longitudes = np.arange(lon_origin, lon_origin + (n_lon*lon_inc), lon_inc, dtype=np.float) + + # print lat_origin + (n_lat*lat_inc) + # print n_lat*lat_inc + + corner_latitudes = create_bounds_esmpy(center_latitudes) + corner_longitudes = create_bounds_esmpy(center_longitudes) + + return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes + + +def create_regular_old(lat_origin, lon_origin, lat_inc, lon_inc, n_lat, n_lon): + # TODO Documentation + import numpy as np + + center_latitudes = np.arange(lat_origin, lat_origin + (n_lat*lat_inc), lat_inc, dtype=np.float) + center_longitudes = np.arange(lon_origin, lon_origin + (n_lon*lon_inc), lon_inc, dtype=np.float) + + # print lat_origin + (n_lat*lat_inc) + # print n_lat*lat_inc + + corner_latitudes = 
create_bounds(center_latitudes) + corner_longitudes = create_bounds(center_longitudes) + + return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes + + +# def create_regular_grid(center_lat, center_lon, west_boundary, south_boundary, inc_lat, inc_lon): +# """ +# Creates a custom grid with the given parameters. The grid is divided in 4 arrays: +# - Center Latitudes +# - Center Longitudes +# - Boundary Latitudes (# latitudes +1) +# - Boundary Longitudes (# longitudes +1) +# +# :param center_lat: Latitude of the center of the grid (degrees). +# :type center_lat: float +# +# :param center_lon: Longitude of the center of the grid (degrees). +# :type center_lon: float +# +# :param west_boundary: Distance from de center to the western boundary (degrees) +# (not to the center of the first cell) +# :type west_boundary: float +# +# :param south_boundary: Distance from de center to the southern boundary (degrees) +# (not to the center of the first cell) +# :type south_boundary: float +# +# :param inc_lat: Vertical resolution of each cell (degrees). +# :type inc_lat: float +# +# :param inc_lon: Horizontal resolution of each cell (degrees) +# :type inc_lon: float +# +# :return: Arrays with the Center Latitudes, Center Longitudes, Boundary Latitudes, Boundary Longitudes. 
+# :rtype: tuple (numpy.ndarray, numpy.ndarray, numpy.ndarray, numpy.ndarray) +# """ +# lat_origin = center_lat - abs(south_boundary) + (inc_lat/2) +# lon_origin = center_lon - abs(west_boundary) + (inc_lon/2) +# n_lat = (abs(south_boundary)/inc_lat)*2 +# n_lon = (abs(west_boundary)/inc_lon)*2 +# +# center_latitudes = np.arange(lat_origin, lat_origin + (n_lat*inc_lat), inc_lat, dtype=np.float) +# center_longitudes = np.arange(lon_origin, lon_origin + (n_lon*inc_lon), inc_lon, dtype=np.float) +# +# corner_latitudes = create_bounds(center_latitudes) +# corner_longitudes = create_bounds(center_longitudes) +# +# # print center_latitudes +# +# return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes + + +def create_regular_grid(center_lat, center_lon, west_boundary, south_boundary, inc_lat, inc_lon): + """ + Creates a custom grid with the given parameters. The grid is divided in 4 arrays: + - Center Latitudes + - Center Longitudes + - Boundary Latitudes (# latitudes +1) + - Boundary Longitudes (# longitudes +1) + + :param center_lat: Latitude of the center of the grid (degrees). + :type center_lat: float + + :param center_lon: Longitude of the center of the grid (degrees). + :type center_lon: float + + :param west_boundary: Distance from de center to the western boundary (degrees) + (not to the center of the first cell) + :type west_boundary: float + + :param south_boundary: Distance from de center to the southern boundary (degrees) + (not to the center of the first cell) + :type south_boundary: float + + :param inc_lat: Vertical resolution of each cell (degrees). + :type inc_lat: float + + :param inc_lon: Horizontal resolution of each cell (degrees) + :type inc_lon: float + + :return: Arrays with the Center Latitudes, Center Longitudes, Boundary Latitudes, Boundary Longitudes. 
+ :rtype: tuple (numpy.ndarray, numpy.ndarray, numpy.ndarray, numpy.ndarray) + """ + import numpy as np + + lat_origin = center_lat - abs(south_boundary) # + (inc_lat/2) + lon_origin = center_lon - abs(west_boundary) # + (inc_lon/2) + n_lat = (abs(south_boundary)/inc_lat)*2 + n_lon = (abs(west_boundary)/inc_lon)*2 + + center_latitudes = np.arange(lat_origin + inc_lat, lat_origin + (n_lat*inc_lat) - inc_lat + inc_lat/2, inc_lat, + dtype=np.float) + center_longitudes = np.arange(lon_origin + inc_lon, lon_origin + (n_lon*inc_lon) - inc_lon + inc_lon/2, inc_lon, + dtype=np.float) + + corner_latitudes = create_bounds(center_latitudes) + corner_longitudes = create_bounds(center_longitudes) + + center_latitudes = np.concatenate([ + [lat_origin + inc_lat/2 - inc_lat/4], + center_latitudes, + [lat_origin + (n_lat*inc_lat) - inc_lat/2 + inc_lat/4]]) + + center_longitudes = np.concatenate([ + [lon_origin + inc_lon/2 - inc_lon/4], + center_longitudes, + [lon_origin + (n_lon*inc_lon) - inc_lon/2 + inc_lon/4]]) + + corner_latitudes = np.concatenate([ + [[[lat_origin, lat_origin + inc_lat/2]]], + corner_latitudes, + [[[lat_origin + (n_lat*inc_lat) - inc_lat/2, lat_origin + (n_lat*inc_lat)]]]], axis=1) + + corner_longitudes = np.concatenate([ + [[[lon_origin, lon_origin + inc_lon/2]]], + corner_longitudes, + [[[lon_origin + (n_lon*inc_lon) - inc_lon/2, lon_origin + (n_lon*inc_lon)]]]], axis=1) + + return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes + + +if __name__ == '__main__': + import numpy as np + new_pole_longitude_degrees = 20.0 # lonpole tlm0d + new_pole_latitude_degrees = 35.0 # latpole tph0d + # + print latlon2rotated(new_pole_longitude_degrees, new_pole_latitude_degrees, 20.0, 35.0) + print latlon2rotated(new_pole_longitude_degrees, new_pole_latitude_degrees, -20.2485, -9.9036) + # + print rotated2latlon_single(new_pole_longitude_degrees, new_pole_latitude_degrees, 0, 0) + print rotated2latlon_single(new_pole_longitude_degrees, 
new_pole_latitude_degrees, -51., -35.) + # # print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -34.9) + # # print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -34.8) + # # print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -34.7) + print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, np.array([-51., -51., -51., -51.]), + np.array([-35., -34.9, -34.8, -34.7])) + # + # lat, lon = rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, np.array([0]), np.array([0])) + # print lat + + # lat, lon, b_lat, b_lon = create_regular_grid(0, 0, -180, -90, 1., 1.) + # print lat + # print lon + # print b_lat + # print b_lon diff --git a/hermesv3_gr/tools/custom_calendar.py b/hermesv3_gr/tools/custom_calendar.py new file mode 100644 index 0000000..256bda9 --- /dev/null +++ b/hermesv3_gr/tools/custom_calendar.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +import sys +import datetime +import holidays + + +def custom_holidays(zone, year): + """ + Calculates the festivity days that appear in the library holidays adding the Maundy Thursday and the God Friday + + :param zone: Name of the country. 
        It has to appear and has to have the same format (capital letters) of the library
        holidays: https://pypi.python.org/pypi/holidays
    :type zone: str

    :param year: Year to get the festivities.
    :type year: int

    :return: Dictionary with the festivity days.
    :rtype: dict
    """
    c_holidays = get_holidays(zone, year)

    # Adding more festivities than appear in the library
    pascua_sunday = pascua(year)
    c_holidays.update({pascua_sunday - datetime.timedelta(days=3): 'Maundy Thursday'})  # Jueves Santo
    c_holidays.update({pascua_sunday - datetime.timedelta(days=2): 'God Friday'})  # Viernes Santo

    return c_holidays


def get_holidays(zone, year):
    """
    Find the holidays for the selected zone and year.

    :param zone: Name of the country. It has to appear and has to have the same format (capital letters) of the library
        holidays: https://pypi.python.org/pypi/holidays
    :type zone: str

    :param year: Year to find the holidays for.
    :type year: int

    :return: Dictionary with the festivity days.
    :rtype: dict
    """
    # Each country is exposed by the `holidays` library as a class named after the
    # country; look it up by name and instantiate it for the requested year.
    method_to_call = getattr(holidays, zone)
    result = method_to_call(years=year)
    return result


def pascua(year):
    """
    Calculates the "Pascua" (Easter Sunday) date.

    NOTE(review): the constants below (m=24, n=5) look like Gauss's Easter algorithm,
    whose m/n values are only valid for a limited range of years (approx. 1900-2099) --
    TODO confirm before using outside that range.

    :param year: Year to find the Pascua for.
    :type year: int

    :return: Sunday of Pascua.
+ :rtype: datetime.date + """ + # Magic constants + m = 24 + n = 5 + + # Remainders + a = year % 19 + b = year % 4 + c = year % 7 + d = (19 * a + m) % 30 + e = (2 * b + 4 * c + 6 * d + n) % 7 + + if d + e < 10: + day = d + e + 22 + month = 3 + else: + day = d + e - 9 + month = 4 + + # Special exceptions + if day == 26 and month == 4: + day = 19 + if day == 25 and month == 4 and d == 28 and e == 6 and a > 10: + day = 18 + + return datetime.date(year, month, day) diff --git a/hermesv3_gr/tools/lcc_LatLon_to_m.py b/hermesv3_gr/tools/lcc_LatLon_to_m.py new file mode 100644 index 0000000..5fbe9d9 --- /dev/null +++ b/hermesv3_gr/tools/lcc_LatLon_to_m.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +from pyproj import Proj +if __name__ == '__main__': + projection = Proj( + proj='lcc', + ellps='WGS84', + R=6370000.000, + lat_1=37, + lat_2=43, + lon_0=-3, + lat_0=40, + to_meter=1, + x_0=0, + y_0=0, + a=6370000.000, + k_0=1.0) + lon_array = [-11.5488, -11.5066, 7.1104] + lat_array = [32.5108, 32.5142, 46.6579] + UTMx, UTMy = projection(lon_array, lat_array) + + print UTMx, UTMy \ No newline at end of file diff --git a/hermesv3_gr/tools/netcdf_tools.py b/hermesv3_gr/tools/netcdf_tools.py new file mode 100644 index 0000000..4adbd9b --- /dev/null +++ b/hermesv3_gr/tools/netcdf_tools.py @@ -0,0 +1,3778 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import os +from timeit import default_timer as gettime +import sys +import numpy as np +from netCDF4 import Dataset +from mpi4py import MPI +from hermesv3_gr.config import settings + + +def open_netcdf(netcdf_path): + from netCDF4 import Dataset + nc_out = Dataset(netcdf_path, mode='a') + return nc_out + + +def close_netcdf(nc): + nc.close() + + +def get_grid_area(filename): + """ + Calculates the area of each cell. + + :param filename: Full path to the NetCDF to calculate the cell areas. + :type filename: str + + :return: Returns the area of each cell. + :rtype: numpy.ndarray + """ + from cdo import Cdo + + cdo = Cdo() + s = cdo.gridarea(input=filename) + nc_aux = Dataset(s, mode='r') + grid_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + return grid_area + + +def extract_vars(netcdf_path, variables_list, attributes_list=[]): + data_list = [] + # print netcdf_path + netcdf = Dataset(netcdf_path, mode='r') + for var in variables_list: + if var == 'emi_nox_no2': + var1 = var + var2 = 'emi_nox' + else: + var1 = var2 = var + dict_aux = \ + { + 'name': var1, + 'data': netcdf.variables[var2][:], + } + for attribute in attributes_list: + dict_aux.update({attribute: netcdf.variables[var2].getncattr(attribute)}) + data_list.append(dict_aux) + netcdf.close() + + return data_list + + + +def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, + Mercator=False, lat_ts=None): + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param 
levels: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + + :param global_attributes: + :type global_attributes: dict + + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :return: + """ + from cf_units import Unit, encode_time + + if not (RegularLatLon or LambertConformalConic or Rotated or Mercator): + RegularLatLon = True + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(center_latitudes.shape) == 1: + netcdf.createDimension('lat', center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(center_latitudes.shape) == 2: + netcdf.createDimension('lat', center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(center_longitudes.shape) == 1: + netcdf.createDimension('lon', center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(center_longitudes.shape) == 2: + netcdf.createDimension('lon', center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if rotated_lats is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(rotated_lats)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if rotated_lons is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' 
+ sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + elif LambertConformalConic or Mercator: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + try: + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + except TypeError: + netcdf.createDimension('nv', boundary_latitudes.shape[1]) + + # sys.exit() + + # Time + netcdf.createDimension('time', None) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',), zlib=True) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] + else: + time = netcdf.createVariable('time', 'd', ('time',), zlib=True) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + lats[:] = center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) + # print lat_bnds[:].shape, boundary_latitudes.shape + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = 
netcdf.createVariable('lon', 'f', lon_dim, zlib=True) + + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + # print 'lons:', lons[:].shape, center_longitudes.shape + lons[:] = center_longitudes + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) + # print lon_bnds[:].shape, boundary_longitudes.shape + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + rlon[:] = rotated_lons + if LambertConformalConic or Mercator: + x = netcdf.createVariable('x', 'd', ('x',), zlib=True) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=True) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) + lev.units = Unit("m").symbol + lev.positive = 'up' + lev[:] = levels + + # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) + var[:] = 0 + for variable in data_list: + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True) + var.units = Unit(variable['units']).symbol + if 
'long_name' in variable: + var.long_name = str(variable['long_name']) + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + var.coordinates = "lat lon" + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + elif Mercator: + var.grid_mapping = 'mercator' + try: + var[:] = variable['data'] + except: + print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + elif Mercator: + #Mercator + mapping = netcdf.createVariable('mercator', 'i') + mapping.grid_mapping_name = "mercator" + mapping.longitude_of_projection_origin = lon_0 + mapping.standard_parallel = lat_ts + + # Cell area + if cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + c_area[:] = cell_area + + if global_attributes is not None: 
+ netcdf.setncatts(global_attributes) + + netcdf.close() + +def write_netcdf_parallel(netcdf_path, grid, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + + :param global_attributes: + :type global_attributes: dict + + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :return: + """ + from cf_units import Unit, encode_time + + if not (RegularLatLon or LambertConformalConic or Rotated): + RegularLatLon = True + + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(grid.center_latitudes.shape) == 1: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(grid.center_latitudes.shape) == 2: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(grid.center_longitudes.shape) == 1: + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(grid.center_longitudes.shape) == 2: + netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array 
instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if rotated_lats is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(rotated_lats)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if rotated_lons is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' + sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + # sys.exit() + + # Time + netcdf.createDimension('time', None) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',), zlib=True) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] 
+ else: + time = netcdf.createVariable('time', 'd', ('time',), zlib=True) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if rank == 0: + lats[:] = grid.center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) + # print lat_bnds[:].shape, boundary_latitudes.shape + if rank == 0: + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if rank == 0: + lons[:] = grid.center_longitudes + + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) + # print lon_bnds[:].shape, boundary_longitudes.shape + if rank == 0: + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if rank == 0: + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = 
"grid_longitude" + if rank == 0: + rlon[:] = rotated_lons + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=True) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if rank == 0: + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=True) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if rank == 0: + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) + lev.units = Unit("m").symbol + lev.positive = 'up' + if rank == 0: + lev[:] = levels + + # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) + var[:] = 0 + for variable in data_list: + print "Rank {0} var: {1}".format(rank, variable['name']) + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True) + var.units = Unit(variable['units']).symbol + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + var.coordinates = "lat lon" + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) + # print variable + + var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + # Grid mapping + if RegularLatLon: + # 
CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + # Cell area + if cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() + + +def write_simple_netcdf_parallel(netcdf_path, grid, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + from cf_units import Unit, encode_time + + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + + # ===== Dimensions ===== + var_dim = ('lat', 'lon',) + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + # lat_dim = ('lat',) + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + # lon_dim = ('lon',) + 
netcdf.createDimension('lev', len(levels)) + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + time = netcdf.createVariable('time', 'd', ('time',))#, zlib=True) + u = Unit('hours') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', 'lat')# , zlib=True) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if rank == 0: + lats[:] = grid.center_latitudes + + + # Longitude + lons = netcdf.createVariable('lon', 'f', 'lon')# , zlib=True) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if rank == 0: + lons[:] = grid.center_longitudes + + cell_area_dim = var_dim + # Levels + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',))# , zlib=True) + lev.units = Unit("m").symbol + lev.positive = 'up' + if rank == 0: + lev[:] = levels + + index = 0 + for variable in data_list: + index += 1 + print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index,len(data_list)) + + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim) + # chunksizes=(1, len(levels), + # grid.x_upper_bound - grid.x_lower_bound, + # grid.y_upper_bound - grid.y_lower_bound))# , zlib=True) + var.units = Unit(variable['units']).symbol + # print "Rank {0} in: {1}, out: {2}".format(rank, variable['data'].shape, var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape) + netcdf.close() + netcdf = Dataset(netcdf_path, mode='r+', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + index = 0 + for variable in 
data_list: + st_time = gettime() + index += 1 + print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index,len(data_list)) + + var = netcdf.variables[variable['name']] + if index == 8: + pass + # print variable['data'].shape, var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape + else: + var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) + + netcdf.close() + +def write_chunked_netcdf_parallel(netcdf_path, grid, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + + :param global_attributes: + :type global_attributes: dict + + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :return: + """ + from cf_units import Unit, encode_time + + if not (RegularLatLon or LambertConformalConic or Rotated): + RegularLatLon = True + + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(grid.center_latitudes.shape) == 1: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(grid.center_latitudes.shape) == 2: + netcdf.createDimension('lat', 
grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(grid.center_longitudes.shape) == 1: + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(grid.center_longitudes.shape) == 2: + netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if rotated_lats is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(rotated_lats)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if rotated_lons is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' + sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + # sys.exit() + + # Time + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] 
+ else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if rank == 0: + lats[:] = grid.center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) + # print lat_bnds[:].shape, boundary_latitudes.shape + if rank == 0: + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if rank == 0: + lons[:] = grid.center_longitudes + + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) + # print lon_bnds[:].shape, boundary_longitudes.shape + if rank == 0: + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if rank == 0: + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + 
if rank == 0: + rlon[:] = rotated_lons + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=True) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if rank == 0: + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=True) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if rank == 0: + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) + lev.units = Unit("m").symbol + lev.positive = 'up' + if rank == 0: + lev[:] = levels + + # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) + var[:] = 0 + + index = 0 + for variable in data_list: + st_time = gettime() + index += 1 + print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, + chunksizes=(1, len(levels), + grid.x_upper_bound - grid.x_lower_bound, + grid.y_upper_bound - grid.y_lower_bound), zlib=True) + var.units = Unit(variable['units']).symbol + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + var.coordinates = "lat lon" + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, 
variable['data'].shape) + # print variable + + var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + settings.comm.Barrier() + + print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + # Cell area + if cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() + +def write_chunked_uncompressed_netcdf_parallel(netcdf_path, grid, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + """ + + :param netcdf_path: + :param center_latitudes: + :param 
center_longitudes: + :param data_list: + :param levels: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + + :param global_attributes: + :type global_attributes: dict + + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :return: + """ + from cf_units import Unit, encode_time + + if not (RegularLatLon or LambertConformalConic or Rotated): + RegularLatLon = True + + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(grid.center_latitudes.shape) == 1: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(grid.center_latitudes.shape) == 2: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(grid.center_longitudes.shape) == 1: + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(grid.center_longitudes.shape) == 2: + netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if rotated_lats is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(rotated_lats)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if rotated_lons is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' 
+ sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + # sys.exit() + + # Time + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] + else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim)#, zlib=True) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if rank == 0: + lats[:] = grid.center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',))#, zlib=True) + # print lat_bnds[:].shape, boundary_latitudes.shape + if rank == 0: + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', 
lon_dim)#, zlib=True) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if rank == 0: + lons[:] = grid.center_longitudes + + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',))#, zlib=True) + # print lon_bnds[:].shape, boundary_longitudes.shape + if rank == 0: + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',))#, zlib=True) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if rank == 0: + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',))#, zlib=True) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + if rank == 0: + rlon[:] = rotated_lons + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',))#, zlib=True) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if rank == 0: + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',))#, zlib=True) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if rank == 0: + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',))#, zlib=True) + lev.units = Unit("m").symbol + lev.positive = 'up' + if rank == 0: + lev[:] = levels + + # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim)#, zlib=True) + var[:] = 0 + + index = 0 + for variable in data_list: + st_time = gettime() + index += 1 + print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) 
+ # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, + chunksizes=(1, len(levels), + grid.x_upper_bound - grid.x_lower_bound, + grid.y_upper_bound - grid.y_lower_bound))# , zlib=True) + var.units = Unit(variable['units']).symbol + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + var.coordinates = "lat lon" + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) + # print variable + + var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + settings.comm.Barrier() + + print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + # Cell 
def only_create_chunked_uncompressed_netcdf_parallel(netcdf_path, grid, data_list,
                                                     levels=None, date=None, hours=None,
                                                     boundary_latitudes=None, boundary_longitudes=None,
                                                     cell_area=None, global_attributes=None,
                                                     RegularLatLon=False,
                                                     Rotated=False, rotated_lats=None, rotated_lons=None,
                                                     north_pole_lat=None, north_pole_lon=None,
                                                     LambertConformalConic=False, lcc_x=None, lcc_y=None,
                                                     lat_1_2=None, lon_0=None, lat_0=None):
    """
    Create (but do not fill) a chunked, uncompressed NetCDF4 file.

    Dimensions, coordinate variables, the grid-mapping variable and the (empty)
    data variables are defined here; only rank 0 writes coordinate values. The
    data variables are filled afterwards, in parallel, by
    only_write_chunked_uncompressed_netcdf_parallel().

    :param netcdf_path: Path of the NetCDF file to create.
    :param grid: Grid object providing center_latitudes, center_longitudes and the
        per-rank sub-domain bounds (x_lower_bound, x_upper_bound, y_lower_bound,
        y_upper_bound).
    :param data_list: List of dicts with keys 'name', 'units' and optionally
        'long_name', 'standard_name', 'cell_method'. Values are NOT written here.
    :param levels: Vertical level values (m) or None for 2D output.
    :param date: Start date of the time axis; None selects a placeholder monthly axis.
    :param hours: Hour offsets (from `date`) of each time step.
    :param boundary_latitudes: Cell-boundary latitudes or None.
    :param boundary_longitudes: Cell-boundary longitudes or None.
    :param cell_area: Per-cell area (m2) or None; only the variable is declared here.
    :param global_attributes: dict of global attributes or None.
    :param RegularLatLon: True for a regular lat-lon grid (default if no flag is set).
    :param Rotated: True for a rotated-pole grid (needs rotated_lats, rotated_lons,
        north_pole_lat, north_pole_lon).
    :param LambertConformalConic: True for an LCC grid (needs lcc_x, lcc_y, lat_1_2,
        lon_0, lat_0).
    :return: None
    """
    from cf_units import Unit, encode_time

    # Deliberately uncompressed: compressed chunks cannot be written concurrently
    # from several MPI ranks.
    compressed = False

    if not (RegularLatLon or LambertConformalConic or Rotated):
        RegularLatLon = True

    netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")

    # ===== Dimensions =====
    if RegularLatLon:
        var_dim = ('lat', 'lon',)

        # Latitude
        if len(grid.center_latitudes.shape) == 1:
            netcdf.createDimension('lat', grid.center_latitudes.shape[0])
            lat_dim = ('lat',)
        elif len(grid.center_latitudes.shape) == 2:
            netcdf.createDimension('lat', grid.center_latitudes.shape[0])
            lat_dim = ('lon', 'lat', )
        else:
            print('ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(
                len(grid.center_latitudes.shape)))
            sys.exit(1)

        # Longitude
        if len(grid.center_longitudes.shape) == 1:
            netcdf.createDimension('lon', grid.center_longitudes.shape[0])
            lon_dim = ('lon',)
        elif len(grid.center_longitudes.shape) == 2:
            netcdf.createDimension('lon', grid.center_longitudes.shape[1])
            lon_dim = ('lon', 'lat', )
        else:
            print('ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(
                len(grid.center_longitudes.shape)))
            sys.exit(1)
    elif Rotated:
        var_dim = ('rlat', 'rlon',)

        # Rotated Latitude
        if rotated_lats is None:
            print('ERROR: For rotated grids is needed the rotated latitudes.')
            sys.exit(1)
        netcdf.createDimension('rlat', len(rotated_lats))
        lat_dim = ('rlat', 'rlon',)

        # Rotated Longitude
        if rotated_lons is None:
            print('ERROR: For rotated grids is needed the rotated longitudes.')
            sys.exit(1)
        netcdf.createDimension('rlon', len(rotated_lons))
        lon_dim = ('rlat', 'rlon',)
    elif LambertConformalConic:
        var_dim = ('y', 'x',)

        netcdf.createDimension('y', len(lcc_y))
        lat_dim = ('y', 'x', )

        netcdf.createDimension('x', len(lcc_x))
        lon_dim = ('y', 'x', )

    # Levels
    if levels is not None:
        netcdf.createDimension('lev', len(levels))

    # Bounds
    if boundary_latitudes is not None:
        netcdf.createDimension('nv', len(boundary_latitudes[0, 0]))

    # Fixed-size time dimension: every rank has to agree on all variable shapes.
    netcdf.createDimension('time', len(hours))

    # ===== Variables =====
    # Time
    if date is None:
        time = netcdf.createVariable('time', 'd', ('time',))
        time.units = "months since 2000-01-01 00:00:00"
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = [0.]
    else:
        time = netcdf.createVariable('time', 'd', ('time',))
        u = Unit('hours')
        # e.g. 'hour since 1970-01-01 00:00:00.0000000 UTC'
        time.units = str(u.offset_by_time(encode_time(
            date.year, date.month, date.day, date.hour, date.minute, date.second)))
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        if rank == 0:
            time[:] = hours

    # Latitude
    lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed)
    lats.units = "degrees_north"
    lats.axis = "Y"
    lats.long_name = "latitude coordinate"
    lats.standard_name = "latitude"
    if rank == 0:
        lats[:] = grid.center_latitudes

    if boundary_latitudes is not None:
        lats.bounds = "lat_bnds"
        lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed)
        if rank == 0:
            lat_bnds[:] = boundary_latitudes

    # Longitude
    lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed)
    lons.units = "degrees_east"
    lons.axis = "X"
    lons.long_name = "longitude coordinate"
    lons.standard_name = "longitude"
    if rank == 0:
        lons[:] = grid.center_longitudes

    if boundary_longitudes is not None:
        lons.bounds = "lon_bnds"
        lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed)
        if rank == 0:
            lon_bnds[:] = boundary_longitudes

    if Rotated:
        # Rotated Latitude
        rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed)
        rlat.long_name = "latitude in rotated pole grid"
        rlat.units = Unit("degrees").symbol
        rlat.standard_name = "grid_latitude"
        if rank == 0:
            rlat[:] = rotated_lats

        # Rotated Longitude
        rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed)
        rlon.long_name = "longitude in rotated pole grid"
        rlon.units = Unit("degrees").symbol
        rlon.standard_name = "grid_longitude"
        if rank == 0:
            rlon[:] = rotated_lons
    if LambertConformalConic:
        x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed)
        x.units = Unit("km").symbol
        x.long_name = "x coordinate of projection"
        x.standard_name = "projection_x_coordinate"
        if rank == 0:
            x[:] = lcc_x

        y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed)
        y.units = Unit("km").symbol
        y.long_name = "y coordinate of projection"
        y.standard_name = "projection_y_coordinate"
        if rank == 0:
            y[:] = lcc_y

    cell_area_dim = var_dim
    # Levels
    if levels is not None:
        var_dim = ('lev',) + var_dim
        lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed)
        lev.units = Unit("m").symbol
        lev.positive = 'up'
        if rank == 0:
            lev[:] = levels

    # All variables (declared only; filled later in parallel).
    if len(data_list) == 0:
        var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed)
        var[:] = 0

    # One chunk per (time step, rank sub-domain): matches the write pattern of the
    # parallel writer. Guard levels=None, where the variables are 3D.
    if levels is None:
        chunk_sizes = (1,
                       grid.x_upper_bound - grid.x_lower_bound,
                       grid.y_upper_bound - grid.y_lower_bound)
    else:
        chunk_sizes = (1, len(levels),
                       grid.x_upper_bound - grid.x_lower_bound,
                       grid.y_upper_bound - grid.y_lower_bound)

    index = 0
    for variable in data_list:
        st_time = gettime()
        index += 1
        print("Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)))
        var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim,
                                    chunksizes=chunk_sizes, zlib=compressed)
        var.units = Unit(variable['units']).symbol
        if 'long_name' in variable:
            var.long_name = str(variable['long_name'])
        if 'standard_name' in variable:
            var.standard_name = str(variable['standard_name'])
        if 'cell_method' in variable:
            var.cell_method = str(variable['cell_method'])
        var.coordinates = "lat lon"
        if cell_area is not None:
            var.cell_measures = 'area: cell_area'
        if RegularLatLon:
            var.grid_mapping = 'crs'
        elif Rotated:
            var.grid_mapping = 'rotated_pole'
        elif LambertConformalConic:
            var.grid_mapping = 'Lambert_conformal'
        print("Rank {0} created var: {1}; time: {2}".format(
            rank, variable['name'], round(gettime() - st_time, 2)))

    # Grid mapping
    if RegularLatLon:
        # CRS
        mapping = netcdf.createVariable('crs', 'i')
        mapping.grid_mapping_name = "latitude_longitude"
        mapping.semi_major_axis = 6371000.0
        mapping.inverse_flattening = 0
    elif Rotated:
        # Rotated pole
        mapping = netcdf.createVariable('rotated_pole', 'c')
        mapping.grid_mapping_name = 'rotated_latitude_longitude'
        mapping.grid_north_pole_latitude = north_pole_lat
        mapping.grid_north_pole_longitude = north_pole_lon
    elif LambertConformalConic:
        # CRS
        mapping = netcdf.createVariable('Lambert_conformal', 'i')
        mapping.grid_mapping_name = "lambert_conformal_conic"
        mapping.standard_parallel = lat_1_2
        mapping.longitude_of_central_meridian = lon_0
        mapping.latitude_of_projection_origin = lat_0

    # Cell area: declared here, filled by the parallel writer.
    if cell_area is not None:
        c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim)
        c_area.long_name = "area of the grid cell"
        c_area.standard_name = "cell_area"
        c_area.units = Unit("m2").symbol

    if global_attributes is not None:
        netcdf.setncatts(global_attributes)

    netcdf.close()
def only_write_chunked_uncompressed_netcdf_parallel(netcdf_path, grid, data_list,
                                                    levels=None, date=None, hours=None,
                                                    boundary_latitudes=None, boundary_longitudes=None,
                                                    cell_area=None, global_attributes=None,
                                                    RegularLatLon=False,
                                                    Rotated=False, rotated_lats=None, rotated_lons=None,
                                                    north_pole_lat=None, north_pole_lon=None,
                                                    LambertConformalConic=False, lcc_x=None, lcc_y=None,
                                                    lat_1_2=None, lon_0=None, lat_0=None):
    """
    Fill, in parallel, the data variables of a NetCDF file previously created by
    only_create_chunked_uncompressed_netcdf_parallel().

    Each MPI rank writes its own sub-domain (grid.x_lower_bound:x_upper_bound,
    grid.y_lower_bound:y_upper_bound) of every variable in data_list, plus its part
    of 'cell_area' when cell_area is given. All the remaining parameters are accepted
    only to mirror the creator's signature and are ignored here.

    :param netcdf_path: Path of the already-created NetCDF file.
    :param grid: Grid object providing the per-rank sub-domain bounds.
    :param data_list: List of dicts with 'name' and 'data' (the rank's sub-array).
    :param cell_area: Per-cell area values for this rank's sub-domain, or None.
    :return: None
    """
    netcdf = Dataset(netcdf_path, mode='r+', format="NETCDF4",
                     parallel=True, comm=settings.comm, info=MPI.Info())

    index = 0
    for variable in data_list:
        st_time = gettime()
        index += 1

        var = netcdf.variables[variable['name']]
        var[:, :, grid.x_lower_bound:grid.x_upper_bound,
            grid.y_lower_bound:grid.y_upper_bound] = variable['data']

        print("TIME -> VarWritten Rank {0} {1} {2}".format(
            rank, variable['name'], round(gettime() - st_time, 2)))

    # The creator only defines 'cell_area' when an area was supplied; guard the
    # write accordingly (the original wrote it unconditionally and could raise).
    if cell_area is not None:
        c_area = netcdf.variables['cell_area']
        c_area[grid.x_lower_bound:grid.x_upper_bound,
               grid.y_lower_bound:grid.y_upper_bound] = cell_area

    netcdf.close()
def write_netcdf_serie(netcdf_path, grid, data_list,
                       levels=None, date=None, hours=None,
                       boundary_latitudes=None, boundary_longitudes=None,
                       cell_area=None, global_attributes=None,
                       RegularLatLon=False,
                       Rotated=False, rotated_lats=None, rotated_lons=None,
                       north_pole_lat=None, north_pole_lon=None,
                       LambertConformalConic=False, lcc_x=None, lcc_y=None,
                       lat_1_2=None, lon_0=None, lat_0=None):
    """
    Write a complete (serial) NetCDF4 file: dimensions, coordinates, grid mapping
    and the data of every variable in data_list.

    Coordinate values are written by rank 0 only; each rank writes its own
    sub-domain (grid.*_bound) of the data variables and of 'cell_area'.

    :param netcdf_path: Path of the NetCDF file to create.
    :param grid: Grid object providing center coordinates and per-rank bounds.
    :param data_list: List of dicts with 'name', 'units', 'data' and optionally
        'long_name', 'standard_name', 'cell_method'.
    :param levels: Vertical level values (m) or None for 2D output.
    :param date: Start date of the time axis; None selects a placeholder monthly axis.
    :param hours: Hour offsets (from `date`) of each time step.
    :param cell_area: Per-cell area (m2) for this rank's sub-domain, or None.
    :param global_attributes: dict of global attributes or None.
    :param RegularLatLon: True for a regular lat-lon grid (default if no flag is set).
    :param Rotated: True for a rotated-pole grid.
    :param LambertConformalConic: True for an LCC grid.
    :return: None
    """
    from cf_units import Unit, encode_time

    if not (RegularLatLon or LambertConformalConic or Rotated):
        RegularLatLon = True

    netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")

    # ===== Dimensions =====
    if RegularLatLon:
        var_dim = ('lat', 'lon',)

        # Latitude
        if len(grid.center_latitudes.shape) == 1:
            netcdf.createDimension('lat', grid.center_latitudes.shape[0])
            lat_dim = ('lat',)
        elif len(grid.center_latitudes.shape) == 2:
            netcdf.createDimension('lat', grid.center_latitudes.shape[0])
            lat_dim = ('lon', 'lat', )
        else:
            print('ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(
                len(grid.center_latitudes.shape)))
            sys.exit(1)

        # Longitude
        if len(grid.center_longitudes.shape) == 1:
            netcdf.createDimension('lon', grid.center_longitudes.shape[0])
            lon_dim = ('lon',)
        elif len(grid.center_longitudes.shape) == 2:
            netcdf.createDimension('lon', grid.center_longitudes.shape[1])
            lon_dim = ('lon', 'lat', )
        else:
            print('ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(
                len(grid.center_longitudes.shape)))
            sys.exit(1)
    elif Rotated:
        var_dim = ('rlat', 'rlon',)

        # Rotated Latitude
        if rotated_lats is None:
            print('ERROR: For rotated grids is needed the rotated latitudes.')
            sys.exit(1)
        netcdf.createDimension('rlat', len(rotated_lats))
        lat_dim = ('rlat', 'rlon',)

        # Rotated Longitude
        if rotated_lons is None:
            print('ERROR: For rotated grids is needed the rotated longitudes.')
            sys.exit(1)
        netcdf.createDimension('rlon', len(rotated_lons))
        lon_dim = ('rlat', 'rlon',)
    elif LambertConformalConic:
        var_dim = ('y', 'x',)

        netcdf.createDimension('y', len(lcc_y))
        lat_dim = ('y', 'x', )

        netcdf.createDimension('x', len(lcc_x))
        lon_dim = ('y', 'x', )

    # Levels
    if levels is not None:
        netcdf.createDimension('lev', len(levels))

    # Bounds
    if boundary_latitudes is not None:
        netcdf.createDimension('nv', len(boundary_latitudes[0, 0]))

    # Unlimited time dimension (serial writer).
    netcdf.createDimension('time', None)

    # ===== Variables =====
    # Time
    if date is None:
        time = netcdf.createVariable('time', 'd', ('time',))
        time.units = "months since 2000-01-01 00:00:00"
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = [0.]
    else:
        time = netcdf.createVariable('time', 'd', ('time',))
        u = Unit('hours')
        # e.g. 'hour since 1970-01-01 00:00:00.0000000 UTC'
        time.units = str(u.offset_by_time(encode_time(
            date.year, date.month, date.day, date.hour, date.minute, date.second)))
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        if rank == 0:
            time[:] = hours

    # Latitude
    lats = netcdf.createVariable('lat', 'f', lat_dim)
    lats.units = "degrees_north"
    lats.axis = "Y"
    lats.long_name = "latitude coordinate"
    lats.standard_name = "latitude"
    if rank == 0:
        lats[:] = grid.center_latitudes

    if boundary_latitudes is not None:
        lats.bounds = "lat_bnds"
        lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',))
        if rank == 0:
            lat_bnds[:] = boundary_latitudes

    # Longitude
    lons = netcdf.createVariable('lon', 'f', lon_dim)
    lons.units = "degrees_east"
    lons.axis = "X"
    lons.long_name = "longitude coordinate"
    lons.standard_name = "longitude"
    if rank == 0:
        lons[:] = grid.center_longitudes

    if boundary_longitudes is not None:
        lons.bounds = "lon_bnds"
        lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',))
        if rank == 0:
            lon_bnds[:] = boundary_longitudes

    if Rotated:
        # Rotated Latitude
        rlat = netcdf.createVariable('rlat', 'f', ('rlat',))
        rlat.long_name = "latitude in rotated pole grid"
        rlat.units = Unit("degrees").symbol
        rlat.standard_name = "grid_latitude"
        if rank == 0:
            rlat[:] = rotated_lats

        # Rotated Longitude
        rlon = netcdf.createVariable('rlon', 'f', ('rlon',))
        rlon.long_name = "longitude in rotated pole grid"
        rlon.units = Unit("degrees").symbol
        rlon.standard_name = "grid_longitude"
        if rank == 0:
            rlon[:] = rotated_lons
    if LambertConformalConic:
        x = netcdf.createVariable('x', 'd', ('x',))
        x.units = Unit("km").symbol
        x.long_name = "x coordinate of projection"
        x.standard_name = "projection_x_coordinate"
        if rank == 0:
            x[:] = lcc_x

        y = netcdf.createVariable('y', 'd', ('y',))
        y.units = Unit("km").symbol
        y.long_name = "y coordinate of projection"
        y.standard_name = "projection_y_coordinate"
        if rank == 0:
            y[:] = lcc_y

    cell_area_dim = var_dim
    # Levels
    if levels is not None:
        var_dim = ('lev',) + var_dim
        lev = netcdf.createVariable('lev', 'f', ('lev',))
        lev.units = Unit("m").symbol
        lev.positive = 'up'
        if rank == 0:
            lev[:] = levels

    # All variables
    if len(data_list) == 0:
        var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim)
        var[:] = 0

    # Chunk per (time step, rank sub-domain); guard levels=None (3D variables).
    if levels is None:
        chunk_sizes = (1,
                       grid.x_upper_bound - grid.x_lower_bound,
                       grid.y_upper_bound - grid.y_lower_bound)
    else:
        chunk_sizes = (1, len(levels),
                       grid.x_upper_bound - grid.x_lower_bound,
                       grid.y_upper_bound - grid.y_lower_bound)

    index = 0
    for variable in data_list:
        st_time = gettime()
        index += 1
        print("Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)))
        var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim,
                                    chunksizes=chunk_sizes)
        var.units = Unit(variable['units']).symbol
        if 'long_name' in variable:
            var.long_name = str(variable['long_name'])
        if 'standard_name' in variable:
            var.standard_name = str(variable['standard_name'])
        if 'cell_method' in variable:
            var.cell_method = str(variable['cell_method'])
        var.coordinates = "lat lon"
        if cell_area is not None:
            var.cell_measures = 'area: cell_area'
        if RegularLatLon:
            var.grid_mapping = 'crs'
        elif Rotated:
            var.grid_mapping = 'rotated_pole'
        elif LambertConformalConic:
            var.grid_mapping = 'Lambert_conformal'

        var[:, :, grid.x_lower_bound:grid.x_upper_bound,
            grid.y_lower_bound:grid.y_upper_bound] = variable['data']

        print("Rank {0} var: {1}; time: {2}".format(
            rank, variable['name'], round(gettime() - st_time, 2)))

    # Grid mapping
    if RegularLatLon:
        # CRS
        mapping = netcdf.createVariable('crs', 'i')
        mapping.grid_mapping_name = "latitude_longitude"
        mapping.semi_major_axis = 6371000.0
        mapping.inverse_flattening = 0
    elif Rotated:
        # Rotated pole
        mapping = netcdf.createVariable('rotated_pole', 'c')
        mapping.grid_mapping_name = 'rotated_latitude_longitude'
        mapping.grid_north_pole_latitude = north_pole_lat
        mapping.grid_north_pole_longitude = north_pole_lon
    elif LambertConformalConic:
        # CRS
        mapping = netcdf.createVariable('Lambert_conformal', 'i')
        mapping.grid_mapping_name = "lambert_conformal_conic"
        mapping.standard_parallel = lat_1_2
        mapping.longitude_of_central_meridian = lon_0
        mapping.latitude_of_projection_origin = lat_0

    # Cell area
    if cell_area is not None:
        c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim)
        c_area.long_name = "area of the grid cell"
        c_area.standard_name = "cell_area"
        c_area.units = Unit("m2").symbol
        c_area[grid.x_lower_bound:grid.x_upper_bound,
               grid.y_lower_bound:grid.y_upper_bound] = cell_area

    if global_attributes is not None:
        netcdf.setncatts(global_attributes)

    netcdf.close()


def gather_netcdf(file_list, output_path, common_vars=None):
    """
    Concatenate the variables of several NetCDF files into a single file.

    Coordinates (lat, lon, lev) are taken from the first input file; every variable
    whose name is not in common_vars is copied into the output.

    :param file_list: Paths of the input NetCDF files.
    :param output_path: Path of the NetCDF file to write.
    :param common_vars: Variable names to skip (shared coordinates). Defaults to an
        empty list (a None default avoids the shared-mutable-default pitfall).
    :return: None
    """
    from timeit import default_timer as get_time

    if common_vars is None:
        common_vars = []

    c_lats, c_lons, levs = extract_vars(file_list[0], ['lat', 'lon', 'lev'])

    st_time = get_time()
    data_list = []

    # 'file_path' instead of 'file': do not shadow the builtin.
    for file_path in file_list:
        nc_in = Dataset(file_path, mode='r')
        # .items() works on both Python 2 and 3 (iteritems() is Python-2-only).
        for var_name, var in nc_in.variables.items():
            if var.name not in common_vars:
                data_list.append({
                    'name': var.name,
                    'data': var[:],
                    'units': var.units,
                })
        nc_in.close()

    write_netcdf(output_path, c_lats['data'], c_lons['data'], data_list, levels=levs['data'])

    print('Time of gathering netcdf: {0}'.format(round(get_time() - st_time, 2)))
# @profile
def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list,
                  levels=None, date=None, hours=None,
                  boundary_latitudes=None, boundary_longitudes=None,
                  cell_area=None, global_attributes=None,
                  RegularLatLon=False,
                  Rotated=False, rotated_lats=None, rotated_lons=None,
                  north_pole_lat=None, north_pole_lon=None,
                  LambertConformalConic=False, lcc_x=None, lcc_y=None,
                  lat_1_2=None, lon_0=None, lat_0=None):
    """
    Create a compressed NetCDF4 file with coordinates, grid mapping, cell area and
    zero-initialised data variables, and return the OPEN dataset handle.

    The caller is responsible for filling the variables (see fill_netcdf) and for
    closing the returned handle.

    :param netcdf_path: Path of the NetCDF file to create.
    :param center_latitudes: 1D or 2D array of cell-center latitudes.
    :param center_longitudes: 1D or 2D array of cell-center longitudes.
    :param data_list: List of dicts with 'name', 'units' and optionally 'long_name',
        'standard_name', 'cell_method'. Variables are initialised to zero.
    :param levels: Vertical level values (m) or None for 2D output.
    :param date: Start date of the time axis; None selects a placeholder monthly axis.
    :param hours: Hour offsets (from `date`) of each time step.
    :param cell_area: Full-grid cell area (m2) or None.
    :param global_attributes: dict of global attributes or None.
    :param RegularLatLon: True for a regular lat-lon grid (default if no flag is set).
    :param Rotated: True for a rotated-pole grid.
    :param LambertConformalConic: True for an LCC grid.
    :return: The open netCDF4.Dataset.
    """
    from cf_units import Unit, encode_time
    import sys
    from netCDF4 import Dataset
    import numpy as np

    if not (RegularLatLon or LambertConformalConic or Rotated):
        RegularLatLon = True

    netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")

    # ===== Dimensions =====
    if RegularLatLon:
        var_dim = ('lat', 'lon',)

        # Latitude
        if len(center_latitudes.shape) == 1:
            netcdf.createDimension('lat', center_latitudes.shape[0])
            lat_dim = ('lat',)
        elif len(center_latitudes.shape) == 2:
            netcdf.createDimension('lat', center_latitudes.shape[0])
            lat_dim = ('lon', 'lat', )
        else:
            print('ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(
                len(center_latitudes.shape)))
            sys.exit(1)

        # Longitude
        if len(center_longitudes.shape) == 1:
            netcdf.createDimension('lon', center_longitudes.shape[0])
            lon_dim = ('lon',)
        elif len(center_longitudes.shape) == 2:
            netcdf.createDimension('lon', center_longitudes.shape[1])
            lon_dim = ('lon', 'lat', )
        else:
            print('ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(
                len(center_longitudes.shape)))
            sys.exit(1)
    elif Rotated:
        var_dim = ('rlat', 'rlon',)

        # Rotated Latitude
        if rotated_lats is None:
            print('ERROR: For rotated grids is needed the rotated latitudes.')
            sys.exit(1)
        netcdf.createDimension('rlat', len(rotated_lats))
        lat_dim = ('rlat', 'rlon',)

        # Rotated Longitude
        if rotated_lons is None:
            print('ERROR: For rotated grids is needed the rotated longitudes.')
            sys.exit(1)
        netcdf.createDimension('rlon', len(rotated_lons))
        lon_dim = ('rlat', 'rlon',)
    elif LambertConformalConic:
        var_dim = ('y', 'x',)

        netcdf.createDimension('y', len(lcc_y))
        lat_dim = ('y', 'x', )

        netcdf.createDimension('x', len(lcc_x))
        lon_dim = ('y', 'x', )

    # Levels
    if levels is not None:
        netcdf.createDimension('lev', len(levels))

    # Bounds
    if boundary_latitudes is not None:
        netcdf.createDimension('nv', len(boundary_latitudes[0, 0]))

    # Time
    netcdf.createDimension('time', None)

    # ===== Variables =====
    # Time
    if date is None:
        time = netcdf.createVariable('time', 'd', ('time',), zlib=True)
        time.units = "months since 2000-01-01 00:00:00"
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = [0.]
    else:
        time = netcdf.createVariable('time', 'd', ('time',), zlib=True)
        u = Unit('hours')
        # e.g. 'hour since 1970-01-01 00:00:00.0000000 UTC'
        time.units = str(u.offset_by_time(encode_time(
            date.year, date.month, date.day, date.hour, date.minute, date.second)))
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = hours

    # Latitude
    lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True)
    lats.units = "degrees_north"
    lats.axis = "Y"
    lats.long_name = "latitude coordinate"
    lats.standard_name = "latitude"
    lats[:] = center_latitudes

    if boundary_latitudes is not None:
        lats.bounds = "lat_bnds"
        lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True)
        lat_bnds[:] = boundary_latitudes

    # Longitude
    lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True)
    lons.units = "degrees_east"
    lons.axis = "X"
    lons.long_name = "longitude coordinate"
    lons.standard_name = "longitude"
    lons[:] = center_longitudes

    if boundary_longitudes is not None:
        lons.bounds = "lon_bnds"
        lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True)
        lon_bnds[:] = boundary_longitudes

    if Rotated:
        # Rotated Latitude
        rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True)
        rlat.long_name = "latitude in rotated pole grid"
        rlat.units = Unit("degrees").symbol
        rlat.standard_name = "grid_latitude"
        rlat[:] = rotated_lats

        # Rotated Longitude
        rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True)
        rlon.long_name = "longitude in rotated pole grid"
        rlon.units = Unit("degrees").symbol
        rlon.standard_name = "grid_longitude"
        rlon[:] = rotated_lons
    if LambertConformalConic:
        x = netcdf.createVariable('x', 'd', ('x',), zlib=True)
        x.units = Unit("km").symbol
        x.long_name = "x coordinate of projection"
        x.standard_name = "projection_x_coordinate"
        x[:] = lcc_x

        y = netcdf.createVariable('y', 'd', ('y',), zlib=True)
        y.units = Unit("km").symbol
        y.long_name = "y coordinate of projection"
        y.standard_name = "projection_y_coordinate"
        y[:] = lcc_y

    cell_area_dim = var_dim
    # Levels
    if levels is not None:
        var_dim = ('lev',) + var_dim
        lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True)
        lev.units = Unit("m").symbol
        lev.positive = 'up'
        lev[:] = levels

    # All variables
    if len(data_list) == 0:
        var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True)
        var[:] = 0
    for variable in data_list:
        var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True)
        var.units = Unit(variable['units']).symbol
        if 'long_name' in variable:
            var.long_name = str(variable['long_name'])
        if 'standard_name' in variable:
            var.standard_name = str(variable['standard_name'])
        if 'cell_method' in variable:
            var.cell_method = str(variable['cell_method'])
        var.coordinates = "lat lon"
        if cell_area is not None:
            var.cell_measures = 'area: cell_area'
        if RegularLatLon:
            var.grid_mapping = 'crs'
        elif Rotated:
            var.grid_mapping = 'rotated_pole'
        elif LambertConformalConic:
            var.grid_mapping = 'Lambert_conformal'
        # Zero-initialise the variable. Dimension sizes are read back from the file
        # itself: the original exec()-built expression referenced non-existent
        # locals ('lat'/'lon' vs 'lats'/'lons') and could not work.
        # NOTE(review): assumes `hours` is not None here — confirm with callers.
        shape = (len(hours),) + tuple(len(netcdf.dimensions[dim_name]) for dim_name in var_dim)
        print(shape)
        var[:] = np.zeros(shape)

    # Grid mapping
    if RegularLatLon:
        # CRS
        mapping = netcdf.createVariable('crs', 'i')
        mapping.grid_mapping_name = "latitude_longitude"
        mapping.semi_major_axis = 6371000.0
        mapping.inverse_flattening = 0
    elif Rotated:
        # Rotated pole
        mapping = netcdf.createVariable('rotated_pole', 'c')
        mapping.grid_mapping_name = 'rotated_latitude_longitude'
        mapping.grid_north_pole_latitude = north_pole_lat
        mapping.grid_north_pole_longitude = north_pole_lon
    elif LambertConformalConic:
        # CRS
        mapping = netcdf.createVariable('Lambert_conformal', 'i')
        mapping.grid_mapping_name = "lambert_conformal_conic"
        mapping.standard_parallel = lat_1_2
        mapping.longitude_of_central_meridian = lon_0
        mapping.latitude_of_projection_origin = lat_0

    # Cell area
    if cell_area is not None:
        c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim)
        c_area.long_name = "area of the grid cell"
        c_area.standard_name = "cell_area"
        c_area.units = Unit("m2").symbol
        c_area[:] = cell_area

    if global_attributes is not None:
        netcdf.setncatts(global_attributes)
    return netcdf
def create_netcdf_close(netcdf_path, center_latitudes, center_longitudes, data_list,
                        levels=None, date=None, hours=None,
                        boundary_latitudes=None, boundary_longitudes=None,
                        cell_area=None, global_attributes=None,
                        RegularLatLon=False,
                        Rotated=False, rotated_lats=None, rotated_lons=None,
                        north_pole_lat=None, north_pole_lon=None,
                        LambertConformalConic=False, lcc_x=None, lcc_y=None,
                        lat_1_2=None, lon_0=None, lat_0=None):
    """
    Create a compressed NetCDF4 file with coordinates, grid mapping, cell area and
    zero-initialised data variables, then close it.

    Same behaviour as create_netcdf() except that the file is closed before
    returning (the original code had `netcdf.close` without parentheses, so the
    dataset was never explicitly closed).

    :param netcdf_path: Path of the NetCDF file to create.
    :param center_latitudes: 1D or 2D array of cell-center latitudes.
    :param center_longitudes: 1D or 2D array of cell-center longitudes.
    :param data_list: List of dicts with 'name', 'units' and optionally 'long_name',
        'standard_name', 'cell_method'. Variables are initialised to zero.
    :param levels: Vertical level values (m) or None for 2D output.
    :param date: Start date of the time axis; None selects a placeholder monthly axis.
    :param hours: Hour offsets (from `date`) of each time step.
    :param cell_area: Full-grid cell area (m2) or None.
    :param global_attributes: dict of global attributes or None.
    :param RegularLatLon: True for a regular lat-lon grid (default if no flag is set).
    :param Rotated: True for a rotated-pole grid.
    :param LambertConformalConic: True for an LCC grid.
    :return: True on completion.
    """
    from cf_units import Unit, encode_time
    import sys
    from netCDF4 import Dataset
    import numpy as np

    if not (RegularLatLon or LambertConformalConic or Rotated):
        RegularLatLon = True

    netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")

    # ===== Dimensions =====
    if RegularLatLon:
        var_dim = ('lat', 'lon',)

        # Latitude
        if len(center_latitudes.shape) == 1:
            netcdf.createDimension('lat', center_latitudes.shape[0])
            lat_dim = ('lat',)
        elif len(center_latitudes.shape) == 2:
            netcdf.createDimension('lat', center_latitudes.shape[0])
            lat_dim = ('lon', 'lat', )
        else:
            print('ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(
                len(center_latitudes.shape)))
            sys.exit(1)

        # Longitude
        if len(center_longitudes.shape) == 1:
            netcdf.createDimension('lon', center_longitudes.shape[0])
            lon_dim = ('lon',)
        elif len(center_longitudes.shape) == 2:
            netcdf.createDimension('lon', center_longitudes.shape[1])
            lon_dim = ('lon', 'lat', )
        else:
            print('ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(
                len(center_longitudes.shape)))
            sys.exit(1)
    elif Rotated:
        var_dim = ('rlat', 'rlon',)

        # Rotated Latitude
        if rotated_lats is None:
            print('ERROR: For rotated grids is needed the rotated latitudes.')
            sys.exit(1)
        netcdf.createDimension('rlat', len(rotated_lats))
        lat_dim = ('rlat', 'rlon',)

        # Rotated Longitude
        if rotated_lons is None:
            print('ERROR: For rotated grids is needed the rotated longitudes.')
            sys.exit(1)
        netcdf.createDimension('rlon', len(rotated_lons))
        lon_dim = ('rlat', 'rlon',)
    elif LambertConformalConic:
        var_dim = ('y', 'x',)

        netcdf.createDimension('y', len(lcc_y))
        lat_dim = ('y', 'x', )

        netcdf.createDimension('x', len(lcc_x))
        lon_dim = ('y', 'x', )

    # Levels
    if levels is not None:
        netcdf.createDimension('lev', len(levels))

    # Bounds
    if boundary_latitudes is not None:
        netcdf.createDimension('nv', len(boundary_latitudes[0, 0]))

    # Time
    netcdf.createDimension('time', None)

    # ===== Variables =====
    # Time
    if date is None:
        time = netcdf.createVariable('time', 'd', ('time',), zlib=True)
        time.units = "months since 2000-01-01 00:00:00"
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = [0.]
    else:
        time = netcdf.createVariable('time', 'd', ('time',), zlib=True)
        u = Unit('hours')
        # e.g. 'hour since 1970-01-01 00:00:00.0000000 UTC'
        time.units = str(u.offset_by_time(encode_time(
            date.year, date.month, date.day, date.hour, date.minute, date.second)))
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = hours

    # Latitude
    lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True)
    lats.units = "degrees_north"
    lats.axis = "Y"
    lats.long_name = "latitude coordinate"
    lats.standard_name = "latitude"
    lats[:] = center_latitudes

    if boundary_latitudes is not None:
        lats.bounds = "lat_bnds"
        lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True)
        lat_bnds[:] = boundary_latitudes

    # Longitude
    lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True)
    lons.units = "degrees_east"
    lons.axis = "X"
    lons.long_name = "longitude coordinate"
    lons.standard_name = "longitude"
    lons[:] = center_longitudes

    if boundary_longitudes is not None:
        lons.bounds = "lon_bnds"
        lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True)
        lon_bnds[:] = boundary_longitudes

    if Rotated:
        # Rotated Latitude
        rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True)
        rlat.long_name = "latitude in rotated pole grid"
        rlat.units = Unit("degrees").symbol
        rlat.standard_name = "grid_latitude"
        rlat[:] = rotated_lats

        # Rotated Longitude
        rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True)
        rlon.long_name = "longitude in rotated pole grid"
        rlon.units = Unit("degrees").symbol
        rlon.standard_name = "grid_longitude"
        rlon[:] = rotated_lons
    if LambertConformalConic:
        x = netcdf.createVariable('x', 'd', ('x',), zlib=True)
        x.units = Unit("km").symbol
        x.long_name = "x coordinate of projection"
        x.standard_name = "projection_x_coordinate"
        x[:] = lcc_x

        y = netcdf.createVariable('y', 'd', ('y',), zlib=True)
        y.units = Unit("km").symbol
        y.long_name = "y coordinate of projection"
        y.standard_name = "projection_y_coordinate"
        y[:] = lcc_y

    cell_area_dim = var_dim
    # Levels
    if levels is not None:
        var_dim = ('lev',) + var_dim
        lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True)
        lev.units = Unit("m").symbol
        lev.positive = 'up'
        lev[:] = levels

    # All variables
    if len(data_list) == 0:
        var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True)
        var[:] = 0
    for variable in data_list:
        var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True)
        var.units = Unit(variable['units']).symbol
        if 'long_name' in variable:
            var.long_name = str(variable['long_name'])
        if 'standard_name' in variable:
            var.standard_name = str(variable['standard_name'])
        if 'cell_method' in variable:
            var.cell_method = str(variable['cell_method'])
        var.coordinates = "lat lon"
        if cell_area is not None:
            var.cell_measures = 'area: cell_area'
        if RegularLatLon:
            var.grid_mapping = 'crs'
        elif Rotated:
            var.grid_mapping = 'rotated_pole'
        elif LambertConformalConic:
            var.grid_mapping = 'Lambert_conformal'
        # Zero-initialise the variable. Dimension sizes are read back from the file
        # itself: the original exec()-built expression referenced non-existent
        # locals ('lat'/'lon' vs 'lats'/'lons') and could not work.
        # NOTE(review): assumes `hours` is not None here — confirm with callers.
        shape = (len(hours),) + tuple(len(netcdf.dimensions[dim_name]) for dim_name in var_dim)
        var[:] = np.zeros(shape)

    # Grid mapping
    if RegularLatLon:
        # CRS
        mapping = netcdf.createVariable('crs', 'i')
        mapping.grid_mapping_name = "latitude_longitude"
        mapping.semi_major_axis = 6371000.0
        mapping.inverse_flattening = 0
    elif Rotated:
        # Rotated pole
        mapping = netcdf.createVariable('rotated_pole', 'c')
        mapping.grid_mapping_name = 'rotated_latitude_longitude'
        mapping.grid_north_pole_latitude = north_pole_lat
        mapping.grid_north_pole_longitude = north_pole_lon
    elif LambertConformalConic:
        # CRS
        mapping = netcdf.createVariable('Lambert_conformal', 'i')
        mapping.grid_mapping_name = "lambert_conformal_conic"
        mapping.standard_parallel = lat_1_2
        mapping.longitude_of_central_meridian = lon_0
        mapping.latitude_of_projection_origin = lat_0

    # Cell area
    if cell_area is not None:
        c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim)
        c_area.long_name = "area of the grid cell"
        c_area.standard_name = "cell_area"
        c_area.units = Unit("m2").symbol
        c_area[:] = cell_area

    if global_attributes is not None:
        netcdf.setncatts(global_attributes)
    # Fixed: the original wrote `netcdf.close` (no call), leaving the file open.
    netcdf.close()
    return True


def fill_netcdf(tstep, nc, data):
    """
    Accumulate one time step of every pollutant in `data` into the variables of an
    open NetCDF dataset.

    :param tstep: Index of the time step to fill.
    :param nc: Open, writable netCDF4.Dataset (any object exposing a `variables`
        mapping of 4D (time, lev, y, x) arrays works).
    :param data: List of dicts with 'name' and 'data'; 'data' is either a 4D array
        (1, nlev, ny, nx) or the integer 0 as an 'empty pollutant' sentinel.
    """
    print('Filling time step number {0}'.format(tstep))

    for pollutant in data:
        var = nc.variables[pollutant['name']]
        values = pollutant['data']
        # The integer 0 is used upstream as a sentinel for 'no data'; an equality
        # test on a numpy array would be ambiguous, so check the type explicitly
        # (the original relied on the fragile `is 0` identity comparison).
        if isinstance(values, int) and values == 0:
            continue
        if values.shape[1] == 1:
            # Single-level pollutant: accumulate the one level into level 0.
            # NOTE(review): the original sliced values[0, :, :] (3D) here, which
            # cannot broadcast into the 2D target — [0, 0, :, :] assumed intended.
            var[tstep, 0, :, :] += values[0, 0, :, :]
        else:
            var[tstep, :, :, :] += values[0, :, :, :]
nc.variables[pollutant['name']] + # print 'SUUUUUUUUUUUUUUM 0', var[:].flatten()[4425], pollutant['name'], tstep, level + # TODO differentiate this two conditions + if pollutant['data'].shape[1] is 1: + var[tstep, level, :, :] = pollutant['data'][0, 0, :, :] + else: + var[tstep, level, :, :] = pollutant['data'][0, :, :] + # print 'SUUUUUUUUUUUUUUM 1', var[:].flatten()[4425], pollutant['name'], tstep, level + + print 'TIME -> fill_netcdf_level_tstep: {0} s'.format(round(gettime() - st_time, 2)) + + + + + +def only_create_netcdf_old(netcdf_path, grid, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + + :param global_attributes: + :type global_attributes: dict + + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :return: + """ + from cf_units import Unit, encode_time + + compressed = False + + if not (RegularLatLon or LambertConformalConic or Rotated): + RegularLatLon = True + + # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(grid.center_latitudes.shape) == 1: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(grid.center_latitudes.shape) == 2: + netcdf.createDimension('lat', 
grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(grid.center_longitudes.shape) == 1: + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(grid.center_longitudes.shape) == 2: + netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if rotated_lats is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(rotated_lats)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if rotated_lons is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' + sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + # sys.exit() + + # Time + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] 
+ else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if rank == 0: + lats[:] = grid.center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) + # print lat_bnds[:].shape, boundary_latitudes.shape + if rank == 0: + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if rank == 0: + lons[:] = grid.center_longitudes + + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) + # print lon_bnds[:].shape, boundary_longitudes.shape + if rank == 0: + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if rank == 0: + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + 
rlon.standard_name = "grid_longitude" + if rank == 0: + rlon[:] = rotated_lons + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if rank == 0: + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if rank == 0: + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) + lev.units = Unit("m").symbol + lev.positive = 'up' + if rank == 0: + lev[:] = levels + # print 'DATA LIIIIST {0}'.format(data_list) +# # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) + var[:] = 0 + + index = 0 + for variable in data_list: + st_time = gettime() + index += 1 + # print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) + # chunksizes=(1, len(levels), + # grid.x_upper_bound - grid.x_lower_bound, + # grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) + # print '1' + + var.units = Unit(variable['units']).symbol + # print '2' + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + # print '3' + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + # print '4' + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + # print '5' + var.coordinates = "lat lon" + # print '6' + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif 
LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + + + # print '8' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) + # print variable + + # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + # print "Rank {0} created var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + # Cell area + if cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() + +def only_create_netcdf(netcdf_path, grid, data_list, levels=None, date=None, hours=None, global_attributes=None): + from cf_units import Unit, encode_time + + # boundary_latitudes=self.grid.boundary_latitudes, + # boundary_longitudes=self.grid.boundary_longitudes, + # 
cell_area=self.grid.cell_area, + # RegularLatLon=True + + compressed = False + RegularLatLon = False + Rotated = False + LambertConformalConic = False + if grid.grid_type == 'global': + RegularLatLon = True + elif grid.grid_type == 'rotated': + Rotated = True + elif grid.grid_type == 'lcc': + LambertConformalConic = True + + # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") + # print 'NETCDF PATH: {0}'.format(netcdf_path) + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(grid.center_latitudes.shape) == 1: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(grid.center_latitudes.shape) == 2: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(grid.center_longitudes.shape) == 1: + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(grid.center_longitudes.shape) == 2: + netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if grid.rlat is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(grid.rlat)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if grid.rlon is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' 
+ sys.exit(1) + netcdf.createDimension('rlon', len(grid.rlon)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if grid.boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(grid.boundary_latitudes[0, 0])) + # sys.exit() + + # Time + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] + else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if settings.rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if settings.rank == 0: + lats[:] = grid.center_latitudes + + if grid.boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) + # print lat_bnds[:].shape, boundary_latitudes.shape + if settings.rank == 0: + lat_bnds[:] = grid.boundary_latitudes + + # 
Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if settings.rank == 0: + lons[:] = grid.center_longitudes + + if grid.boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) + # print lon_bnds[:].shape, boundary_longitudes.shape + if settings.rank == 0: + lon_bnds[:] = grid.boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if settings.rank == 0: + rlat[:] = grid.rlat + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + if settings.rank == 0: + rlon[:] = grid.rlon + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if settings.rank == 0: + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if settings.rank == 0: + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) + lev.units = Unit("m").symbol + lev.positive = 'up' + if settings.rank == 0: + lev[:] = levels + # print 'DATA LIIIIST {0}'.format(data_list) +# # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, 
zlib=compressed) + var[:] = 0 + + index = 0 + for variable in data_list: + st_time = gettime() + index += 1 + # print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) + # chunksizes=(1, len(levels), + # grid.x_upper_bound - grid.x_lower_bound, + # grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) + # print '1' + + var.units = Unit(variable['units']).symbol + # print '2' + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + # print '3' + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + # print '4' + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + # print '5' + var.coordinates = "lat lon" + # print '6' + if grid.cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + + + # print '8' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) + # print variable + + # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + # print "Rank {0} created var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = grid.new_pole_latitude_degrees + 
mapping.grid_north_pole_longitude = 90 - grid.new_pole_longitude_degrees + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + # Cell area + if grid.cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() + + +# @profile +def only_write_netcdf_parallel(netcdf_path, grid, var_names, emision_list): + + # print "Rank {0} 1".format(rank) + + netcdf = Dataset(netcdf_path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + index = 0 + # print "Rank {0} 2".format(rank) + for variable in var_names: + + data = calculate_data_by_var(variable, emision_list, grid.shape) + st_time = gettime() + index += 1 + print "Writing var {1} Rank {0} {2}/{3} ".format(settings.rank, variable, index, len(var_names)) + + var = netcdf.variables[variable] + var.set_collective(True) + var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = data + + # print "Rank {0} ---> 2".format(rank) + # settings.comm.Barrier() + print "TIME -> Written var {1} Rank {0} {2} s\n".format(settings.rank, variable, round(gettime() - st_time, 2)) + + if grid.cell_area is not None: + c_area = netcdf.variables['cell_area'] + c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = grid.cell_area + + netcdf.close() + + +def serial_netcdf(netcdf_path, grid, data_list, emision_list, levels=None, date=None, 
hours=None, global_attributes=None): + from cf_units import Unit, encode_time + + mpi_numpy = False + mpi_vector = True + + # Gathering the index + if mpi_numpy or mpi_vector: + rank_position = np.array([grid.x_lower_bound, grid.x_upper_bound, grid.y_lower_bound, grid.y_upper_bound], dtype='i') + full_position = None + if settings.rank == 0: + full_position = np.empty([settings.size, 4], dtype='i') + settings.comm.Gather(rank_position, full_position, root=0) + # print 'Rank {0} recv: {1} '.format(settings.rank, full_position) + # exit() + # if rank != 0: + # + # for variable in data_list: + # rank_data = calculate_data_by_var(variable['name'], emision_list, grid.shape) + # settings.comm.gather(rank_data, root=0) + + if settings.rank == 0: + compressed = True + + RegularLatLon = False + Rotated = False + LambertConformalConic = False + + LambertConformalConic = False + if grid.grid_type == 'global': + RegularLatLon = True + elif grid.grid_type == 'rotated': + Rotated = True + elif grid.grid_type == 'lcc': + LambertConformalConic = True + + + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") + # print 'NETCDF PATH: {0}'.format(netcdf_path) + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(grid.center_latitudes.shape) == 1: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(grid.center_latitudes.shape) == 2: + netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + lat_dim = ('lon', 'lat', ) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(grid.center_longitudes.shape) == 1: + netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(grid.center_longitudes.shape) == 2: + netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + lon_dim = ('lon', 'lat', ) + else: + print 'ERROR: Longitudes must be on a 1D or 2D 
array instead of {0}'.format(len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if grid.rlat is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' + sys.exit(1) + netcdf.createDimension('rlat', len(grid.rlat)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if grid.rlon is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' + sys.exit(1) + netcdf.createDimension('rlon', len(grid.rlon)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if grid.boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(grid.boundary_latitudes[0, 0])) + # sys.exit() + + # Time + # netcdf.createDimension('time', None) + netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] 
+ else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + lats[:] = grid.center_latitudes + + if grid.boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) + # print lat_bnds[:].shape, boundary_latitudes.shape + lat_bnds[:] = grid.boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + lons[:] = grid.center_longitudes + + if grid.boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) + # print lon_bnds[:].shape, boundary_longitudes.shape + lon_bnds[:] = grid.boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + rlat[:] = grid.rlat + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + rlon[:] = grid.rlon + if 
LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) + lev.units = Unit("m").symbol + lev.positive = 'up' + lev[:] = levels + # print 'DATA LIIIIST {0}'.format(data_list) + # # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) + var[:] = 0 + + full_shape = None + index = 0 + for variable in data_list: + rank_data = calculate_data_by_var(variable['name'], emision_list, grid.shape) + if mpi_numpy or mpi_vector: + if rank_data is not None: + root_shape = settings.comm.bcast(rank_data.shape, root=0) + if full_shape is None: + # rank_shape = rank_data.shape + # full_shape = settings.comm.gather(rank_data.shape, root=0) + full_shape = settings.comm.allgather(rank_data.shape) + # print 'Rank {0} full_shape: {1}\n'.format(settings.rank, full_shape) + if mpi_numpy: + if settings.size != 1: + if settings.rank == 0: + recvbuf = np.empty((settings.size,) + rank_data.shape) + else: + recvbuf = None + if root_shape != rank_data.shape: + rank_data_aux = np.empty(root_shape) + rank_data_aux[:, :, :, :-1] = rank_data + rank_data = rank_data_aux + print 'Rank {0} data.shape {1}'.format(settings.rank, rank_data.shape) + settings.comm.Gather(rank_data, recvbuf, root=0) + else: + recvbuf = rank_data + elif mpi_vector: + if rank_data is not None: + counts_i = tuple_to_index(full_shape) + rank_buff = [rank_data, counts_i[settings.rank]] + if settings.rank == 0: + displacements = 
calculate_displacements(counts_i) + recvdata = np.empty(sum(counts_i), dtype=settings.precision) + else: + displacements = None + recvdata = None + if settings.precision == np.float32: + recvbuf = [recvdata, counts_i, displacements, MPI.FLOAT] + elif settings.precision == np.float64: + recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE] + else: + print 'ERROR: precission {0} unknown'.format(settings.precision) + + # print "Rank {0} sendbuf: {1}".format(settings.rank, sendbuf) + + settings.comm.Gatherv(rank_buff, recvbuf, root=0) + + else: + if settings.size != 1: + data = settings.comm.gather(rank_data, root=0) + else: + data = rank_data + + # print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) + # print ('time',) + var_dim + # data = calculate_data_by_var(variable['name'], emision_list, grid.shape) + + if settings.rank == 0: + print full_shape + # print recvbuf.shape + if not (mpi_numpy or mpi_vector): + if settings.size != 1: + try: + data = np.concatenate(data, axis=3) + except: + print 'var: {0} data: {1}'.format(variable['name'], data) + data = 0 + # print 'data shape 0 : {0}'.format(np.concatenate(data, axis=0).shape) + # print 'data shape 1 : {0}'.format(np.concatenate(data, axis=1).shape) + # print 'data shape 2 : {0}'.format(np.concatenate(data, axis=2).shape) + # print 'data shape 3 : {0}'.format(np.concatenate(data, axis=3).shape) + # exit() + + # data = fix_gathered_data(data, (len(hours), len(levels), )) + st_time = gettime() + index += 1 + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) + # chunksizes=(1, len(levels), + # grid.x_upper_bound - grid.x_lower_bound, + # grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) + # print '1' + + var.units = Unit(variable['units']).symbol + # print '2' + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + # print '3' + if 'standard_name' in variable: + var.standard_name = 
str(variable['standard_name']) + # print '4' + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + # print '5' + var.coordinates = "lat lon" + # print '6' + if grid.cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + + + # print '8' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) + # print variable + # + if mpi_numpy: + data = np.ones(var[:].shape, dtype=settings.precision) * 100 + for i in xrange(settings.size): + print i, full_shape[i] + # print recvbuf[i, :, :, :full_shape[i][-2], : full_shape[i][-1]].shape + # print i + if True: + try: + if i == 0: + # print full_position[i] + # data[:, :, :, :full_position[i][3]] = recvbuf[i] + var[:, :, :, :full_position[i][3]] = recvbuf[i] + elif i == settings.size - 1: + # data[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, : full_shape[i][-1]] + # data[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1 ] + var[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1] + else: + # data[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, : full_shape[i][-1]] + var[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, + : full_shape[i][-1]] + # data[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :,1:] + # data[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, : full_shape[i][-1]] + except: + print 'ERROR on i {0}'.format(i) + print 'data shape: {0} recvbuf shape {1}'.format(data[:, :, :, full_position[i][2]:].shape, recvbuf[i].shape) + # print 'data shape: {0} recvbuf shape {1}'.format(data[:, :, :, full_position[i][2]:full_position[i][3]].shape, recvbuf[i, :, :, :, : full_shape[i][-1]].shape) + + 
+ # data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :full_shape[i][-2] +1, : full_shape[i][-1]+1] + # var[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :full_shape[i][-2], : full_shape[i][-1]] + # print data[:] - var[:] + # var[:] = data + elif mpi_vector: + var_time = gettime() + + # data_list = []#np.empty(shape, dtype=np.float64) + + if rank_data is not None: + data = np.empty(var[:].shape, dtype=settings.precision) + for i in xrange(settings.size): + # print 'Resizeing {0}'.format(i) + if not i == settings.size - 1: + # print dspls_i[i], dspls_i[i + 1], recvbuf[0][0:9], counts[i] + # data_aux = np.array(recvbuf[0][dspls_i[i]: dspls_i[i + 1]]).reshape(counts[i]) + data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) + else: + # data_aux = np.array(recvbuf[0][dspls_i[i]:]).reshape(counts[i]) + data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) + # data_list.append(data_aux) + # data = np.concatenate(data_list, axis=1) + else: + data = 0 + print "Var {0} comm time: {1}".format(variable['name'], round(gettime() - var_time, 2)) + var[:] = data + else: + var[:] = data + print "Var {0} writing time: {1}".format(variable['name'], round(gettime() - st_time, 2)) + # exit() + + + if settings.rank == 0: + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = 
grid.new_pole_latitude_degrees + mapping.grid_north_pole_longitude = 90 - grid.new_pole_longitude_degrees + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + if grid.cell_area is not None: + # counts_i = tuple_to_index(full_shape, bidimensional=True) + # # print grid.cell_area.shape + # rank_buff = [np.ascontiguousarray(grid.cell_area, dtype=np.float32), counts_i[settings.rank]] + # if settings.rank == 0: + # displacements = calculate_displacements(counts_i) + # recvdata = np.empty(sum(counts_i), dtype=settings.precision) + # else: + # displacements = None + # recvdata = None + # recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE] + # + # # print "Rank {0} sendbuf: {1}".format(settings.rank, sendbuf) + # + # settings.comm.Gatherv(rank_buff, recvbuf, root=0) + + cell_area = settings.comm.gather(grid.cell_area, root=0) + # cell_area = np.concatenate(cell_area, axis=1) + if settings.rank == 0: + # Cell area + if grid.cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + # cell_area = np.zeros(c_area[:].shape, dtype=settings.precision) + # # for i in xrange(settings.size): + # for i in xrange(1): + # print recvbuf[0][:10] + # print np.ascontiguousarray(grid.cell_area, dtype=np.float32).flatten()[:10] + # # print 'Resizeing {0}'.format(i) + # print full_shape[i] + # if not i == settings.size - 1: + # # print dspls_i[i], dspls_i[i + 1], recvbuf[0][0:9], counts[i] + # # data_aux = np.array(recvbuf[0][dspls_i[i]: dspls_i[i + 
1]]).reshape(counts[i]) + # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape((full_shape[i][-2], full_shape[i][-1])) + # # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = grid.cell_area + # else: + # # data_aux = np.array(recvbuf[0][dspls_i[i]:]).reshape(counts[i]) + # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape((full_shape[i][-2], full_shape[i][-1])) + # # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = grid.cell_area + # + cell_area = np.concatenate(cell_area, axis=1) + c_area[:] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() + +def only_create_write_netcdf(netcdf_path, grid, var_names, emision_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + + :param global_attributes: + :type global_attributes: dict + + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :return: + """ + from cf_units import Unit, encode_time + + compressed = False + + if not (RegularLatLon or LambertConformalConic or Rotated): + RegularLatLon = True + + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, 
comm=settings.comm, info=MPI.Info())
+    # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")
+
+    # ===== Dimensions =====
+    if RegularLatLon:
+        var_dim = ('lat', 'lon',)
+
+        # Latitude
+        if len(grid.center_latitudes.shape) == 1:
+            netcdf.createDimension('lat', grid.center_latitudes.shape[0])
+            lat_dim = ('lat',)
+        elif len(grid.center_latitudes.shape) == 2:
+            netcdf.createDimension('lat', grid.center_latitudes.shape[0])
+            lat_dim = ('lon', 'lat', )
+        else:
+            # Fixed: message previously read 'center_latitudes', an undefined name in this
+            # function (the data lives on the 'grid' argument), so this error path itself
+            # raised a NameError instead of printing the diagnostic.
+            print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(grid.center_latitudes.shape))
+            sys.exit(1)
+
+        # Longitude
+        if len(grid.center_longitudes.shape) == 1:
+            netcdf.createDimension('lon', grid.center_longitudes.shape[0])
+            lon_dim = ('lon',)
+        elif len(grid.center_longitudes.shape) == 2:
+            netcdf.createDimension('lon', grid.center_longitudes.shape[1])
+            lon_dim = ('lon', 'lat', )
+        else:
+            # Fixed: same undefined-name bug ('center_longitudes' -> grid.center_longitudes).
+            print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(grid.center_longitudes.shape))
+            sys.exit(1)
+    elif Rotated:
+        var_dim = ('rlat', 'rlon',)
+
+        # Rotated Latitude
+        if rotated_lats is None:
+            print 'ERROR: For rotated grids is needed the rotated latitudes.'
+            sys.exit(1)
+        netcdf.createDimension('rlat', len(rotated_lats))
+        lat_dim = ('rlat', 'rlon',)
+
+        # Rotated Longitude
+        if rotated_lons is None:
+            print 'ERROR: For rotated grids is needed the rotated longitudes.'
+ sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + + elif LambertConformalConic: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x', ) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x', ) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + # sys.exit() + + # Time + netcdf.createDimension('time', None) + # netcdf.createDimension('time', len(hours)) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',)) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] + else: + time = netcdf.createVariable('time', 'd', ('time',)) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + if rank == 0: + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if rank == 0: + lats[:] = grid.center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) + # print lat_bnds[:].shape, boundary_latitudes.shape + if rank == 0: + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', 
lon_dim, zlib=compressed) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if rank == 0: + lons[:] = grid.center_longitudes + + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) + # print lon_bnds[:].shape, boundary_longitudes.shape + if rank == 0: + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + if rank == 0: + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + if rank == 0: + rlon[:] = rotated_lons + if LambertConformalConic: + x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + if rank == 0: + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + if rank == 0: + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) + lev.units = Unit("m").symbol + lev.positive = 'up' + if rank == 0: + lev[:] = levels + # print 'DATA LIIIIST {0}'.format(data_list) +# # All variables + if len(var_names) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) + var[:] = 0 + + index = 0 + for variable in var_names: + st_time = gettime() + index += 1 + print 
"Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(var_names)) + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) + # print '1' + + var.units = Unit(variable['units']).symbol + # print '2' + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + # print '3' + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + # print '4' + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + # print '5' + var.coordinates = "lat lon" + # print '6' + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + + var.set_collective(True) + + data = calculate_data_by_var(variable['name'], emision_list, grid.shape) + + var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = data + + # print '8' + # if variable['data'] is not 0: + # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) + # print variable + + # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + + print "Rank {0} created var: {1}; time: {2} s".format(rank, variable['name'], round(gettime() - st_time, 2)) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + 
# CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + + # Cell area + if cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() + +# @profile +def calculate_data_by_var(variable, inventory_list, shape): + # TODO Documentation + """ + + :param variable: + :param inventory_list: + :param shape: + :return: + """ + from timeit import default_timer as gettime + + st_time = gettime() + # print 'Getting Data' + # print variable, shape + + data = None + # data = np.zeros(shape) + + for ei in inventory_list: + for emission in ei.emissions: + if emission['name'] == variable: + if emission['data'] is not 0: + # print variable + + if ei.vertical_factors is not None: + aux_data = emission['data'] * ei.vertical_factors[:, np.newaxis, np.newaxis] + else: + if not len(emission['data'].shape) == 4: + aux_data = np.zeros((shape[1], shape[2], shape[3])) + aux_data[0, :, :] = emission['data'] + else: + aux_data = emission['data'] + + del emission['data'] + + if ei.temporal_factors is not None: + if data is None: + data = aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] + else: + data += aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] + else: + if data is None: + data = aux_data[np.newaxis, :, :, :] + else: + data += aux_data[np.newaxis, :, :, :] + + print "TIME -> get_data_by_var: Rank {2} {0} -> {1} s\n".format(variable, round(gettime() - st_time, 4), 
settings.rank) + + return data + + +def tuple_to_index(tuple_list, bidimensional=False): + from operator import mul + new_list = [] + for tuple in tuple_list: + if bidimensional: + new_list.append(tuple[-1] * tuple[-2]) + else: + new_list.append(reduce(mul, tuple)) + return new_list + + +def calculate_displacements(counts): + new_list = [0] + accum = 0 + for counter in counts[:-1]: + accum += counter + new_list.append(accum) + return new_list + + +if __name__ == '__main__': + pass + + + + diff --git a/preproc/ceds_preproc.py b/preproc/ceds_preproc.py new file mode 100644 index 0000000..79f4f67 --- /dev/null +++ b/preproc/ceds_preproc.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ + +# ============== CONFIGURATION PARAMETERS ====================== +input_path = '/esarchive/recon/jgcri/ceds/original_files' +output_path = '/esarchive/recon/jgcri/ceds' +list_pollutants = ['BC', 'CO', 'NH3', 'NMVOC', 'NOx', 'OC', 'SO2'] +voc_pollutants = ['VOC01', 'VOC02', 'VOC03', 'VOC04', 'VOC05', 'VOC06', 'VOC07', 'VOC08', 'VOC09', 'VOC12', 'VOC13', 'VOC14', 'VOC15', 'VOC16', 'VOC17', 'VOC18', 'VOC19', 'VOC20', 'VOC21', 'VOC22', 'VOC23', 'VOC24', 'VOC25'] + +list_sectors = ['agriculture', 'energy', 'industry', 'transport', 'residential', 'solvents', 'waste', 'ships'] +#list_years = from 1950 to 2014 +list_years = [2010] +input_name = '-em-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim_gr_01-12.nc' +voc_input_name = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim-supplemental-data_gr_01-12.nc' +do_air = True +air_input_name = '-em-AIR-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26_gr_01-12.nc' +# ============================================================== + +import sys +import os + + +def voc_to_vocname(voc): + voc_dict = { + 'VOC01': 'alcohols', + 'VOC02': 'ethane', + 'VOC03': 'propane', + 'VOC04': 'butanes', + 'VOC05': 'pentanes', + 'VOC06': 'hexanes-pl', + 'VOC07': 'ethene', + 'VOC08': 'propene', + 'VOC09': 'ethyne', + 'VOC12': 'other-alke', + 'VOC13': 'benzene', + 'VOC14': 'toluene', + 'VOC15': 'xylene', + 'VOC16': 'trimethylb', + 'VOC17': 'other-arom', + 'VOC18': 'esters', + 'VOC19': 'ethers', + 'VOC20': 'chlorinate', + 'VOC21': 'methanal', + 'VOC22': 'other-alka', + 'VOC23': 'ketones', + 'VOC24': 'acids', + 'VOC25': 'other-voc' + } + + return voc_dict[voc] + + +def sector_to_index(sector): + sector_dict = { + 'agriculture': 0, + 'energy': 1, + 'industry': 2, + 'transport': 3, + 'residential': 4, + 'solvents': 5, + 'waste': 6, + 'ships': 7 + } + + return sector_dict[sector] + + +def get_input_name(pollutant, year, air=False): + if air: + file_name = air_input_name.replace('', pollutant) + elif pollutant in 
list_pollutants:
+        file_name = input_name.replace('', pollutant)
+    elif pollutant in voc_pollutants:
+        file_name = voc_input_name.replace('', '{0}-{1}'.format(pollutant, voc_to_vocname(pollutant)))
+    else:
+        raise ValueError('Pollutant {0} not in pollutant list or voc list'.format(pollutant))
+
+    if year < 1851 or year > 2014:
+        raise ValueError('Select a year between 1851 and 2014')
+    elif year <= 1899:
+        file_name = file_name.replace('', str(1851)).replace('', str(1899))
+    elif year <= 1949:
+        file_name = file_name.replace('', str(1900)).replace('', str(1949))
+    # Fixed: this branch previously repeated "year <= 1949", making it unreachable,
+    # so every year in 1950-1999 silently fell through to the 2000-2014 file bucket.
+    elif year <= 1999:
+        file_name = file_name.replace('', str(1950)).replace('', str(1999))
+    else:
+        file_name = file_name.replace('', str(2000)).replace('', str(2014))
+
+    return os.path.join(input_path, file_name)
+
+
+def get_full_year_data(file_name, pollutant, sector, year, air=False):
+    """Read 12 monthly time steps for one pollutant/sector starting at January of 'year'."""
+    from netCDF4 import Dataset
+    from datetime import datetime
+    import cf_units
+    import numpy as np
+
+    nc = Dataset(file_name, mode='r')
+
+    time = nc.variables['time']
+
+    # Normalise every timestamp to the first day of its month so the January lookup below matches.
+    time_array = cf_units.num2date(time[:], time.units, time.calendar)
+    time_array = np.array([datetime(year=x.year, month=x.month, day=1) for x in time_array])
+
+    i_time = np.where(time_array == datetime(year=year, month=1, day=1))[0][0]
+    if air:
+        data = nc.variables['AIR'][i_time:i_time + 12, :, :, :]
+    elif pollutant in list_pollutants:
+        data = nc.variables['{0}_em_anthro'.format(pollutant)][i_time:i_time+12, sector_to_index(sector), :, :]
+    elif pollutant in voc_pollutants:
+        data = nc.variables['{0}-{1}_em_speciated_VOC'.format(pollutant, voc_to_vocname(pollutant).replace('-', '_'))][i_time:i_time+12, sector_to_index(sector), :, :]
+    nc.close()
+
+    return data
+
+
+def get_global_attributes(file_name):
+    """Return every global attribute of the given NetCDF file as a plain dict."""
+    from netCDF4 import Dataset
+
+    nc = Dataset(file_name, mode='r')
+
+    atts_dict = {}
+    for name in nc.ncattrs():
+        atts_dict[name] = nc.getncattr(name)
+
+    nc.close()
+    return atts_dict
+
+
+def do_transformation(year):
+    from datetime import 
datetime + from hermesv3_gr.tools.netcdf_tools import extract_vars, get_grid_area, write_netcdf + for pollutant in list_pollutants + voc_pollutants: + file_name = get_input_name(pollutant, year) + if os.path.exists(file_name): + c_lats, c_lons, b_lats, b_lons = extract_vars(file_name, ['lat', 'lon', 'lat_bnds', 'lon_bnds']) + cell_area = get_grid_area(file_name) + + global_attributes = get_global_attributes(file_name) + for sector in list_sectors: + data = get_full_year_data(file_name, pollutant, sector, year) + + if pollutant == 'NOx': + pollutant_name = 'nox_no2' + else: + pollutant_name = pollutant.lower() + + file_path = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant_name, sector)) + if not os.path.exists(file_path): + os.makedirs(file_path) + + for month in xrange(1, 12 + 1, 1): + emission = { + 'name': pollutant_name, + 'units': 'kg.m-2.s-1', + 'data': data[month - 1, :, :].reshape((1,) + cell_area.shape) + } + write_netcdf(os.path.join(file_path, '{0}_{1}{2}.nc'.format(pollutant_name, year, str(month).zfill(2))), + c_lats['data'], c_lons['data'], [emission], + date=datetime(year=year, month=month, day=1), + boundary_latitudes=b_lats['data'], boundary_longitudes=b_lons['data'], + cell_area=cell_area, global_attributes=global_attributes) + else: + raise IOError('File not found {0}'.format(file_name)) + + +def do_air_transformation(year): + from datetime import datetime + from hermesv3_gr.tools.netcdf_tools import extract_vars, get_grid_area, write_netcdf + + for pollutant in list_pollutants: + file_name = get_input_name(pollutant, year, air=True) + if os.path.exists(file_name): + c_lats, c_lons, b_lats, b_lons = extract_vars(file_name, ['lat', 'lon', 'lat_bnds', 'lon_bnds']) + cell_area = get_grid_area(file_name) + + global_attributes = get_global_attributes(file_name) + + data = get_full_year_data(file_name, pollutant, None, year, air=True) + + if pollutant == 'NOx': + pollutant_name = 'nox_no2' + else: + pollutant_name = 
pollutant.lower() + + for sector in ['air_lto', 'air_cds', 'air_crs']: + file_path = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant_name, sector)) + if not os.path.exists(file_path): + os.makedirs(file_path) + + if sector == 'air_lto': + data_aux = data[:, 0:1 + 1, :, :].sum(axis=1) + elif sector == 'air_cds': + data_aux = data[:, 2:14 + 1, :, :].sum(axis=1) + elif sector == 'air_crs': + data_aux = data[:, 15:24 + 1, :, :].sum(axis=1) + + for month in xrange(1, 12 + 1, 1): + emission = { + 'name': pollutant_name, + 'units': 'kg.m-2.s-1', + 'data': data_aux[month - 1, :, :].reshape((1,) + cell_area.shape) + } + write_netcdf(os.path.join(file_path, '{0}_{1}{2}.nc'.format(pollutant_name, year, str(month).zfill(2))), + c_lats['data'], c_lons['data'], [emission], + date=datetime(year=year, month=month, day=1), + boundary_latitudes=b_lats['data'], boundary_longitudes=b_lons['data'], + cell_area=cell_area, global_attributes=global_attributes) + else: + raise IOError('File not found {0}'.format(file_name)) + + +if __name__ == '__main__': + for y in list_years: + # do_transformation(y) + if do_air: + do_air_transformation(y) + + diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py new file mode 100644 index 0000000..197abb7 --- /dev/null +++ b/preproc/eclipsev5a_preproc.py @@ -0,0 +1,335 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +# ============== CONFIGURATION PARAMETERS ====================== +input_path = '/esarchive/recon/iiasa/eclipsev5a/original_files' +output_path = '/esarchive/recon/iiasa/eclipsev5a/original_files/test' +input_name = 'ECLIPSE_base_CLE_V5a_.nc' +input_name_flaring = 'ECLIPSE_V5a_baseline_CLE_flaring.nc' +input_name_ship = "ship_CLE_emis_.nc" +monthly_pattern_file = 'ECLIPSEv5_monthly_patterns.nc' +list_years = [1990, 1995, 2000, 2005, 2010, 2015, 2020, 2025, 2030, 2040, 2050] +list_pollutants = ['BC', 'CH4', 'CO', 'NH3', 'NOx', 'OC', 'OM', 'PM10', 'PM25', 'SO2', 'VOC'] +# ============================================================== + +import os +import sys + +from datetime import datetime +from netCDF4 import Dataset +import numpy as np +from cf_units import Unit +from hermesv3_gr.tools.coordinates_tools import * + +month_factor = 1000000. / (30. * 24. * 3600.) # To pass from kt/month to Kg/s +year_factor = 1000000. / (365. * 24. * 3600.) 
# To pass from kt/year to Kg/s +var_units = 'kg.m-2.s-1' + + +def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cell_area, date): + print output_name_path + # Creating NetCDF & Dimensions + nc_output = Dataset(output_name_path, mode='w', format="NETCDF4") + nc_output.createDimension('nv', 2) + nc_output.createDimension('lon', center_lons.shape[0]) + nc_output.createDimension('lat', center_lats.shape[0]) + nc_output.createDimension('time', None) + + # TIME + time = nc_output.createVariable('time', 'd', ('time',), zlib=True) + # time.units = "{0} since {1}".format(tstep_units, global_atts['Start_DateTime'].strftime('%Y-%m-%d %H:%M:%S')) + time.units = "hours since {0}".format(date.strftime('%Y-%m-%d %H:%M:%S')) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0] + + # LATITUDE + lat = nc_output.createVariable('lat', 'f', ('lat',), zlib=True) + lat.bounds = "lat_bnds" + lat.units = "degrees_north" + lat.axis = "Y" + lat.long_name = "latitude" + lat.standard_name = "latitude" + lat[:] = center_lats + + lat_bnds = nc_output.createVariable('lat_bnds', 'f', ('lat', 'nv',), zlib=True) + lat_bnds[:] = create_bounds(center_lats) + + # LONGITUDE + lon = nc_output.createVariable('lon', 'f', ('lon',), zlib=True) + lon.bounds = "lon_bnds" + lon.units = "degrees_east" + lon.axis = "X" + lon.long_name = "longitude" + lon.standard_name = "longitude" + lon[:] = center_lons + + lon_bnds = nc_output.createVariable('lon_bnds', 'f', ('lon', 'nv',), zlib=True) + lon_bnds[:] = create_bounds(center_lons) + + for var in data_list: + # VARIABLE + nc_var = nc_output.createVariable(var['name'], 'f', ('time', 'lat', 'lon',), zlib=True) + nc_var.units = var['units'].symbol + nc_var.long_name = var['long_name'] + nc_var.coordinates = 'lat lon' + nc_var.grid_mapping = 'crs' + nc_var.cell_measures = 'area: cell_area' + nc_var[:] = var['data'] + + # CELL AREA + cell_area = nc_output.createVariable('cell_area', 'f', ('lat', 
'lon',))
+    cell_area.long_name = "area of the grid cell"
+    cell_area.standard_name = "area"
+    cell_area.units = "m2"
+    cell_area[:] = grid_cell_area
+
+    # CRS
+    crs = nc_output.createVariable('crs', 'i')
+    crs.grid_mapping_name = "latitude_longitude"
+    crs.semi_major_axis = 6371000.0
+    crs.inverse_flattening = 0
+
+    nc_output.setncattr('title', 'ECLIPSEv5a inventory')
+    nc_output.setncattr('Conventions', 'CF-1.6', )
+    nc_output.setncattr('institution', 'IIASA', )
+    nc_output.setncattr('source', 'IIASA', )
+    nc_output.setncattr('history', 'Re-writing of the ECLIPSEv5a input to follow the CF-1.6 conventions;\n' +
+                                   '2017-11-28: Creating;\n')
+    # Fixed: setncattr(name, value) takes two arguments; the URL was being passed as the
+    # attribute name with no value, which raises a TypeError at runtime. Store it under
+    # the CF-conventional 'references' global attribute instead.
+    nc_output.setncattr('references', 'http://www.iiasa.ac.at/web/home/research/researchPrograms/air/ECLIPSEv5a.html')
+    nc_output.setncattr('comment', 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
+                                   '(Barcelona Supercomputing Center)', )
+
+    nc_output.close()
+
+
+def extract_sector_by_name(name):
+    """Map an ECLIPSE variable name to its sector label, or None when unmapped."""
+    sector_dict = {
+        'emis_agr': 'agriculture',
+        'emis_awb': 'agriculture_waste',
+        'emis_dom': 'residential',
+        'emis_ene': 'energy',
+        'emis_ind': 'industry',
+        'emis_tra': 'transport',
+        'emis_wst': 'waste',
+    }
+
+    try:
+        return_value = sector_dict[name]
+    except KeyError:
+        return_value = None
+
+    return return_value
+
+
+def extract_month_profile_by_sector(sector, month, pollutant=None):
+    """Return the (NaN-free) monthly pattern grid for a sector; NH3 has its own profile."""
+    sector_dict = {
+        'residential': 'dom',
+        'energy': 'ene',
+        'agriculture_waste': 'agr_awb',
+        'agriculture': 'agr',
+        'industry': 'ind',
+        'transport': 'tra',
+        'waste': 'other',
+    }
+    # NOTE(review): the sector comparison literal appears to have been lost in transit
+    # (it compares against '') -- presumably it should match the agriculture sector for
+    # NH3; confirm against the original repository before relying on this branch.
+    if sector == '' and pollutant == 'NH3':
+        profile_name = 'agr_NH3'
+    else:
+        profile_name = sector_dict[sector]
+
+    nc_profiles = Dataset(os.path.join(input_path, monthly_pattern_file), mode='r')
+
+    profile = nc_profiles.variables[profile_name][month, :, :]
+
+    nc_profiles.close()
+    profile = np.nan_to_num(profile)
+    # profile = profile.nan_to_num()
+    return profile
+
+
+def get_output_name(pollutant, sector, year, month):
+    output_path_aux = 
os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant, sector), ) + + if not(os.path.exists(output_path_aux)): + os.makedirs(output_path_aux) + return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, datetime(year=year, month=month, day=1).strftime('%Y%m'))) + + +def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area): + for i in xrange(len(list_years)): + + for month in xrange(12): + # print i, list_years[i], month + 1 + if pollutant == 'NOx': + pollutant_name = 'nox_no2' + elif pollutant == 'VOC': + pollutant_name = 'nmvoc' + else: + pollutant_name = pollutant.lower() + output_name = get_output_name(pollutant_name.lower(), sector.lower(), list_years[i], month + 1) + profile = extract_month_profile_by_sector(sector, month, pollutant) + data_aux = data[i, :, :] * profile + # print factor + data_aux = (data_aux * month_factor) / cell_area + # #data_aux = data_aux / cell_area + # print 'original: ', data[i, 192, 404] + # print 'factor: ', profile[192, 404] + # print 'destiny', data_aux[192, 404] + data_aux = data_aux.reshape((1,) + data_aux.shape) + data_list = [{ + 'name': pollutant_name, + 'long_name': pollutant_name, + 'data': data_aux, + 'units': Unit(var_units), + }] + write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, datetime(year=list_years[i], month=month + 1, day=1)) + + +def do_transformation(): + for pollutant in list_pollutants: + file_name = os.path.join(input_path, input_name.replace('', pollutant)) + print file_name + nc = Dataset(file_name, mode='r') + c_lats = nc.variables['lat'][:] + c_lons = nc.variables['lon'][:] + cell_area = get_grid_area(file_name) + for var in nc.variables: + sector = extract_sector_by_name(var) + + if sector is not None: + do_single_transformation(pollutant, sector, nc.variables[var][:], c_lats, c_lons, cell_area) + nc.close() + + +def get_flaring_output_name(pollutant, sector, year): + output_path_aux = os.path.join(output_path, 'yearly_mean', 
'{0}_{1}'.format(pollutant, sector), ) + + if not(os.path.exists(output_path_aux)): + os.makedirs(output_path_aux) + return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, datetime(year=year, month=1, day=1).strftime('%Y'))) + + +def get_flaring_var_name(nc_var): + nc_var_2_var = { + 'emis_SO2_flr': 'so2', + 'emis_NOx_flr': 'nox_no2', + 'emis_NH3_flr': 'nh3', + 'emis_VOC_flr': 'nmvoc', + 'emis_PM25_flr': 'pm25', + 'emis_BC_flr': 'bc', + 'emis_OC_flr': 'oc', + 'emis_PM10_flr': 'pm10', + 'emis_CO_flr': 'co', + 'emis_CH4_flr': 'ch4', + } + try: + return_value = nc_var_2_var[nc_var] + except KeyError: + return_value = None + return return_value + + +def do_flaring_transformation(): + nc_in = Dataset(os.path.join(input_path, input_name_flaring), mode='r') + c_lats = nc_in.variables['lat'][:] + c_lons = nc_in.variables['lon'][:] + cell_area = get_grid_area(os.path.join(input_path, input_name_flaring)) + for var in nc_in.variables: + var_name = get_flaring_var_name(var) + if var_name is not None: + data = nc_in.variables[var][:] + data = np.nan_to_num(data) + for i in xrange(len(list_years)): + output_name = get_flaring_output_name(var_name, 'flaring', list_years[i]) + data_aux = data[i, :, :] + data_aux = (data_aux * year_factor) / cell_area + data_aux = data_aux.reshape((1,) + data_aux.shape) + data_list = [{ + 'name': var_name, + 'long_name': var_name, + 'data': data_aux, + 'units': Unit(var_units), + }] + write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, datetime(year=list_years[i], month=1, day=1)) + nc_in.close() + + +def get_ship_output_name(pollutant, sector, year): + output_path_aux = os.path.join(output_path, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) + + if not(os.path.exists(output_path_aux)): + os.makedirs(output_path_aux) + return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, datetime(year=year, month=1, day=1).strftime('%Y'))) + + +def get_ship_var_name(nc_var): + nc_var_2_var = { + 'SO2': 'so2', + 'NOx': 
'nox_no2', + 'VOC': 'nmvoc', + 'PM25': 'pm25', + 'BC': 'bc', + 'OC': 'oc', + 'PM10': 'pm10', + 'CO': 'co', + 'CH4': 'ch4', + } + try: + return_value = nc_var_2_var[nc_var] + except KeyError: + return_value = None + return return_value + +def do_ship_transformation(): + + for year in list_years: + in_path = os.path.join(input_path, input_name_ship.replace('', str(year))) + nc_in = Dataset(in_path, mode='r') + c_lats = nc_in.variables['lat'][:] + c_lons = nc_in.variables['lon'][:] + + cell_area = get_grid_area(in_path) + + for var in nc_in.variables: + var_name = get_ship_var_name(var) + if var_name is not None: + data = nc_in.variables[var][0, :, :] + data = np.nan_to_num(data) + + data = (data * year_factor) / cell_area + data = data.reshape((1,) + data.shape) + data_list = [{ + 'name': var_name, + 'long_name': var_name, + 'data': data, + 'units': Unit(var_units), + }] + + write_netcdf(get_ship_output_name(var_name, 'ship', year), data_list, c_lats, c_lons, cell_area, + datetime(year=year, month=1, day=1)) + nc_in.close() + + +if __name__ == '__main__': + do_transformation() + do_flaring_transformation() + do_ship_transformation() + diff --git a/preproc/edgarv432_ap_preproc.py b/preproc/edgarv432_ap_preproc.py new file mode 100755 index 0000000..70e9f9d --- /dev/null +++ b/preproc/edgarv432_ap_preproc.py @@ -0,0 +1,326 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +# ============== CONFIGURATION PARAMETERS ====================== +input_path = '/esarchive/recon/jrc/edgarv432_ap/original_files/' +output_path = '/esarchive/recon/jrc/edgarv432_ap' +list_pollutants = ['BC', 'CO', 'NH3', 'NOx', 'OC', 'PM10', 'PM2.5_bio', 'PM2.5_fossil', 'SO2', 'NMVOC'] +#list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] +list_years = [2012] + +# To do yearly emissions +process_yearly = True +yearly_input_name = 'yearly/v432___.0.1x0.1.nc' + +# To process monthly emissions, 2010 directly from monthly_input_name and other years calculated using bla bla bla +process_monthly = True +monthly_input_name = 'monthly/v432__2010__.0.1x0.1.nc' +monthly_pattern_file = 'temporal_profiles/v432_FM_.0.1x0.1.nc' +# ============================================================== + +""" +Main script to transform EDGARv4.3.2 AP global emission inventory to a NetCDF that follows the CF-1.6 conventions. + +This script also calculates the boundaries of teh cells and teh cell area. + +Carles Tena Medina (carles.tena@bsc.es) from Barcelona Supercomputing Center (BSC-CNS). 
+""" +import os +import timeit +from netCDF4 import Dataset +import numpy as np + +from hermesv3_gr.tools.coordinates_tools import * +from warnings import warn as warning + + +def ipcc_to_sector_dict(): + ipcc_sector_dict = \ + { + "IPCC_1A1a": "ENE", + "IPCC_1A1b_1A1c_1A5b1_1B1b_1B2a5_1B2a6_1B2b5_2C1b": "REF_TRF", + "IPCC_1A2": "IND", + "IPCC_1A3a_CDS": "TNR_Aviation_CDS", + "IPCC_1A3a_CRS": "TNR_Aviation_CRS", + "IPCC_1A3a_LTO": "TNR_Aviation_LTO", + "IPCC_1A3b": "TRO", + "IPCC_1A3c_1A3e": "TNR_Other", + "IPCC_1A3d_1C2": "TNR_Ship", + "IPCC_1A4": "RCO", + "IPCC_1B1a_1B2a1_1B2a2_1B2a3_1B2a4_1B2c": "PRO", + "IPCC_2A": "NMM", + "IPCC_2B": "CHE", + "IPCC_2C1a_2C1c_2C1d_2C1e_2C1f_2C2": "IRO", + "IPCC_2C3_2C4_2C5": "NFE", + "IPCC_2D": "FOO_PAP", + "IPCC_2G": "NEU", + "IPCC_3": "PRU_SOL", + "IPCC_4B": "MNM", + "IPCC_4C_4D1_4D2_4D4": "AGS", + "IPCC_4F": "AWB", + "IPCC_6A_6D": "SWD_LDF", + "IPCC_6B": "WWT", + "IPCC_6C": "SWD_INC", + "IPCC_7A": "FFF" + } + + return ipcc_sector_dict + + +def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, grid_cell_area, year, sector, + month=None): + # Creating NetCDF & Dimensions + print output_name_path + nc_output = Dataset(output_name_path, mode='w', format="NETCDF4") + nc_output.createDimension('nv', 2) + nc_output.createDimension('lon', center_lons.shape[0]) + nc_output.createDimension('lat', center_lats.shape[0]) + nc_output.createDimension('time', None) + + # TIME + time = nc_output.createVariable('time', 'd', ('time',), zlib=True) + # time.units = "{0} since {1}".format(tstep_units, global_atts['Start_DateTime'].strftime('%Y-%m-%d %H:%M:%S')) + if month is None: + time.units = "years since {0}-01-01 00:00:00".format(year) + else: + time.units = "months since {0}-{1}-01 00:00:00".format(year, str(month).zfill(2)) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0] + + # LATITUDE + lat = nc_output.createVariable('lat', 'f', ('lat',), zlib=True) + lat.bounds 
= "lat_bnds" + lat.units = "degrees_north" + lat.axis = "Y" + lat.long_name = "latitude" + lat.standard_name = "latitude" + lat[:] = center_lats + + lat_bnds = nc_output.createVariable('lat_bnds', 'f', ('lat', 'nv',), zlib=True) + lat_bnds[:] = create_bounds(center_lats) + + # LONGITUDE + lon = nc_output.createVariable('lon', 'f', ('lon',), zlib=True) + lon.bounds = "lon_bnds" + lon.units = "degrees_east" + lon.axis = "X" + lon.long_name = "longitude" + lon.standard_name = "longitude" + lon[:] = center_lons + + lon_bnds = nc_output.createVariable('lon_bnds', 'f', ('lon', 'nv',), zlib=True) + lon_bnds[:] = create_bounds(center_lons) + + # VARIABLE + nc_var = nc_output.createVariable(data_atts['long_name'], 'f', ('time', 'lat', 'lon',), zlib=True) + nc_var.units = data_atts['units'] + nc_var.long_name = data_atts['long_name'] + nc_var.coordinates = data_atts['coordiantes'] + nc_var.grid_mapping = data_atts['grid_mapping'] + nc_var.cell_measures = 'area: cell_area' + nc_var[:] = data.reshape((1,) + data.shape) + + # CELL AREA + cell_area = nc_output.createVariable('cell_area', 'f', ('lat', 'lon',)) + cell_area.long_name = "area of the grid cell" + cell_area.standard_name = "area" + cell_area.units = "m2" + cell_area[:] = grid_cell_area + + # CRS + crs = nc_output.createVariable('crs', 'i') + crs.grid_mapping_name = "latitude_longitude" + crs.semi_major_axis = 6371000.0 + crs.inverse_flattening = 0 + + nc_output.setncattr('title', 'EDGARv4.3.2_AP inventory for the sector {0} and pollutant {1}'.format(sector, data_atts[ + 'long_name']), ) + nc_output.setncattr('Conventions', 'CF-1.6', ) + nc_output.setncattr('institution', 'JRC', ) + nc_output.setncattr('source', 'EDGARv4.3.2_AP', ) + nc_output.setncattr('history', 'Re-writing of the EDGAR input to follow the CF 1.6 conventions;\n' + + '2017-03-22: Added time dimension (UNLIMITED);\n' + + '2017-03-22: Added boundaries;\n' + + '2017-03-24: Added global attributes;\n' + + '2017-03-24: Re-naming pollutant;\n' + + 
'2017-04-03: Added cell_area variable;\n') + nc_output.setncattr('references', 'web: http://edgar.jrc.ec.europa.eu/overview.php?v=432\n' + + ' doi:https://data.europa.eu/doi/10.2904/JRC_DATASET_EDGAR', ) + nc_output.setncattr('comment', 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + + '(Barcelona Supercomputing Center)', ) + + nc_output.close() + + +def do_yearly_transformation(year): + print year + for pollutant in list_pollutants: + for ipcc in ipcc_to_sector_dict().keys(): + file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + + if os.path.exists(file_path): + grid_area = get_grid_area(file_path) + print file_path + nc_in = Dataset(file_path, mode='r') + + if pollutant in ['PM2.5_bio', 'PM2.5_fossil']: + in_pollutant = pollutant + pollutant = 'PM2.5' + + data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + + data = np.array(data) + + # Reading lat, lon + lats = nc_in.variables['lat'][:] + lons = nc_in.variables['lon'][:] + nc_in.close() + + sector = ipcc_to_sector_dict()[ipcc] + if pollutant == 'PM2.5': + pollutant = in_pollutant.replace('.', '') + elif pollutant == 'NOx': + pollutant = 'nox_no2' + + data_attributes = {'long_name': pollutant.lower(), + 'units': 'kg.m-2.s-1', + 'coordinates': 'lat lon', + 'grid_mapping': 'crs'} + + out_path_aux = os.path.join(output_path, 'yearly_mean', pollutant.lower() + '_' + sector.lower()) + if not os.path.exists(out_path_aux): + os.makedirs(out_path_aux) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}.nc'.format(pollutant.lower(), year)), + data, data_attributes, lats, lons, grid_area, year, sector.lower()) + + else: + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + + +def do_monthly_transformation(year): + print year + for pollutant in list_pollutants: + for ipcc in ipcc_to_sector_dict().keys(): + file_path = os.path.join(input_path, 
yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + + if os.path.exists(file_path): + grid_area = get_grid_area(file_path) + print file_path + nc_in = Dataset(file_path, mode='r') + + if pollutant in ['PM2.5_bio', 'PM2.5_fossil']: + in_pollutant = pollutant + pollutant = 'PM2.5' + + data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + + data = np.array(data) + + # Reading lat, lon + lats = nc_in.variables['lat'][:] + lons = nc_in.variables['lon'][:] + nc_in.close() + + sector = ipcc_to_sector_dict()[ipcc] + if pollutant == 'PM2.5': + pollutant = in_pollutant.replace('.', '') + elif pollutant == 'NOx': + pollutant = 'nox_no2' + + data_attributes = {'long_name': pollutant.lower(), + 'units': 'kg.m-2.s-1', + 'coordinates': 'lat lon', + 'grid_mapping': 'crs'} + + out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) + if not os.path.exists(out_path_aux): + os.makedirs(out_path_aux) + + nc_month_factors = Dataset(os.path.join(input_path, monthly_pattern_file.replace('', sector))) + month_factors = nc_month_factors.variables[sector][:] + for month in xrange(1, 12 + 1, 1): + data_aux = data * month_factors[month - 1, :, :] + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), year, str(month).zfill(2))), + data_aux, data_attributes, lats, lons, grid_area, year, sector.lower()) + + else: + warning( + "The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + + +def do_2010_monthly_transformation(): + for pollutant in list_pollutants: + for ipcc in ipcc_to_sector_dict().keys(): + for month in xrange(1, 12 + 1, 1): + file_path = os.path.join(input_path, monthly_input_name.replace('', pollutant).replace('', str(month)).replace('', ipcc)) + + if os.path.exists(file_path): + grid_area = get_grid_area(file_path) + print file_path + nc_in = Dataset(file_path, mode='r') + + if pollutant in ['PM2.5_bio', 
'PM2.5_fossil']: + in_pollutant = pollutant + pollutant = 'PM2.5' + data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + + data = np.array(data) + + # Reading lat, lon + lats = nc_in.variables['lat'][:] + lons = nc_in.variables['lon'][:] + nc_in.close() + + sector = ipcc_to_sector_dict()[ipcc] + if pollutant == 'PM2.5': + pollutant = in_pollutant.replace('.', '') + elif pollutant == 'NOx': + pollutant = 'nox_no2' + + data_attributes = {'long_name': pollutant.lower(), + 'units': 'kg.m-2.s-1', + 'coordinates': 'lat lon', + 'grid_mapping': 'crs'} + + out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) + if not os.path.exists(out_path_aux): + os.makedirs(out_path_aux) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), year, str(month).zfill(2))), + data, data_attributes, lats, lons, grid_area, year, sector.lower()) + + else: + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + + +if __name__ == '__main__': + + if process_yearly: + for y in list_years: + do_yearly_transformation(y) + + if process_monthly: + for y in list_years: + if y == 2010: + do_2010_monthly_transformation() + else: + do_monthly_transformation(y) + diff --git a/preproc/edgarv432_voc_preproc.py b/preproc/edgarv432_voc_preproc.py new file mode 100755 index 0000000..484c397 --- /dev/null +++ b/preproc/edgarv432_voc_preproc.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +# ============== CONFIGURATION PARAMETERS ====================== +input_path = '/esarchive/recon/jrc/edgarv432_voc/original_files/' +output_path = '/esarchive/recon/jrc/edgarv432_voc' +list_pollutants = ['voc1', 'voc2', 'voc3', 'voc4', 'voc5', 'voc6', 'voc7', 'voc8', 'voc9', 'voc10', 'voc11', 'voc12', + 'voc13', 'voc14', 'voc15', 'voc16', 'voc17', 'voc18', 'voc19', 'voc20', 'voc21', 'voc22', 'voc23', + 'voc24', 'voc25'] +#list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] +list_years = [2010] + +# To do yearly emissions +process_yearly = True +yearly_input_name = 'yearly/v432_VOC_spec___.0.1x0.1.nc' + +# To process monthly emissions, 2010 directly from monthly_input_name and other years calculated using bla bla bla +process_monthly = False +monthly_input_name = 'monthly/v432_VOC_spec__2010__.0.1x0.1.nc' +monthly_pattern_file = 'temporal_profiles/v432_FM_.0.1x0.1.nc' +# ============================================================== + +""" +Main script to transform EDGARv4.3.2 AP global emission inventory to a NetCDF that follows the CF-1.6 conventions. + +This script also calculates the boundaries of teh cells and teh cell area. + +Carles Tena Medina (carles.tena@bsc.es) from Barcelona Supercomputing Center (BSC-CNS). 
+""" +import os +import timeit +from netCDF4 import Dataset +import numpy as np + +from hermesv3_gr.tools.coordinates_tools import * +from warnings import warn as warning + + +def ipcc_to_sector_dict(): + ipcc_sector_dict = \ + { + "IPCC_4F": "AWB", + "IPCC_1A1": "ENE", + "IPCC_7A": "FFF", + "IPCC_1A2": "IND", + "IPCC_2_3": "PPA", + "IPCC_1B1a_1B2a1_1B2a2_1B2a3_1B2a4_1B2c": "PRO", + "IPCC_1A4": "RCO", + "IPCC_1A1b_1B2a5": "REF", + "IPCC_6": "SWD", + "IPCC_1A3a_CDS": "TNR_Aviation_CDS", + "IPCC_1A3a_CRS": "TNR_Aviation_CRS", + "IPCC_1A3a_LTO": "TNR_Aviation_LTO", + "IPCC_1A3c_1A3e": "TNR_Other", + "IPCC_1A3d_1C2": "TNR_Ship", + "IPCC_1A1c_1A5b1_1B1b_1B2a6_1B2b5_2C1b": "TRF", + "IPCC_1A3b": "TRO" + } + + return ipcc_sector_dict + + +def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, grid_cell_area, year, sector, + month=None): + # Creating NetCDF & Dimensions + print output_name_path + nc_output = Dataset(output_name_path, mode='w', format="NETCDF4") + nc_output.createDimension('nv', 2) + nc_output.createDimension('lon', center_lons.shape[0]) + nc_output.createDimension('lat', center_lats.shape[0]) + nc_output.createDimension('time', None) + + # TIME + time = nc_output.createVariable('time', 'd', ('time',), zlib=True) + # time.units = "{0} since {1}".format(tstep_units, global_atts['Start_DateTime'].strftime('%Y-%m-%d %H:%M:%S')) + if month is None: + time.units = "years since {0}-01-01 00:00:00".format(year) + else: + time.units = "months since {0}-{1}-01 00:00:00".format(year, str(month).zfill(2)) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0] + + # LATITUDE + lat = nc_output.createVariable('lat', 'f', ('lat',), zlib=True) + lat.bounds = "lat_bnds" + lat.units = "degrees_north" + lat.axis = "Y" + lat.long_name = "latitude" + lat.standard_name = "latitude" + lat[:] = center_lats + + lat_bnds = nc_output.createVariable('lat_bnds', 'f', ('lat', 'nv',), zlib=True) + lat_bnds[:] = 
create_bounds(center_lats) + + # LONGITUDE + lon = nc_output.createVariable('lon', 'f', ('lon',), zlib=True) + lon.bounds = "lon_bnds" + lon.units = "degrees_east" + lon.axis = "X" + lon.long_name = "longitude" + lon.standard_name = "longitude" + lon[:] = center_lons + + lon_bnds = nc_output.createVariable('lon_bnds', 'f', ('lon', 'nv',), zlib=True) + lon_bnds[:] = create_bounds(center_lons) + + # VARIABLE + nc_var = nc_output.createVariable(data_atts['long_name'], 'f', ('time', 'lat', 'lon',), zlib=True) + nc_var.units = data_atts['units'] + nc_var.long_name = data_atts['long_name'] + nc_var.coordinates = data_atts['coordiantes'] + nc_var.grid_mapping = data_atts['grid_mapping'] + nc_var.cell_measures = 'area: cell_area' + nc_var[:] = data.reshape((1,) + data.shape) + + # CELL AREA + cell_area = nc_output.createVariable('cell_area', 'f', ('lat', 'lon',)) + cell_area.long_name = "area of the grid cell" + cell_area.standard_name = "area" + cell_area.units = "m2" + cell_area[:] = grid_cell_area + + # CRS + crs = nc_output.createVariable('crs', 'i') + crs.grid_mapping_name = "latitude_longitude" + crs.semi_major_axis = 6371000.0 + crs.inverse_flattening = 0 + + nc_output.setncattr('title', 'EDGARv4.3.2_AP inventory for the sector {0} and pollutant {1}'.format(sector, data_atts[ + 'long_name']), ) + nc_output.setncattr('Conventions', 'CF-1.6', ) + nc_output.setncattr('institution', 'JRC', ) + nc_output.setncattr('source', 'EDGARv4.3.2_AP', ) + nc_output.setncattr('history', 'Re-writing of the EDGAR input to follow the CF 1.6 conventions;\n' + + '2017-03-22: Added time dimension (UNLIMITED);\n' + + '2017-03-22: Added boundaries;\n' + + '2017-03-24: Added global attributes;\n' + + '2017-03-24: Re-naming pollutant;\n' + + '2017-04-03: Added cell_area variable;\n') + + nc_output.setncattr('references', 'web: http://edgar.jrc.ec.europa.eu/overview.php?v=432_VOC_spec\n' + + 'publication: Huang et al. 
(ACP, 2017)', ) + nc_output.setncattr('comment', 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + + '(Barcelona Supercomputing Center)', ) + + nc_output.close() + + +def do_yearly_transformation(year): + print year + for pollutant in list_pollutants: + for ipcc in ipcc_to_sector_dict().keys(): + file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + + if os.path.exists(file_path): + grid_area = get_grid_area(file_path) + print file_path + nc_in = Dataset(file_path, mode='r') + + data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + + data = np.array(data) + + # Reading lat, lon + lats = nc_in.variables['lat'][:] + lons = nc_in.variables['lon'][:] + nc_in.close() + + sector = ipcc_to_sector_dict()[ipcc] + if pollutant in ['voc{0}'.format(x) for x in xrange(1, 9 + 1, 1)]: + pollutant_aux = pollutant.replace('voc', 'voc0') + else: + pollutant_aux = pollutant + + data_attributes = {'long_name': pollutant_aux.lower(), + 'units': 'kg.m-2.s-1', + 'coordinates': 'lat lon', + 'grid_mapping': 'crs'} + out_path_aux = os.path.join(output_path, 'yearly_mean', pollutant_aux.lower() + '_' + sector.lower()) + if not os.path.exists(out_path_aux): + os.makedirs(out_path_aux) + # print os.path.join(out_path_aux, '{0}_{1}.nc'.format(pollutant_aux.lower(), year)) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}.nc'.format(pollutant_aux.lower(), year)), + data, data_attributes, lats, lons, grid_area, year, sector.lower()) + + else: + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + + +def do_monthly_transformation(year): + print year + for pollutant in list_pollutants: + for ipcc in ipcc_to_sector_dict().keys(): + file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + + if os.path.exists(file_path): + grid_area = get_grid_area(file_path) + 
print file_path + nc_in = Dataset(file_path, mode='r') + + data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + + data = np.array(data) + + # Reading lat, lon + lats = nc_in.variables['lat'][:] + lons = nc_in.variables['lon'][:] + nc_in.close() + + sector = ipcc_to_sector_dict()[ipcc] + + if pollutant in ['voc{0}'.format(x) for x in xrange(1, 9 + 1, 1)]: + pollutant_aux = pollutant.replace('voc', 'voc0') + else: + pollutant_aux = pollutant + + data_attributes = {'long_name': pollutant_aux.lower(), + 'units': 'kg.m-2.s-1', + 'coordinates': 'lat lon', + 'grid_mapping': 'crs'} + + out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) + if not os.path.exists(out_path_aux): + os.makedirs(out_path_aux) + + nc_month_factors = Dataset(os.path.join(input_path, monthly_pattern_file.replace('', sector))) + month_factors = nc_month_factors.variables[sector][:] + for month in xrange(1, 12 + 1, 1): + data_aux = data * month_factors[month - 1, :, :] + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant_aux.lower(), year, str(month).zfill(2))), + data_aux, data_attributes, lats, lons, grid_area, year, sector.lower()) + + else: + warning( + "The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + + +def do_2010_monthly_transformation(): + for pollutant in list_pollutants: + for ipcc in ipcc_to_sector_dict().keys(): + for month in xrange(1, 12 + 1, 1): + file_path = os.path.join(input_path, monthly_input_name.replace('', pollutant).replace('', str(month)).replace('', ipcc)) + + if os.path.exists(file_path): + grid_area = get_grid_area(file_path) + print file_path + nc_in = Dataset(file_path, mode='r') + + data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + + data = np.array(data) + + # Reading lat, lon + lats = nc_in.variables['lat'][:] + lons = nc_in.variables['lon'][:] + nc_in.close() + + sector = ipcc_to_sector_dict()[ipcc] 
+ + if pollutant in ['voc{0}'.format(x) for x in xrange(1, 9 + 1, 1)]: + pollutant_aux = pollutant.replace('voc', 'voc0') + else: + pollutant_aux = pollutant + + data_attributes = {'long_name': pollutant_aux.lower(), + 'units': 'kg.m-2.s-1', + 'coordinates': 'lat lon', + 'grid_mapping': 'crs'} + + out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) + if not os.path.exists(out_path_aux): + os.makedirs(out_path_aux) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant_aux.lower(), year, str(month).zfill(2))), + data, data_attributes, lats, lons, grid_area, year, sector.lower()) + + else: + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + + +if __name__ == '__main__': + + if process_yearly: + for y in list_years: + do_yearly_transformation(y) + + if process_monthly: + for y in list_years: + if y == 2010: + do_2010_monthly_transformation() + else: + do_monthly_transformation(y) + diff --git a/preproc/emep_preproc.py b/preproc/emep_preproc.py new file mode 100644 index 0000000..8ba58d3 --- /dev/null +++ b/preproc/emep_preproc.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ + +# ============== CONFIGURATION PARAMETERS ====================== +input_path = '/esarchive/recon/ceip/emepv18/original_files' +output_path = '/esarchive/recon/ceip/emepv18/yearly_mean' +input_name = '__2018_GRID_.txt' +# list_years = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016] +list_years = [2015] +list_pollutants = ['NOx', 'NMVOC', 'SOx', 'NH3', 'PM2_5', 'PM10', 'CO'] +# ============================================================== + + +import sys +import os +from warnings import warn as warning +from datetime import datetime + + +def correct_input_error(df): + df.loc[df['LATITUDE'] == 36.14, 'LATITUDE'] = 36.15 + df.loc[df['LONGITUDE'] == 29.58, 'LONGITUDE'] = 29.55 + + return df + + +def get_sectors(): + return ['A_PublicPower', 'B_Industry', 'C_OtherStationaryComb', 'D_Fugitive', 'E_Solvents', 'F_RoadTransport', + 'G_Shipping', 'H_Aviation', 'I_Offroad', 'J_Waste', 'K_AgriLivestock', 'L_AgriOther'] + + +def calculate_grid_definition(in_path): + import pandas as pd + import numpy as np + + df = pd.read_table(in_path, sep=';', skiprows=[0, 1, 2, 3], names=['ISO2', 'YEAR', 'SECTOR', 'POLLUTANT', 'LONGITUDE', 'LATITUDE', 'UNIT', 'EMISSION']) + + df = correct_input_error(df) + # Longitudes + lons = np.sort(np.unique(df.LONGITUDE)) + lons_interval = lons[1:] - lons[:-1] + print 'Lon min: {0}; Lon max: {1}; Lon inc: {2}; Lon num: {3}'.format( + df.LONGITUDE.min(), df.LONGITUDE.max(), lons_interval.min(), len(lons)) + + # Latitudes + lats = np.sort(np.unique(df.LATITUDE)) + lats_interval = lats[1:] - lats[:-1] + print 'Lat min: {0}; Lat max: {1}; Lat inc: {2}; Lat num: {3}'.format( + df.LATITUDE.min(), df.LATITUDE.max(), lats_interval.min(), len(lats)) + + lats = np.arange(-90 + lats_interval.min()/2, 90, lats_interval.min(), dtype=np.float64) + lons = np.arange(-180 + lons_interval.min()/2, 180, lons_interval.min(), dtype=np.float64) + + return lats, lons, lats_interval.min(), lons_interval.min() + + 
+def do_transformation(year): + from hermesv3_gr.tools.netcdf_tools import write_netcdf, get_grid_area + from hermesv3_gr.tools.coordinates_tools import create_bounds + import pandas as pd + import numpy as np + + unit_factor = 1000./(365.*24.*3600.) # From Mg/year to Kg/s + + for pollutant in list_pollutants: + for sector in get_sectors(): + in_file = os.path.join(input_path, input_name.replace('', str(year)).replace('', sector).replace('', pollutant)) + + if os.path.exists(in_file): + print in_file + c_lats, c_lons, lat_interval, lon_interval = calculate_grid_definition(in_file) + b_lats = create_bounds(c_lats, number_vertices=2) + b_lons = create_bounds(c_lons, number_vertices=2) + name = pollutant.lower() + if name == 'nox': + name = 'nox_no2' + elif name == 'pm2_5': + name = 'pm25' + elif name == 'voc': + name = 'nmvoc' + + element = { + 'name': name, + 'units': 'kg.m-2.s-1', + 'data': np.zeros((len(c_lats), len(c_lons))) + } + + df = pd.read_table(in_file, sep=';', skiprows=[0,1,2,3], + names=['ISO2','YEAR','SECTOR','POLLUTANT','LONGITUDE','LATITUDE','UNIT','EMISSION']) + + df = correct_input_error(df) + + df['row_lat'] = np.array((df.LATITUDE - (-90 + lat_interval/2)) / lat_interval, dtype=np.int32) + df['col_lon'] = np.array((df.LONGITUDE - (-180 + lon_interval/2)) / lon_interval, dtype=np.int32) + + df = df.groupby(['row_lat', 'col_lon']).sum().reset_index() + + element['data'][df.row_lat, df.col_lon] += df['EMISSION'] + + element['data'] = element['data'].reshape((1,) + element['data'].shape) + + complete_output_dir = os.path.join(output_path, '{0}_{1}'.format(element['name'], sector.lower())) + if not os.path.exists(complete_output_dir): + os.makedirs(complete_output_dir) + complete_output_dir = os.path.join(complete_output_dir, '{0}_{1}.nc'.format(element['name'], year)) + + write_netcdf(complete_output_dir, c_lats, c_lons, [element], date=datetime(year, month=1, day=1), + boundary_latitudes=b_lats, boundary_longitudes=b_lons) + cell_area = 
get_grid_area(complete_output_dir) + element['data'] = element['data'] * unit_factor / cell_area + write_netcdf(complete_output_dir, c_lats, c_lons, [element], date=datetime(year, month=1, day=1), + boundary_latitudes=b_lats, boundary_longitudes=b_lons, cell_area=cell_area, + global_attributes={ + 'references': 'web: web: http://www.ceip.at/ms/ceip_home1/ceip_home/webdab_emepdatabase/emissions_emepmodels/', + 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + + '(Barcelona Supercomputing Center)' + }) + else: + warning("The pollutant {0} for the GNFR14 sector {1} does not exist.\n File not found: {2}".format(pollutant, sector, in_file)) + + +if __name__ == '__main__': + for y in list_years: + do_transformation(y) diff --git a/preproc/gfas12_preproc.py b/preproc/gfas12_preproc.py new file mode 100755 index 0000000..51b918b --- /dev/null +++ b/preproc/gfas12_preproc.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
from datetime import datetime, timedelta

# ============== CONFIGURATION PARAMETERS ======================
input_path = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/'
# NOTE(review): a date placeholder (e.g. '<date>') appears lost here; the main
# loop does input_name.replace('', date) which inserts the date between every
# character. TODO confirm the original template.
input_name = 'ga_.grb'
output_path = '/esarchive/recon/ecmwf/gfas'

starting_date = datetime(year=2018, month=8, day=29)
ending_date = datetime(year=2018, month=8, day=29)

parameters_file = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/GFAS_Parameters.csv'
# ==============================================================

import os
import sys
import timeit
import numpy as np
from netCDF4 import Dataset
import cf_units
import pandas as pd
# 'import datetime' removed: it rebound the name 'datetime' (the class imported
# above) to the module object and was never used as a module in this script.
import cdo

# Make the hermesv3_gr package importable when running the script in place.
parentPath = os.path.abspath(os.path.join('..', '..'))
if parentPath not in sys.path:
    sys.path.insert(0, parentPath)

from hermesv3_gr.tools.coordinates_tools import *

cdo = cdo.Cdo()  # rebinds the module name to a Cdo() instance used below


def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cell_area, date):
    """
    Write a CF-1.6 NetCDF with the given daily-mean GFAS variables.

    :param output_name_path: Complete path to the output NetCDF to be stored.
    :type output_name_path: str

    :param data_list: Variable descriptors; dicts with 'name', 'long_name',
        'units' (a cf_units.Unit) and 'data' keys.
    :type data_list: list

    :param center_lats: Latitudes of the center of each cell.
    :type center_lats: numpy.ndarray

    :param center_lons: Longitudes of the center of each cell.
    :type center_lons: numpy.ndarray

    :param grid_cell_area: Area (m2) of each grid cell.
    :type grid_cell_area: numpy.ndarray

    :param date: Date of the current netCDF.
    :type date: datetime.datetime
    """
    print(output_name_path)
    # Creating NetCDF & Dimensions
    nc_output = Dataset(output_name_path, mode='w', format="NETCDF4")
    nc_output.createDimension('nv', 2)
    nc_output.createDimension('lon', center_lons.shape[0])
    nc_output.createDimension('lat', center_lats.shape[0])
    nc_output.createDimension('time', None)

    # TIME (single timestep, anchored at the file date)
    time = nc_output.createVariable('time', 'd', ('time',), zlib=True)
    time.units = "hours since {0}".format(date.strftime('%Y-%m-%d %H:%M:%S'))
    time.standard_name = "time"
    time.calendar = "gregorian"
    time.long_name = "time"
    time[:] = [0]

    # LATITUDE
    lat = nc_output.createVariable('lat', 'f', ('lat',), zlib=True)
    lat.bounds = "lat_bnds"
    lat.units = "degrees_north"
    lat.axis = "Y"
    lat.long_name = "latitude"
    lat.standard_name = "latitude"
    lat[:] = center_lats

    lat_bnds = nc_output.createVariable('lat_bnds', 'f', ('lat', 'nv',), zlib=True)
    lat_bnds[:] = create_bounds(center_lats)

    # LONGITUDE
    lon = nc_output.createVariable('lon', 'f', ('lon',), zlib=True)
    lon.bounds = "lon_bnds"
    lon.units = "degrees_east"
    lon.axis = "X"
    lon.long_name = "longitude"
    lon.standard_name = "longitude"
    lon[:] = center_lons

    lon_bnds = nc_output.createVariable('lon_bnds', 'f', ('lon', 'nv',), zlib=True)
    lon_bnds[:] = create_bounds(center_lons)

    # EMISSION VARIABLES
    for var in data_list:
        nc_var = nc_output.createVariable(var['name'], 'f', ('time', 'lat', 'lon',), zlib=True)
        nc_var.units = var['units'].symbol
        nc_var.long_name = var['long_name']
        nc_var.coordinates = 'lat lon'
        nc_var.grid_mapping = 'crs'
        nc_var.cell_measures = 'area: cell_area'
        nc_var[:] = var['data']

    # CELL AREA
    cell_area = nc_output.createVariable('cell_area', 'f', ('lat', 'lon',))
    cell_area.long_name = "area of the grid cell"
    cell_area.standard_name = "area"
    cell_area.units = "m2"
    cell_area[:] = grid_cell_area

    # CRS
    crs = nc_output.createVariable('crs', 'i')
    crs.grid_mapping_name = "latitude_longitude"
    crs.semi_major_axis = 6371000.0
    crs.inverse_flattening = 0

    nc_output.setncattr('title', 'GFASv1.2 inventory')
    nc_output.setncattr('Conventions', 'CF-1.6', )
    nc_output.setncattr('institution', 'ECMWF', )
    nc_output.setncattr('source', 'GFAS', )
    # Typo 'varaibles' fixed in the history attribute.
    nc_output.setncattr('history', 'Re-writing of the GFAS input to follow the CF-1.6 conventions;\n' +
                                   '2017-04-05: Added boundaries;\n' +
                                   '2017-04-05: Added global attributes;\n' +
                                   '2017-04-05: Re-naming pollutant;\n' +
                                   '2017-04-05: Added cell_area variable;\n' +
                                   '2017-04-19: Added new variables;\n')
    nc_output.setncattr('references', '', )
    nc_output.setncattr('comment', 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                                   '(Barcelona Supercomputing Center)', )

    nc_output.close()


def do_transformation(input_file, date, output_dir, variables_list):
    """
    Convert one GRIB GFAS file to NetCDF (via CDO), extract the requested
    variables and re-write them as a CF-1.6 daily-mean multivariable file.

    :param input_file: Path to the GRIB input file.
    :type input_file: str

    :param date: Date of the file being processed.
    :type date: datetime.datetime

    :param output_dir: Root output directory ('daily_mean/multivar' is appended).
    :type output_dir: str

    :param variables_list: Variable descriptors produced by do_var_list();
        their 'data' entries are filled in place.
    :type variables_list: list
    """
    nc_temp = cdo.copy(input=input_file, options='-R -r -f nc4c -z zip_4')

    nc_in = Dataset(nc_temp, mode='r')
    cell_area = get_grid_area(nc_temp)

    lats = nc_in.variables['lat'][:]
    lons = nc_in.variables['lon'][:]

    for variable in variables_list:
        variable['data'] = nc_in.variables[variable['original_name']][:]

    nc_in.close()

    out_path_aux = os.path.join(output_dir, 'daily_mean', 'multivar')
    if not os.path.exists(out_path_aux):
        os.makedirs(out_path_aux)
    # Local name shadows the module-level output_path; kept for compatibility.
    output_path = os.path.join(out_path_aux, 'ga_{0}.nc'.format(date.strftime('%Y%m%d')))
    write_netcdf(output_path, variables_list, lats, lons, cell_area, date)


def do_var_list(variables_file):
    """
    Parse the GFAS parameter table into a list of variable descriptors.

    :param variables_file: Path to the ';'-separated GFAS_Parameters.csv table.
    :type variables_file: str

    :return: List of dicts with 'original_name' ('var<id>' as produced by the
        CDO conversion), 'name', 'long_name' and 'units' (cf_units.Unit).
    :rtype: list
    """
    df = pd.read_csv(variables_file, sep=';')
    list_aux = []
    for i, element in df.iterrows():
        dict_aux = {
            'original_name': 'var' + str(element.id),
            'name': element['Short_Name'],
            'long_name': element['Name'],
            'units': cf_units.Unit(element['Units']),
        }
        list_aux.append(dict_aux)
    return list_aux


if __name__ == '__main__':
    var_list = do_var_list(parameters_file)

    date_aux = starting_date
    while date_aux <= ending_date:
        f = os.path.join(input_path, input_name.replace('', date_aux.strftime('%Y%m%d')))
        if os.path.isfile(f):
            do_transformation(f, date_aux, output_path, var_list)
        else:
            print('ERROR: file {0} not found'.format(f))

        date_aux = date_aux + timedelta(days=1)

# ---------------------------------------------------------------------------
# preproc/htapv2_preproc.py (new file, mode 100755)
# ---------------------------------------------------------------------------
#!/usr/bin/env python

# Copyright 2018 Earth Sciences Department, BSC-CNS
#
# This file is part of HERMESv3_GR.
#
# HERMESv3_GR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HERMESv3_GR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HERMESv3_GR. If not, see .
# ============== CONFIGURATION PARAMETERS ======================
input_path = '/esarchive/recon/jrc/htapv2/original_files'
output_path = '/esarchive/recon/jrc/htapv2'

# NOTE(review): the template file names below lost their placeholder tokens
# (e.g. '<sector>', '<year>', '<pollutant>', '<month>') in a text conversion;
# the .replace('', x) calls in __main__ insert x between every character.
# TODO restore the original placeholders before running.
input_name = 'edgar_HTAP__emi___.0.1x0.1.nc'
input_name_air = 'edgar_HTAP_emi___.0.1x0.1.nc'
input_name_ships = 'edgar_HTAP__emi_SHIPS_.0.1x0.1.nc'
# HTAP auxiliary NMVOC emission data for the industry sub-sectors
# (http://iek8wikis.iek.fz-juelich.de/HTAPWiki/WP1.1?highlight=%28%28WP1.1%29%29)
input_name_nmvoc_industry = 'HTAPv2_NMVOC___.0.1x0.1.nc'

# list_years = [2008, 2010]
list_years = [2010]

# RETRO ratios applied to HTAPv2 NMVOC emissions
# (http://iek8wikis.iek.fz-juelich.de/HTAPWiki/WP1.1?highlight=%28%28WP1.1%29%29)
voc_ratio_path = '/esarchive/recon/jrc/htapv2/original_files/retro_nmvoc_ratio_2000_01x01'
voc_ratio_name = 'retro_nmvoc_ratio__2000_0.1deg.nc'
voc_ratio_air_name = 'VOC_split_AIR.csv'
voc_ratio_ships_name = 'VOC_split_SHIP.csv'

# ==============================================================

import os
import sys


def do_transformation_annual(filename, out_path, pollutant, sector, year):
    """
    Re-write the HTAPv2 inputs following ES and CF-1.6 conventions for annual
    inventories.

    :param filename: Path to the input file.
    :type filename: str

    :param out_path: Path to store the output.
    :type out_path: str

    :param pollutant: Pollutant name.
    :type pollutant: str

    :param sector: Name of the sector.
    :type sector: str

    :param year: Year.
    :type year: int
    """
    import os
    from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area
    from hermesv3_gr.tools.coordinates_tools import create_bounds

    print(filename)
    c_lats, c_lons = extract_vars(filename, ['lat', 'lon'])

    # 'pm25' is the only pollutant whose NetCDF variable name ('emi_pm2.5')
    # cannot be derived with the generic pattern.
    if pollutant == 'pm25':
        var_name = 'emi_pm2.5'
    else:
        var_name = 'emi_{0}'.format(pollutant)
    [data] = extract_vars(filename, [var_name],
                          attributes_list=['standard_name', 'units', 'cell_method', 'long_name'])
    data['data'] = data['data'].reshape((1,) + data['data'].shape)  # add time dimension
    data['name'] = pollutant

    global_attributes = {
        'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, data['long_name']),
        'Conventions': 'CF-1.6',
        'institution': 'European Commission, Joint Research Centre (JRC)',
        'source': 'HTAPv2',
        'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' +
                   '2017-04-04: Added time dimension (UNLIMITED);\n' +
                   '2017-04-04: Added boundaries;\n' +
                   '2017-04-04: Added global attributes;\n' +
                   '2017-04-04: Re-naming pollutant;\n' +
                   '2017-04-04: Added cell_area variable;\n',
        'references': 'EC, JRC / US EPA, HTAP_V2. http://edgar.jrc.ec.europa.eu/htap/EDGAR-HTAP_v1_final_jan2012.pdf\n ' +
                      'http://edgar.jrc.ec.europa.eu/htap_v2/',
        'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                   '(Barcelona Supercomputing Center)',
    }

    out_path = os.path.join(out_path, pollutant + '_' + sector.lower())
    if not os.path.exists(out_path):
        os.makedirs(out_path)

    out_path = os.path.join(out_path, '{0}_{1}.nc'.format(pollutant, year))
    print(out_path)
    write_netcdf(out_path, c_lats['data'], c_lons['data'], [data],
                 boundary_latitudes=create_bounds(c_lats['data']),
                 boundary_longitudes=create_bounds(c_lons['data']),
                 cell_area=get_grid_area(filename), global_attributes=global_attributes,)


def do_transformation(filename_list, out_path, pollutant, sector, year):
    """
    Re-write the monthly HTAPv2 inputs following ES and CF-1.6 conventions.

    :param filename_list: List of 12 input file names (one per month).
    :type filename_list: list

    :param out_path: Path to store the output.
    :type out_path: str

    :param pollutant: Pollutant name.
    :type pollutant: str

    :param sector: Name of the sector.
    :type sector: str

    :param year: Year.
    :type year: int
    """
    import os
    from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area
    from hermesv3_gr.tools.coordinates_tools import create_bounds

    for month in range(1, 13):
        print(filename_list[month - 1])
        c_lats, c_lons = extract_vars(filename_list[month - 1], ['lat', 'lon'])

        if pollutant == 'pm25':
            var_name = 'emi_pm2.5'  # only variable whose NetCDF name differs
        else:
            var_name = 'emi_{0}'.format(pollutant)
        [data] = extract_vars(filename_list[month - 1], [var_name],
                              attributes_list=['standard_name', 'units', 'cell_method', 'long_name'])
        data['data'] = data['data'].reshape((1,) + data['data'].shape)  # add time dimension
        data['name'] = pollutant

        global_attributes = {
            'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, data['long_name']),
            'Conventions': 'CF-1.6',
            'institution': 'European Commission, Joint Research Centre (JRC)',
            'source': 'HTAPv2',
            'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' +
                       '2017-04-04: Added time dimension (UNLIMITED);\n' +
                       '2017-04-04: Added boundaries;\n' +
                       '2017-04-04: Added global attributes;\n' +
                       '2017-04-04: Re-naming pollutant;\n' +
                       '2017-04-04: Added cell_area variable;\n',
            'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' +
                          'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php',
            'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                       '(Barcelona Supercomputing Center)',
        }

        out_path_aux = os.path.join(out_path, pollutant + '_' + sector.lower())
        if not os.path.exists(out_path_aux):
            os.makedirs(out_path_aux)

        out_path_aux = os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant, year, str(month).zfill(2)))
        write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data],
                     boundary_latitudes=create_bounds(c_lats['data']),
                     boundary_longitudes=create_bounds(c_lons['data']),
                     cell_area=get_grid_area(filename_list[month - 1]), global_attributes=global_attributes,)


def do_ratio_list(sector=None):
    """
    Return the {voc_key: ratio_file_path} mapping for the given sector.

    SHIPS and the three AIR sub-sectors use a single CSV split file (key
    'all'); any other value (including None) returns the per-VOC-group RETRO
    ratio NetCDFs (voc10 and voc11 do not exist).

    :param sector: Sector name or None for the RETRO monthly ratio set.
    :type sector: str

    :rtype: dict
    """
    if sector == 'SHIPS':
        return {'all': os.path.join(voc_ratio_path, voc_ratio_ships_name)}
    if sector in ('AIR_CDS', 'AIR_CRS', 'AIR_LTO'):
        return {'all': os.path.join(voc_ratio_path, voc_ratio_air_name)}

    ratio_files = {}
    for num in [1, 2, 3, 4, 5, 6, 7, 8, 9] + list(range(12, 26)):
        tag = str(num).zfill(2)
        # NOTE(review): replace('', tag) interleaves 'tag' between every
        # character of the template; the original placeholder appears lost in
        # a text conversion. Preserved as-is — TODO confirm.
        ratio_files['voc' + tag] = os.path.join(voc_ratio_path, voc_ratio_name.replace('', tag))
    return ratio_files


def do_nmvoc_month_transformation(filename_list, out_path, sector, year):
    """
    Speciate the monthly HTAPv2 NMVOC emissions into RETRO VOC groups for the
    given sector and write one NetCDF per VOC group and month.

    :param filename_list: List of 12 monthly NMVOC input files.
    :type filename_list: list

    :param out_path: Root output path.
    :type out_path: str

    :param sector: Sector name: 'ENERGY', 'RESIDENTIAL' or 'TRANSPORT'.
    :type sector: str

    :param year: Year.
    :type year: int
    """
    from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf
    from hermesv3_gr.tools.coordinates_tools import create_bounds

    nmvoc_ratio_list = do_ratio_list()

    print(sector)
    # Each sector reads a different ratio variable and skips the VOC groups
    # that have no emissions for it.
    if sector == 'ENERGY':
        ratio_var = 'pow'
        for key in ('voc18', 'voc19', 'voc20'):
            nmvoc_ratio_list.pop(key, None)
    elif sector == 'RESIDENTIAL':
        ratio_var = 'res'
        for key in ('voc18', 'voc20'):
            nmvoc_ratio_list.pop(key, None)
    elif sector == 'TRANSPORT':
        ratio_var = 'tra'
        for key in ('voc01', 'voc18', 'voc19', 'voc20', 'voc24', 'voc25'):
            nmvoc_ratio_list.pop(key, None)
    else:
        # Previously an unknown sector raised NameError on ratio_var below.
        raise ValueError('Unknown sector for NMVOC speciation: {0}'.format(sector))

    print('{0} {1}'.format(type(nmvoc_ratio_list), nmvoc_ratio_list))

    for month in range(1, 13):
        print(filename_list[month - 1])
        c_lats, c_lons = extract_vars(filename_list[month - 1], ['lat', 'lon'])

        [data] = extract_vars(filename_list[month - 1], ['emi_nmvoc'])

        for voc, ratio_file in nmvoc_ratio_list.items():
            print('{0} {1}'.format(voc, ratio_file))

            pollutant = voc
            [ratio] = extract_vars(ratio_file, [ratio_var])

            data_aux = data.copy()
            data_aux['data'] = data['data'] * ratio['data']
            data_aux['data'] = data_aux['data'].reshape((1,) + data_aux['data'].shape)
            data_aux['name'] = voc
            data_aux['units'] = 'kg m-2 s-1'
            global_attributes = {
                'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, pollutant),
                'Conventions': 'CF-1.6',
                'institution': 'European Commission, Joint Research Centre (JRC)',
                'source': 'HTAPv2',
                'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' +
                           '2017-04-28: ...',
                'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' +
                              'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php',
                'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                           '(Barcelona Supercomputing Center)',
            }

            out_path_aux = os.path.join(out_path, pollutant + '_' + sector.lower())
            if not os.path.exists(out_path_aux):
                os.makedirs(out_path_aux)

            out_path_aux = os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant, year, str(month).zfill(2)))
            print(out_path_aux)
            write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data_aux],
                         boundary_latitudes=create_bounds(c_lats['data']),
                         boundary_longitudes=create_bounds(c_lons['data']),
                         global_attributes=global_attributes,)


def do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year):
    """
    Speciate the monthly HTAPv2 industry NMVOC emissions (3 sub-sectors:
    combustion 'ind', extraction 'exf' and solvents 'sol') into RETRO VOC
    groups, combining the sub-sector ratios that exist for each group.

    :param filename_list: List of 12 monthly input files.
    :type filename_list: list

    :param out_path: Root output path.
    :type out_path: str

    :param sector: Sector name (used only for logging/metadata).
    :type sector: str

    :param year: Year.
    :type year: int
    """
    from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf
    from hermesv3_gr.tools.coordinates_tools import create_bounds

    nmvoc_ratio_list = do_ratio_list()

    print(sector)
    print('{0} {1}'.format(type(nmvoc_ratio_list), nmvoc_ratio_list))

    for month in range(1, 13):
        print(filename_list[month - 1])
        c_lats, c_lons = extract_vars(filename_list[month - 1], ['lat', 'lon'])

        [ind, exf, sol] = extract_vars(filename_list[month - 1], ['emiss_ind', 'emiss_exf', 'emiss_sol'])

        for voc, ratio_file in nmvoc_ratio_list.items():
            print('{0} {1}'.format(voc, ratio_file))
            data = {
                'name': voc,
                'units': 'kg m-2 s-1',
            }
            # Combine only the sub-sector ratios available for each VOC group.
            if voc in ['voc02', 'voc03', 'voc04', 'voc05', 'voc07', 'voc08', 'voc12', 'voc13']:
                [r_inc, r_exf] = extract_vars(ratio_file, ['inc', 'exf'])
                data.update({'data': ind['data'] * r_inc['data'] + exf['data'] * r_exf['data']})
            elif voc in ['voc01', 'voc23', 'voc25']:
                [r_inc, r_sol] = extract_vars(ratio_file, ['inc', 'sol'])
                data.update({'data': ind['data'] * r_inc['data'] + sol['data'] * r_sol['data']})
            elif voc in ['voc09', 'voc16', 'voc21', 'voc22', 'voc24']:
                [r_inc] = extract_vars(ratio_file, ['inc'])
                data.update({'data': ind['data'] * r_inc['data']})
            elif voc in ['voc18', 'voc19', 'voc20']:
                [r_sol] = extract_vars(ratio_file, ['sol'])
                data.update({'data': sol['data'] * r_sol['data']})
            else:
                [r_inc, r_exf, r_sol] = extract_vars(ratio_file, ['inc', 'exf', 'sol'])
                data.update({'data': ind['data'] * r_inc['data'] + exf['data'] * r_exf['data'] +
                                     sol['data'] * r_sol['data']})

            global_attributes = {
                'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, voc),
                'Conventions': 'CF-1.6',
                'institution': 'European Commission, Joint Research Centre (JRC)',
                'source': 'HTAPv2',
                'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' +
                           '2017-04-28: ...',
                'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' +
                              'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php',
                'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                           '(Barcelona Supercomputing Center)',
            }

            out_path_aux = os.path.join(out_path, voc + '_industry')
            if not os.path.exists(out_path_aux):
                os.makedirs(out_path_aux)

            out_path_aux = os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(voc, year, str(month).zfill(2)))
            print(out_path_aux)
            write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data],
                         boundary_latitudes=create_bounds(c_lats['data']),
                         boundary_longitudes=create_bounds(c_lons['data']),
                         global_attributes=global_attributes,)


def do_nmvoc_year_transformation(filename, out_path, sector, year):
    """
    Speciate the annual HTAPv2 NMVOC emissions (SHIPS / AIR_*) into VOC groups
    using the CSV split factors and write one NetCDF per VOC group.

    :param filename: Path to the annual NMVOC input file.
    :type filename: str

    :param out_path: Root output path.
    :type out_path: str

    :param sector: Sector name.
    :type sector: str

    :param year: Year.
    :type year: int
    """
    import os
    import pandas as pd
    from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf
    from hermesv3_gr.tools.coordinates_tools import create_bounds

    nmvoc_ratio_file = do_ratio_list(sector)['all']
    nmvoc_ratio_list = pd.read_csv(nmvoc_ratio_file, sep=';')

    print(filename)  # debug 'hola->' prefix removed

    c_lats, c_lons = extract_vars(filename, ['lat', 'lon'])

    [data] = extract_vars(filename, ['emi_nmvoc'])

    for i, voc_ratio in nmvoc_ratio_list.iterrows():
        pollutant = voc_ratio['voc_group']
        ratio = voc_ratio['factor']

        data_aux = data.copy()
        data_aux['data'] = data['data'] * ratio
        data_aux['data'] = data_aux['data'].reshape((1,) + data_aux['data'].shape)
        data_aux['name'] = pollutant
        data_aux['units'] = 'kg m-2 s-1'
        global_attributes = {
            'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, pollutant),
            'Conventions': 'CF-1.6',
            'institution': 'European Commission, Joint Research Centre (JRC)',
            'source': 'HTAPv2',
            'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' +
                       '2017-04-28: ...',
            'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' +
                          'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php',
            'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                       '(Barcelona Supercomputing Center)\n ' +
                       'HTAP contact: greet.maenhout@jrc.ec.europa.eu',
        }

        out_path_aux = os.path.join(out_path, pollutant + '_' + sector.lower())
        if not os.path.exists(out_path_aux):
            os.makedirs(out_path_aux)

        out_path_aux = os.path.join(out_path_aux, '{0}_{1}.nc'.format(pollutant, year))
        print(out_path_aux)
        write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data_aux],
                     boundary_latitudes=create_bounds(c_lats['data']),
                     boundary_longitudes=create_bounds(c_lons['data']),
                     global_attributes=global_attributes,)


def get_pollutant_dict():
    """Map HERMES pollutant keys to HTAPv2 file-name tokens (inactive ones kept commented for reruns)."""
    pollutant_dict = {
        # 'bc': 'BC',
        # 'co': 'CO',
        # 'nh3': 'NH3',
        # 'nox_no2': 'NOx',
        # 'oc': 'OC',
        # 'pm10': 'PM10',
        # 'pm25': 'PM2.5',
        # 'so2': 'SO2',
        'nmvoc': 'NMVOC'
    }
    return pollutant_dict


def get_sector_dict():
    """Return, per pollutant, the sectors available as monthly and annual inventories."""
    common_dict = {
        'month': ['ENERGY', 'INDUSTRY', 'RESIDENTIAL', 'TRANSPORT'],
        'year': ['SHIPS', 'AIR_CDS', 'AIR_CRS', 'AIR_LTO']
    }
    sector_dict = {
        'bc': common_dict,
        'co': common_dict,
        # NH3 has an extra AGRICULTURE sector and no annual inventories.
        'nh3': {'month': ['AGRICULTURE', 'ENERGY', 'INDUSTRY', 'RESIDENTIAL', 'TRANSPORT'],
                'year': []},
        'nox_no2': common_dict,
        'oc': common_dict,
        'pm10': common_dict,
        'pm25': common_dict,
        'so2': common_dict,
        'nmvoc': common_dict,
    }
    return sector_dict


def get_nmvoc_sector_dict():
    """Return the sectors used for the NMVOC speciation (industry uses the 3-sub-sector file)."""
    nmvoc_sectors = {'month': ['ENERGY', 'INDUSTRY_3subsectors', 'RESIDENTIAL', 'TRANSPORT'],
                     'year': ['SHIPS', 'AIR_CDS', 'AIR_CRS', 'AIR_LTO']}
    return nmvoc_sectors


def check_vocs(year):
    """
    Sanity check: for every month and sector, compare the total NMVOC mass
    against the sum of the speciated VOC groups and print the % difference.

    :param year: Year to check.
    :type year: int
    """
    from hermesv3_gr.tools.netcdf_tools import extract_vars
    for month in range(1, 13):
        for snap in ['ENERGY', 'INDUSTRY', 'RESIDENTIAL', 'TRANSPORT']:
            nmvoc_path = os.path.join(output_path, 'monthly_mean', 'nmvoc_{0}'.format(snap.lower()),
                                      'nmvoc_{0}{1}.nc'.format(year, str(month).zfill(2)))
            [new_voc] = extract_vars(nmvoc_path, ['nmvoc'])
            nmvoc_sum = new_voc['data'].sum()

            voc_sum = 0
            for voc in ['voc{0}'.format(str(x).zfill(2)) for x in range(1, 26)]:
                voc_path = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(voc, snap.lower()),
                                        '{0}_{1}{2}.nc'.format(voc, year, str(month).zfill(2)))
                if os.path.exists(voc_path):
                    [new_voc] = extract_vars(voc_path, [voc])
                    voc_sum += new_voc['data'].sum()

            print('{0} month: {4}; NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format(
                snap, nmvoc_sum, voc_sum, 100 * (nmvoc_sum - voc_sum) / nmvoc_sum, month))


if __name__ == '__main__':

    for y in list_years:
        # check_vocs(y)
        # sys.exit(1)
        for pollutant_dict in get_pollutant_dict().items():
            # monthly inventories
            for current_sector in get_sector_dict()[pollutant_dict[0]]['month']:
                input_name_aux = input_name.replace('', current_sector)
                input_name_aux = input_name_aux.replace('', str(y))
                input_name_aux = input_name_aux.replace('', pollutant_dict[1])
                file_list = [os.path.join(input_path, input_name_aux.replace('', str(aux_month)))
                             for aux_month in range(1, 13)]

                do_transformation(file_list, os.path.join(output_path, 'monthly_mean'), pollutant_dict[0],
                                  current_sector, y)
            # annual inventories
            for current_sector in get_sector_dict()[pollutant_dict[0]]['year']:
                if current_sector[0:3] == 'AIR':
                    input_name_aux = input_name_air
                else:
                    input_name_aux = input_name_ships
                input_name_aux = input_name_aux.replace('', current_sector)
                input_name_aux = input_name_aux.replace('', str(y))
                input_name_aux = input_name_aux.replace('', pollutant_dict[1])
                input_name_aux = os.path.join(input_path, input_name_aux)

                do_transformation_annual(input_name_aux, os.path.join(output_path, 'yearly_mean', ),
                                         pollutant_dict[0], current_sector, y)

        # NMVOC speciation into VOC groups
        for current_sector in get_nmvoc_sector_dict()['month']:
            if current_sector == 'INDUSTRY_3subsectors':
                input_name_aux = input_name_nmvoc_industry
            else:
                input_name_aux = input_name
            input_name_aux = input_name_aux.replace('', 'NMVOC')
            input_name_aux = input_name_aux.replace('', current_sector)
            input_name_aux = input_name_aux.replace('', str(y))
            file_list = [os.path.join(input_path, input_name_aux.replace('', str(aux_month)))
                         for aux_month in range(1, 13)]

            if current_sector == 'INDUSTRY_3subsectors':
                do_nmvoc_industry_month_transformation(file_list, os.path.join(output_path, 'monthly_mean'),
                                                       current_sector, y)
            else:
                do_nmvoc_month_transformation(file_list, os.path.join(output_path, 'monthly_mean'),
                                              current_sector, y)
        for current_sector in get_nmvoc_sector_dict()['year']:
            if current_sector[0:3] == 'AIR':
                input_name_aux = input_name_air
            else:
                input_name_aux = input_name_ships
            input_name_aux = input_name_aux.replace('', 'NMVOC')
            input_name_aux = input_name_aux.replace('', current_sector)
            input_name_aux = input_name_aux.replace('', str(y))
            input_name_aux = os.path.join(input_path, input_name_aux)
            print(input_name_aux)
            do_nmvoc_year_transformation(input_name_aux, os.path.join(output_path, 'yearly_mean'),
                                         current_sector, y)

# ---------------------------------------------------------------------------
# preproc/tno_mac_iii_preproc.py (new file, mode 100644)
# ---------------------------------------------------------------------------
#!/usr/bin/env python

# Copyright 2018 Earth Sciences Department, BSC-CNS
#
# This file is part of HERMESv3_GR.
+# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + +# ============== CONFIGURATION PARAMETERS ====================== +input_path = '/esarchive/recon/tno/tno_macc_iii/original_files/ascii' +output_path = '/esarchive/recon/tno/tno_macc_iii/yearly_mean' +input_name = 'TNO_MACC_III_emissions_v1_1_.txt' +# list_years = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011] +list_years = [2011] +voc_ratio_path = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' +vor_ratio_name = 'ratio_.nc' + +# ============================================================== + +import sys +import os + + +def get_pollutants(in_path): + """ + Finds the pollutants on the ASCII emissions table. + + :param in_path: Path to the ASCII file that contains the information of the TNO_MAC-III emissions. + :type in_path: str + + :return: List of the acronyms of the pollutants. 
+ :rtype: list + """ + import pandas as pd + + columns = list(pd.read_table(in_path, sep=';', nrows=1).columns) + return columns[6:] + + +def calculate_grid_definition(in_path): + import pandas as pd + import numpy as np + + df = pd.read_table(in_path, sep=';') + df = df[df.SourceType != 'P'] + + # Longitudes + lons = np.sort(np.unique(df.Lon)) + lons_interval = lons[1:] - lons[:-1] + print 'Lon min: {0}; Lon max: {1}; Lon inc: {2}; Lon num: {3}'.format( + df.Lon.min(), df.Lon.max(), lons_interval.min(), len(lons)) + + # Latitudes + lats = np.sort(np.unique(df.Lat)) + lats_interval = lats[1:] - lats[:-1] + print 'Lat min: {0}; Lat max: {1}; Lat inc: {2}; Lat num: {3}'.format( + df.Lat.min(), df.Lat.max(), lats_interval.min(), len(lats)) + + lats = np.arange(-90 + lats_interval.min()/2, 90, lats_interval.min(), dtype=np.float64) + lons = np.arange(-180 + lons_interval.min()/2, 180, lons_interval.min(), dtype=np.float64) + + return lats, lons, lats_interval.min(), lons_interval.min() + + +def create_pollutant_empty_list(in_path, len_c_lats, len_c_lons): + import numpy as np + + pollutant_list = [] + for pollutant in get_pollutants(in_path): + aux_dict = {} + if pollutant == 'PM2_5': + aux_dict['name'] = 'pm25' + elif pollutant == 'NOX': + aux_dict['name'] = 'nox_no2' + else: + aux_dict['name'] = pollutant.lower() + aux_dict['TNO_name'] = pollutant + aux_dict['units'] = 'kg.m-2.s-1' + # aux_dict['units'] = 'Mg.km-2.year-1' + aux_dict['data'] = np.zeros((len_c_lats, len_c_lons)) + # aux_dict['data'] = np.zeros((len_c_lons, len_c_lats)) + pollutant_list.append(aux_dict) + return pollutant_list + + +def do_transformation(year): + from hermesv3_gr.tools.netcdf_tools import write_netcdf, get_grid_area + from hermesv3_gr.tools.coordinates_tools import create_bounds + from datetime import datetime + import pandas as pd + import numpy as np + + in_file = os.path.join(input_path, input_name.replace('', str(year))) + + unit_factor = 1000./(365.*24.*3600.) 
# To pass from Mg/year to Kg/s + # unit_factor = 1000000 # To pass from Mg/m2.year to Mg/Km2.year + + c_lats, c_lons, lat_interval, lon_interval = calculate_grid_definition(in_file) + + b_lats = create_bounds(c_lats, number_vertices=2) + b_lons = create_bounds(c_lons, number_vertices=2) + + df = pd.read_table(in_file, sep=';') + + df_np = df[df.SourceType != 'P'] + df_p = df[df.SourceType == 'P'] + + df_np.loc[:, 'row_lat'] = np.array((df_np.Lat - (-90 + lat_interval / 2)) / lat_interval, dtype=np.int32) + df_np.loc[:, 'col_lon'] = np.array((df_np.Lon - (-180 + lon_interval / 2)) / lon_interval, dtype=np.int32) + + df_p.loc[:, 'row_lat'] = abs(np.array([c_lats]*len(df_p.Lat)) - df_p.Lat.values[:, None]).argmin(axis=1) + df_p.loc[:, 'col_lon'] = abs(np.array([c_lons]*len(df_p.Lon)) - df_p.Lon.values[:, None]).argmin(axis=1) + + df = pd.concat([df_np, df_p]) + + for name, group in df.groupby('SNAP'): + print 'snap', name + pollutant_list = create_pollutant_empty_list(in_file, len(c_lats), len(c_lons)) + + # Other mobile sources ignoring sea cells (shipping emissions) + if name == 8: + for sea in ['ATL', 'BAS', 'BLS', 'MED', 'NOS']: + group = group[group.ISO3 != sea] + + group = group.groupby(['row_lat', 'col_lon']).sum().reset_index() + + for i in xrange(len(pollutant_list)): + # pollutant_list[i]['data'][group.col_lon, group.row_lat] = group[pollutant_list[i]['TNO_name']] + pollutant_list[i]['data'][group.row_lat, group.col_lon] += group[pollutant_list[i]['TNO_name']] + pollutant_list[i]['data'] = pollutant_list[i]['data'].reshape((1,) + pollutant_list[i]['data'].shape) + # print pollutant_list[i]['data'].max() + + aux_output_path = os.path.join(output_path, '{0}_snap{1}'.format(pollutant_list[i]['name'], name)) + if not os.path.exists(aux_output_path): + os.makedirs(aux_output_path) + aux_output_path = os.path.join(aux_output_path, '{0}_{1}.nc'.format(pollutant_list[i]['name'], year)) + write_netcdf(aux_output_path, c_lats, c_lons, [pollutant_list[i]], 
date=datetime(year, month=1, day=1), + boundary_latitudes=b_lats, boundary_longitudes=b_lons) + cell_area = get_grid_area(aux_output_path) + + pollutant_list[i]['data'] = pollutant_list[i]['data']*unit_factor/cell_area + + write_netcdf(aux_output_path, c_lats, c_lons, [pollutant_list[i]], date=datetime(year, month=1, day=1), + boundary_latitudes=b_lats, boundary_longitudes=b_lons, cell_area=cell_area, + global_attributes={ + 'references': 'J. J. P. Kuenen, A. J. H. Visschedijk, M. Jozwicka, and H. A. C. Denier van der Gon TNO-MACC_II emission inventory; a multi-year (2003–2009) consistent high-resolution European emission inventory for air quality modelling Atmospheric Chemistry and Physics 14 10963–10976 2014', + 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + + '(Barcelona Supercomputing Center)' + } + ) + + +def get_voc_ratio(ratio_path, snap): + import numpy as np + from hermesv3_gr.tools.netcdf_tools import extract_vars + if snap == 'snap34': + snap = 'snap3' + try: + [data_list] = extract_vars(ratio_path, [snap]) + except KeyError: + return None + return data_list + + +def get_voc_list(): + return ['voc{0}'.format(str(x).zfill(2)) for x in [1,2,3,4,5,6,7,8,9,12,13,14,15,16,17,18,19,20,21,22,23,24,25]] + + +def get_sector_list(): + return ['snap{0}'.format(x) for x in [1, 2, 34, 5, 6, 71, 72, 73, 74, 8, 9]] + + +def do_voc_transformation(year): + from hermesv3_gr.tools.netcdf_tools import write_netcdf, extract_vars + from hermesv3_gr.tools.coordinates_tools import create_bounds + from warnings import warn as warning + + for snap in get_sector_list(): + in_path = os.path.join(output_path, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) + [nmvoc, c_lats, c_lons, cell_area] = extract_vars(in_path, ['nmvoc', 'lat', 'lon', 'cell_area']) + for voc in get_voc_list(): + ratio_path = os.path.join(voc_ratio_path, vor_ratio_name.replace('', voc)) + ratios_dict = get_voc_ratio(ratio_path, snap) + if ratios_dict is not None: + 
new_voc = { + 'name': voc, + 'units': 'kg.m-2.s-2' + } + + b_lats = create_bounds(c_lats['data'], number_vertices=2) + b_lons = create_bounds(c_lons['data'], number_vertices=2) + mask = ratios_dict['data'] + + new_voc['data'] = nmvoc['data'] * mask + + out_dir_aux = os.path.join(output_path, '{0}_{1}'.format(voc, snap)) + if not os.path.exists(out_dir_aux): + os.makedirs(out_dir_aux) + # print os.path.join(out_dir_aux, '{0}_{1}.nc'.format(voc, year)) + write_netcdf(os.path.join(out_dir_aux, '{0}_{1}.nc'.format(voc, year)), c_lats['data'], + c_lons['data'], [new_voc], boundary_latitudes=b_lats, boundary_longitudes=b_lons, + cell_area=cell_area['data'], + global_attributes={ + 'references': 'J. J. P. Kuenen, A. J. H. Visschedijk, M. Jozwicka, and H. A. C. Denier van der Gon TNO-MACC_II emission inventory; a multi-year (2003–2009) consistent high-resolution European emission inventory for air quality modelling Atmospheric Chemistry and Physics 14 10963–10976 2014', + 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + + '(Barcelona Supercomputing Center)' + }) + else: + warning("The pollutant {0} for the sector {1} does not exist.\n SNAP not found: {2}".format(voc, snap, ratio_path)) + + return True + + +def check_vocs(year): + from hermesv3_gr.tools.netcdf_tools import extract_vars + + for snap in get_sector_list(): + nmvoc_path = os.path.join(output_path, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) + [new_voc] = extract_vars(nmvoc_path, ['nmvoc']) + nmvoc_sum = new_voc['data'].sum() + + voc_sum = 0 + for voc in get_voc_list(): + voc_path = os.path.join(output_path, '{0}_{1}'.format(voc, snap), '{0}_{1}.nc'.format(voc, year)) + if os.path.exists(voc_path): + [new_voc] = extract_vars(voc_path, [voc]) + voc_sum += new_voc['data'].sum() + + print '{0} NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format(snap, nmvoc_sum, voc_sum, 100*(nmvoc_sum - voc_sum)/nmvoc_sum) + + + +if __name__ == '__main__': + for y in list_years: + 
do_transformation(y) + do_voc_transformation(y) + # check_vocs(y) diff --git a/preproc/tno_mac_iii_preproc_voc_ratios.py b/preproc/tno_mac_iii_preproc_voc_ratios.py new file mode 100644 index 0000000..4814278 --- /dev/null +++ b/preproc/tno_mac_iii_preproc_voc_ratios.py @@ -0,0 +1,473 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
# ============== CONFIGURATION PARAMETERS ======================
output_path = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc'
world_info_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/temporal/tz_world_country_iso3166.csv'
tno_world_mask = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/TNO_MACC-III_WorldMask.nc'
csv_path = '/esarchive/recon/tno/tno_macc_iii/original_files/TNO_MACC_NMVOC profile_country_SNAP_12_05_2010.csv'

# ==============================================================

import sys
import os


def extract_vars(netcdf_path, variables_list, attributes_list=None):
    """
    Read a set of variables (and optionally some of their attributes) from a NetCDF file.

    :param netcdf_path: Path to the NetCDF file to read.
    :type netcdf_path: str

    :param variables_list: Names of the variables to extract. The special name
        'emi_nox_no2' is read from the file variable 'emi_nox'.
    :type variables_list: list

    :param attributes_list: Variable attribute names to copy into each returned dictionary.
    :type attributes_list: list

    :return: One {'name': ..., 'data': ...} dictionary per requested variable.
    :rtype: list
    """
    from netCDF4 import Dataset

    # A mutable default argument ([]) would be shared between calls; use None.
    if attributes_list is None:
        attributes_list = []

    data_list = []
    netcdf = Dataset(netcdf_path, mode='r')
    for var in variables_list:
        if var == 'emi_nox_no2':
            var1 = var
            var2 = 'emi_nox'
        else:
            var1 = var2 = var
        dict_aux = {
            'name': var1,
            'data': netcdf.variables[var2][:],
        }
        for attribute in attributes_list:
            dict_aux.update({attribute: netcdf.variables[var2].getncattr(attribute)})
        data_list.append(dict_aux)
    netcdf.close()

    return data_list


def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list,
                 levels=None, date=None, hours=None,
                 boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None,
                 RegularLatLon=False,
                 Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None,
                 LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None):
    """
    Write a CF-1.6 style NetCDF file on a regular lat-lon, rotated-pole or
    Lambert conformal conic grid. When no projection flag is given, a regular
    lat-lon grid is assumed.

    :param netcdf_path: Output file path.
    :param center_latitudes: Cell center latitudes (1D or 2D array).
    :param center_longitudes: Cell center longitudes (1D or 2D array).
    :param data_list: List of {'name', 'units', 'data', ...} variable dictionaries.
    :param levels: Optional vertical levels (m).
    :param date: Optional reference date for the time axis; hours offsets go in 'hours'.
    :param hours: Hours since 'date' for each time step.
    :param boundary_latitudes: Optional cell boundary latitudes.
    :param boundary_longitudes: Optional cell boundary longitudes.
    :param cell_area: Optional cell area (m2) field.

    :param global_attributes: Global NetCDF attributes.
    :type global_attributes: dict

    :param RegularLatLon: Select a regular lat-lon grid.
    :param Rotated: Select a rotated-pole grid (needs rotated_lats/rotated_lons
        and the north pole coordinates).
    :param LambertConformalConic: Select an LCC grid (needs lcc_x, lcc_y,
        lat_1_2, lon_0 and lat_0).
    """
    from cf_units import Unit, encode_time
    from netCDF4 import Dataset

    # Default to a regular lat-lon grid when no projection is requested.
    if not (RegularLatLon or LambertConformalConic or Rotated):
        RegularLatLon = True
    print(netcdf_path)
    netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4")

    # ===== Dimensions =====
    if RegularLatLon:
        var_dim = ('lat', 'lon',)

        # Latitude
        if len(center_latitudes.shape) == 1:
            netcdf.createDimension('lat', center_latitudes.shape[0])
            lat_dim = ('lat',)
        elif len(center_latitudes.shape) == 2:
            netcdf.createDimension('lat', center_latitudes.shape[0])
            # NOTE(review): ('lon', 'lat') looks transposed for a 2D latitude
            # field; kept as original behaviour -- confirm before changing.
            lat_dim = ('lon', 'lat',)
        else:
            print('ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)))
            sys.exit(1)

        # Longitude
        if len(center_longitudes.shape) == 1:
            netcdf.createDimension('lon', center_longitudes.shape[0])
            lon_dim = ('lon',)
        elif len(center_longitudes.shape) == 2:
            netcdf.createDimension('lon', center_longitudes.shape[1])
            lon_dim = ('lon', 'lat',)
        else:
            print('ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)))
            sys.exit(1)
    elif Rotated:
        var_dim = ('rlat', 'rlon',)

        # Rotated Latitude
        if rotated_lats is None:
            print('ERROR: For rotated grids is needed the rotated latitudes.')
            sys.exit(1)
        netcdf.createDimension('rlat', len(rotated_lats))
        lat_dim = ('rlat', 'rlon',)

        # Rotated Longitude
        if rotated_lons is None:
            print('ERROR: For rotated grids is needed the rotated longitudes.')
            sys.exit(1)
        netcdf.createDimension('rlon', len(rotated_lons))
        lon_dim = ('rlat', 'rlon',)

    elif LambertConformalConic:
        var_dim = ('y', 'x',)

        netcdf.createDimension('y', len(lcc_y))
        lat_dim = ('y', 'x',)

        netcdf.createDimension('x', len(lcc_x))
        lon_dim = ('y', 'x',)

    # Levels
    if levels is not None:
        netcdf.createDimension('lev', len(levels))

    # Bounds
    if boundary_latitudes is not None:
        netcdf.createDimension('nv', len(boundary_latitudes[0, 0]))

    # Time (unlimited)
    netcdf.createDimension('time', None)

    # ===== Variables =====
    # Time
    if date is None:
        time = netcdf.createVariable('time', 'd', ('time',), zlib=True)
        time.units = "months since 2000-01-01 00:00:00"
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = [0.]
    else:
        time = netcdf.createVariable('time', 'd', ('time',), zlib=True)
        u = Unit('hours')
        # e.g. Unit('hour since 1970-01-01 00:00:00.0000000 UTC')
        time.units = str(
            u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)))
        time.standard_name = "time"
        time.calendar = "gregorian"
        time.long_name = "time"
        time[:] = hours

    # Latitude
    lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True)
    lats.units = "degrees_north"
    lats.axis = "Y"
    lats.long_name = "latitude coordinate"
    lats.standard_name = "latitude"
    lats[:] = center_latitudes

    if boundary_latitudes is not None:
        lats.bounds = "lat_bnds"
        lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True)
        lat_bnds[:] = boundary_latitudes

    # Longitude
    lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True)

    lons.units = "degrees_east"
    lons.axis = "X"
    lons.long_name = "longitude coordinate"
    lons.standard_name = "longitude"
    print('lons: {0} {1}'.format(lons[:].shape, center_longitudes.shape))
    lons[:] = center_longitudes
    if boundary_longitudes is not None:
        lons.bounds = "lon_bnds"
        lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True)
        lon_bnds[:] = boundary_longitudes

    if Rotated:
        # Rotated Latitude
        rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True)
        rlat.long_name = "latitude in rotated pole grid"
        rlat.units = Unit("degrees").symbol
        rlat.standard_name = "grid_latitude"
        rlat[:] = rotated_lats

        # Rotated Longitude
        rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True)
        rlon.long_name = "longitude in rotated pole grid"
        rlon.units = Unit("degrees").symbol
        rlon.standard_name = "grid_longitude"
        rlon[:] = rotated_lons
    if LambertConformalConic:
        x = netcdf.createVariable('x', 'd', ('x',), zlib=True)
        x.units = Unit("km").symbol
        x.long_name = "x coordinate of projection"
        x.standard_name = "projection_x_coordinate"
        x[:] = lcc_x

        y = netcdf.createVariable('y', 'd', ('y',), zlib=True)
        y.units = Unit("km").symbol
        y.long_name = "y coordinate of projection"
        y.standard_name = "projection_y_coordinate"
        y[:] = lcc_y

    # Cell areas are horizontal only (no level dimension).
    cell_area_dim = var_dim
    # Levels
    if levels is not None:
        var_dim = ('lev',) + var_dim
        lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True)
        lev.units = Unit("m").symbol
        lev.positive = 'up'
        lev[:] = levels

    # All variables; an auxiliary one is created when no data is given so the
    # file is still a valid (empty) dataset.
    if len(data_list) == 0:  # Bug fix: original used 'is 0' (identity, not equality).
        var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True)
        var[:] = 0
    for variable in data_list:
        var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True)
        var.units = Unit(variable['units']).symbol
        if 'long_name' in variable:
            var.long_name = str(variable['long_name'])
        if 'standard_name' in variable:
            var.standard_name = str(variable['standard_name'])
        if 'cell_method' in variable:
            var.cell_method = str(variable['cell_method'])
        var.coordinates = "lat lon"
        if cell_area is not None:
            var.cell_measures = 'area: cell_area'
        if RegularLatLon:
            var.grid_mapping = 'crs'
        elif Rotated:
            var.grid_mapping = 'rotated_pole'
        elif LambertConformalConic:
            var.grid_mapping = 'Lambert_conformal'
        try:
            var[:] = variable['data']
        except Exception:  # Bug fix: bare 'except' also swallowed SystemExit/KeyboardInterrupt.
            print('VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape))

    # Grid mapping
    if RegularLatLon:
        # CRS
        mapping = netcdf.createVariable('crs', 'i')
        mapping.grid_mapping_name = "latitude_longitude"
        mapping.semi_major_axis = 6371000.0
        mapping.inverse_flattening = 0
    elif Rotated:
        # Rotated pole
        mapping = netcdf.createVariable('rotated_pole', 'c')
        mapping.grid_mapping_name = 'rotated_latitude_longitude'
        mapping.grid_north_pole_latitude = north_pole_lat
        mapping.grid_north_pole_longitude = north_pole_lon
    elif LambertConformalConic:
        # CRS
        mapping = netcdf.createVariable('Lambert_conformal', 'i')
        mapping.grid_mapping_name = "lambert_conformal_conic"
        mapping.standard_parallel = lat_1_2
        mapping.longitude_of_central_meridian = lon_0
        mapping.latitude_of_projection_origin = lat_0

    # Cell area
    if cell_area is not None:
        c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim)
        c_area.long_name = "area of the grid cell"
        c_area.standard_name = "cell_area"
        c_area.units = Unit("m2").symbol
        c_area[:] = cell_area

    if global_attributes is not None:
        netcdf.setncatts(global_attributes)

    netcdf.close()


def get_grid_area(filename):
    """
    Calculates the area of each cell.

    :param filename: Full path to the NetCDF to calculate the cell areas.
    :type filename: str

    :return: Returns the area of each cell.
    :rtype: numpy.ndarray
    """
    from cdo import Cdo
    # Bug fix: Dataset was used without being imported in this module.
    from netCDF4 import Dataset

    cdo = Cdo()
    s = cdo.gridarea(input=filename)
    nc_aux = Dataset(s, mode='r')
    grid_area = nc_aux.variables['cell_area'][:]
    nc_aux.close()

    return grid_area


def create_bounds(coords, number_vertices=2):
    """
    Calculates the vertices coordinates.

    :param coords: Coordinates in degrees (latitude or longitude)
    :type coords: numpy.ndarray

    :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries.
        (by default 2)
    :type number_vertices: int

    :return: Array with as many elements as vertices for each value of coords.
    :rtype: numpy.ndarray
    """
    import numpy as np

    # The grid is assumed regular: one interval describes every cell.
    interval = coords[1] - coords[0]

    coords_left = coords - interval / 2
    coords_right = coords + interval / 2
    if number_vertices == 2:
        bound_coords = np.dstack((coords_left, coords_right))
    elif number_vertices == 4:
        bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left))
    else:
        raise ValueError('The number of vertices of the boudaries must be 2 or 4')

    return bound_coords


def create_voc_ratio(voc):
    """
    Create (once) the NetCDF with the country-dependent ratio map of the given
    VOC, one variable per SNAP sector.

    :param voc: VOC name ('vocNN').
    :type voc: str

    :return: True when done (also when the file already existed).
    :rtype: bool
    """
    import numpy as np
    country_values, lat, lon = extract_vars(tno_world_mask, ['timezone_id', 'lat', 'lon'])
    # Drop the leading time dimension: (1, nlat, nlon) -> (nlat, nlon).
    # NOTE(review): the original reshaped with shape[1] twice, which only works
    # when the grid is square; shape[2] is used here for the longitude axis.
    country_values = country_values['data'].reshape(
        (country_values['data'].shape[1], country_values['data'].shape[2]))
    print(output_path)
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    complete_output_path = os.path.join(output_path, 'ratio_{0}.nc'.format(voc))
    if not os.path.exists(complete_output_path):
        print('Creating ratio file for {0}\npath: {1}'.format(voc, complete_output_path))
        data_list = []
        for snap in get_sector_list(voc):
            print(snap)
            mask_factor = np.zeros(country_values.shape)
            iso_codes = get_iso_codes()
            for country_code, factor in get_country_code_and_factor(voc, snap).items():
                try:
                    mask_factor[country_values == iso_codes[country_code]] = factor
                except KeyError:
                    # Country present in the profile but not in the ISO table.
                    pass
            # To fulfill the blanks on the map with the European default ratio.
            mask_factor[mask_factor <= 0] = get_default_ratio(voc, snap)

            data_list.append({
                'name': 'snap{0}'.format(snap),
                'units': '',
                'data': mask_factor.reshape((1,) + mask_factor.shape)
            })
        write_netcdf(complete_output_path, lat['data'], lon['data'], data_list)
    else:
        print('Ratio file for {0} already created\npath: {1}'.format(voc, complete_output_path))
    return True


def get_default_ratio(voc, snap):
    """
    Return the default (European, ISO3 'EUR') ratio of the given VOC and SNAP sector.

    :param voc: VOC name ('vocNN').
    :param snap: SNAP sector number.
    :rtype: float
    """
    import pandas as pd

    df = pd.read_csv(csv_path, sep=';')

    df = df.loc[df['vcode'] == voc.replace('voc', 'v'), :]
    df = df.loc[df['snap'] == snap, :]

    return df.loc[df['ISO3'] == 'EUR', 'fr'].item()


def get_iso_codes():
    """
    Return the mapping from ISO3 country code (alpha) to numeric country code.

    :rtype: dict
    """
    import pandas as pd

    df = pd.read_csv(world_info_path, sep=';')
    # Time zone columns are not needed to relate alpha and numeric codes.
    del df['time_zone'], df['time_zone_code']
    df = df.drop_duplicates().dropna()
    df = df.set_index('country_code_alpha')
    codes_dict = df.to_dict()
    codes_dict = codes_dict['country_code']

    return codes_dict


def get_voc_list():
    """
    Return all VOC codes present in the speciation profile CSV, renamed from
    'vNN' to 'vocNN'.

    :rtype: numpy.ndarray
    """
    import pandas as pd

    df = pd.read_csv(csv_path, sep=';')
    del df['ISO3'], df['snap'], df['output substance name'], df['fr']
    df = df.drop_duplicates().dropna()
    # In-place rename on the underlying array, as in the original.
    voc_list = df.vcode.values
    for i in range(len(voc_list)):
        voc_list[i] = voc_list[i].replace('v', 'voc')
    return df.vcode.values


def get_sector_list(voc):
    """
    Return the SNAP sector numbers for which the given VOC has a profile.

    :param voc: VOC name ('vocNN').
    :rtype: numpy.ndarray
    """
    import pandas as pd
    voc = voc.replace('voc', 'v')
    df = pd.read_csv(csv_path, sep=';')
    df = df[df.vcode == voc]
    del df['ISO3'], df['vcode'], df['output substance name'], df['fr']
    df = df.drop_duplicates().dropna()
    return df.snap.values


def get_sector_list_text(voc):
    """
    Return the SNAP sectors of the given VOC as 'snapN' strings.

    :param voc: VOC name ('vocNN').
    :rtype: list
    """
    voc = voc.replace('voc', 'v')
    # Bug fix: the original called get_sector_list(csv_path, voc); the function
    # only takes the VOC code (csv_path is a module-level constant).
    sector_list = get_sector_list(voc)
    new_list = []
    for int_sector in sector_list:
        new_list.append('snap{0}'.format(int_sector))
    return new_list


def get_country_code_and_factor(voc, snap):
    """
    Return the {ISO3 code: ratio} mapping of the given VOC and SNAP sector.

    :param voc: VOC name ('vocNN').
    :param snap: SNAP sector number.
    :rtype: dict
    """
    import pandas as pd
    voc = voc.replace('voc', 'v')
    df = pd.read_csv(csv_path, sep=';')
    df = df[df.vcode == voc]
    df = df[df.snap == snap]
    del df['snap'], df['vcode'], df['output substance name']
    df = df.drop_duplicates().dropna()
    df = df.set_index('ISO3')

    country_dict = df.to_dict()
    country_dict = country_dict['fr']

    return country_dict


if __name__ == '__main__':
    for voc in get_voc_list():
        create_voc_ratio(voc)

# ---- patch metadata (original commit) ----
# diff --git a/preproc/wiedinmyer_preproc.py b/preproc/wiedinmyer_preproc.py
# new file mode 100755
# index 0000000..f0616d0
# --- /dev/null
# +++ b/preproc/wiedinmyer_preproc.py
# @@ -0,0 +1,235 @@

#!/usr/bin/env python

# Copyright 2018 Earth Sciences Department, BSC-CNS
#
# This file is part of HERMESv3_GR.
#
# HERMESv3_GR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HERMESv3_GR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HERMESv3_GR. If not, see .
# ============== CONFIGURATION PARAMETERS ======================
input_path = '/esarchive/recon/ucar/wiedinmyer/original_files/'
output_path = '/esarchive/recon/ucar/wiedinmyer/'
list_pollutants = ['co2', 'co', 'so2', 'nox_no', 'nh3', 'ch4', 'c2h2', 'c2h4', 'c3h6', 'ch3oh', 'ch2o', 'ch3cooh',
                   'hcn', 'c6h6', 'pcb', 'pah', 'pcdd', 'pbdd', 'nmoc', 'hcl', 'hg', 'pm25', 'pm10', 'oc', 'bc']

input_name = 'ALL_Emiss_04282014.nc'
year = 2010
# ==============================================================

import sys
import os
import timeit
from netCDF4 import Dataset
import cf_units


def out_pollutant_to_in_pollutant(out_p):
    """
    Map a HERMES output pollutant name to the corresponding WIEDINMYER variable name.

    :param out_p: HERMES pollutant name (e.g. 'co2').
    :type out_p: str

    :return: WIEDINMYER variable name (e.g. 'CO2grid').
    :rtype: str
    """
    pollutant_dict = {
        'co2': 'CO2grid',
        'co': 'COgrid',
        'so2': 'SO2grid',
        'nox_no': 'NOxgrid',
        'nh3': 'NH3grid',
        'ch4': 'CH4grid',
        'c2h2': 'C2H2grid',
        'c2h4': 'C2H4grid',
        'c3h6': 'C3H6grid',
        'ch3oh': 'MEOHgrid',
        'ch2o': 'FORMgrid',
        'ch3cooh': 'AcetAcidgrid',
        'hcn': 'HCNgrid',
        'c6h6': 'BENZgrid',
        'pcb': 'PCBgrid',
        'pah': 'PAHgrid',
        'pcdd': 'PCDDgrid',
        'pbdd': 'PBDDgrid',
        'nmoc': 'NMOCgrid',
        'hcl': 'HClgrid',
        'hg': 'Hggrid',
        'pm25': 'PM25grid',
        'pm10': 'PM10grid',
        'oc': 'OCgrid',
        'bc': 'BCgrid',
    }

    return pollutant_dict[out_p]


def do_transformation(filename):
    """
    Re-write the WIEDINMYER inputs following ES anc CF-1.6 conventions.

    One NetCDF per pollutant is written under <output_path>/yearly_mean/, with
    the annual emission converted to a kg.m-2.s-1 flux.

    :param filename: Name of the input file.
    :type filename: str
    """
    import numpy as np
    from hermesv3_gr.tools.netcdf_tools import get_grid_area
    from cf_units import Unit

    print(filename)

    grid_area = get_grid_area(filename)

    nc_in = Dataset(filename, mode='r')

    # Cell center coordinates.
    lats = nc_in.variables['lat'][:]
    lons = nc_in.variables['lon'][:]

    factor = 1000000. / (365. * 24. * 3600.)  # To pass from Gg/m2.year to Kg/m2.s

    for output_pollutant in list_pollutants:
        input_pollutant = out_pollutant_to_in_pollutant(output_pollutant)

        data = nc_in.variables[input_pollutant][:]
        data = np.nan_to_num(data)
        data = data / grid_area  # To pass from Gg/year to Gg/m2.year
        data = data * factor
        data_attributes = {'name': output_pollutant,
                           'long_name': nc_in.variables[input_pollutant].long_name,
                           'units': Unit('kg.m-2.s-1').symbol,
                           # Typo fix ('coordiantes'): internal key, read only by
                           # write_netcdf below, which is fixed consistently.
                           'coordinates': 'lat lon',
                           'grid_mapping': 'crs'}
        data = np.array(data)

        out_path_aux = os.path.join(output_path, 'yearly_mean', output_pollutant)
        if not os.path.exists(out_path_aux):
            os.makedirs(out_path_aux)
        # Bug fix: the original passed the month as the literal '01' (octal
        # notation, a syntax error in Python 3); plain 1 has the same value.
        write_netcdf(os.path.join(out_path_aux, '{0}_{1}.nc'.format(output_pollutant, year)),
                     data, data_attributes, lats, lons, grid_area, year, 1)
    nc_in.close()


def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, grid_cell_area, time_year, time_month):
    """
    Write a NetCDF with the given information.

    :param output_name_path: Complete path to the output NetCDF to be stored.
    :type output_name_path: str

    :param data: Data of the variable to be stored.
    :type data: numpy.ndarray

    :param data_atts: Information of the data to fill the data attributes of the NetCDF variable.
        'name': Name of the pollutant.
        'long_name': Long name of the pollutant.
        'units': Units of the pollutant.
        'coordinates': Variables that contains the coordinates of the data.
        'grid_mapping': Mapping variable.
    :type data_atts: dict

    :param center_lats: Latitudes of the center of each cell.
    :type center_lats: numpy.ndarray

    :param center_lons: Longitudes of the center of each cell.
    :type center_lons: numpy.ndarray

    :param grid_cell_area: Area (m2) of each cell.
    :type grid_cell_area: numpy.ndarray

    :param time_year: Year.
    :type time_year: int

    :param time_month: Number of the month.
    :type time_month: int
    """
    from hermesv3_gr.tools.coordinates_tools import create_bounds

    print(output_name_path)
    # Creating NetCDF & Dimensions
    nc_output = Dataset(output_name_path, mode='w', format="NETCDF4")
    nc_output.createDimension('nv', 2)
    nc_output.createDimension('lon', center_lons.shape[0])
    nc_output.createDimension('lat', center_lats.shape[0])
    nc_output.createDimension('time', None)

    # TIME
    time = nc_output.createVariable('time', 'd', ('time',), zlib=True)
    time.units = "months since {0}-{1}-01 00:00:00".format(time_year, str(time_month).zfill(2))
    time.standard_name = "time"
    time.calendar = "gregorian"
    time.long_name = "time"
    time[:] = [0]

    # LATITUDE
    lat = nc_output.createVariable('lat', 'f', ('lat',), zlib=True)
    lat.bounds = "lat_bnds"
    lat.units = "degrees_north"
    lat.axis = "Y"
    lat.long_name = "latitude"
    lat.standard_name = "latitude"
    lat[:] = center_lats

    lat_bnds = nc_output.createVariable('lat_bnds', 'f', ('lat', 'nv',), zlib=True)
    lat_bnds[:] = create_bounds(center_lats)

    # LONGITUDE
    lon = nc_output.createVariable('lon', 'f', ('lon',), zlib=True)
    lon.bounds = "lon_bnds"
    lon.units = "degrees_east"
    lon.axis = "X"
    lon.long_name = "longitude"
    lon.standard_name = "longitude"
    lon[:] = center_lons

    lon_bnds = nc_output.createVariable('lon_bnds', 'f', ('lon', 'nv',), zlib=True)
    lon_bnds[:] = create_bounds(center_lons)

    # VARIABLE
    nc_var = nc_output.createVariable(data_atts['name'], 'f', ('time', 'lat', 'lon',), zlib=True)
    nc_var.units = data_atts['units']
    nc_var.long_name = data_atts['long_name']
    nc_var.coordinates = data_atts['coordinates']
    nc_var.grid_mapping = data_atts['grid_mapping']
    nc_var.cell_measures = 'area: cell_area'
    # Prepend the (length 1) time dimension.
    nc_var[:] = data.reshape((1,) + data.shape)

    # CELL AREA
    cell_area = nc_output.createVariable('cell_area', 'f', ('lat', 'lon',))
    cell_area.long_name = "area of the grid cell"
    cell_area.standard_name = "area"
    cell_area.units = "m2"
    cell_area[:] = grid_cell_area

    # CRS
    crs = nc_output.createVariable('crs', 'i')
    crs.grid_mapping_name = "latitude_longitude"
    crs.semi_major_axis = 6371000.0
    crs.inverse_flattening = 0

    nc_output.setncattr('title', 'Annual trash burning emissions', )
    nc_output.setncattr('Conventions', 'CF-1.6', )
    nc_output.setncattr('institution', 'UCAR', )
    nc_output.setncattr('source', 'WIEDINMYER', )
    nc_output.setncattr('history', 'Re-writing of the WIEDINMYER input to follow the CF 1.6 conventions;\n' +
                        '2014-04-28: Created by C. Wiedinmyer;\n' +
                        '2017-04-04: Added time dimension (UNLIMITED);\n' +
                        '2017-04-04: Added boundaries;\n' +
                        '2017-04-04: Added global attributes;\n' +
                        '2017-04-04: Re-naming pollutant;\n' +
                        '2017-04-04: Added cell_area variable;\n')
    nc_output.setncattr('references', '', )
    nc_output.setncattr('comment', 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' +
                        '(Barcelona Supercomputing Center); Original file from C. Wiedinmyer', )

    nc_output.close()


if __name__ == '__main__':
    starting_time = timeit.default_timer()

    do_transformation(os.path.join(input_path, input_name))

    print('Time(s): {0}'.format(timeit.default_timer() - starting_time))

# ---- patch metadata (original commit) ----
# diff --git a/setup.py b/setup.py
# index 4a3f403..8c1491b 100644
# --- a/setup.py
# +++ b/setup.py
# @@ -1,5 +1,23 @@
# #!/usr/bin/env python

# Copyright 2018 Earth Sciences Department, BSC-CNS
#
# This file is part of HERMESv3_GR.
#
# HERMESv3_GR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
+# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + from os import path from setuptools import find_packages from setuptools import setup @@ -37,13 +55,13 @@ setup( 'pyproj', 'configargparse', 'cf_units>=1.1.3', - 'calendar', - 'ESMPy', + # 'calendar', + 'ESMPy>=7.1.0', 'holidays', 'pytz', - 're', + # 're', 'timezonefinder', - 'unittest' + # 'unittest' ], packages=find_packages(), classifiers=[ diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_temporal.py b/tests/unit/test_temporal.py new file mode 100644 index 0000000..3ffed3c --- /dev/null +++ b/tests/unit/test_temporal.py @@ -0,0 +1,427 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS + +# This file is part of HERMESv3. + +# HERMESv3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# HERMESv3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with HERMESv3. If not, see . 
+ +import os +from datetime import datetime, timedelta +import unittest +import hermesv3_gr.config.settings as settings + +from hermesv3_gr.modules.temporal.temporal import TemporalDistribution + + +class TestTemporalDistribution(unittest.TestCase): + def setUp(self): + pass + + def testing_calculate_ending_date_1hour(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 1, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + self.assertEqual( + temporal.calculate_ending_date(), + datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0)) + + def testing_calculate_ending_date_24hours(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 24, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + self.assertEqual( + temporal.calculate_ending_date(), + datetime(year=2016, month=01, day=01, hour=23, minute=0, second=0)) + + def testing_calculate_ending_date_3hour_each2(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 3, 2, + 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + self.assertEqual( + temporal.calculate_ending_date(), + datetime(year=2016, month=01, day=01, hour=4, minute=0, second=0)) + + def testing_def_calculate_timedelta_3hour_each2(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 3, 2, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + self.assertEqual( + temporal.calculate_timedelta(datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0)), + timedelta(hours=2)) + + def testing_def_calculate_timedelta_month(self): + temporal = TemporalDistribution( + datetime(year=2017, month=02, day=01, hour=0, minute=0, second=0), 'monthly', 1, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + self.assertEqual( + 
temporal.calculate_timedelta(datetime(year=2017, month=02, day=01, hour=0, minute=0, second=0)), + timedelta(hours=24*28)) + + def testing_def_calculate_timedelta_month_leapyear(self): + temporal = TemporalDistribution( + datetime(year=2016, month=02, day=01, hour=0, minute=0, second=0), 'monthly', 1, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + self.assertEqual( + temporal.calculate_timedelta(datetime(year=2016, month=02, day=01, hour=0, minute=0, second=0)), + timedelta(hours=24*29)) + + def testing_get_tz_from_id(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.get_tz_from_id(309), + "Europe/Andorra") + + def testing_get_id_from_tz(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.get_id_from_tz("Europe/Andorra"), + 309) + + def testing_parse_tz(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.parse_tz("America/Fort_Nelson"), + 'America/Vancouver') + + def testing_find_closest_timezone_BCN(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.find_closest_timezone(41.390205, 2.154007), + 'Europe/Madrid') + + def testing_find_closest_timezone_MEX(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.find_closest_timezone(19.451054, -99.125519), + "America/Mexico_City") + + def testing_find_closest_timezone_Kuwait(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.find_closest_timezone(29.378586, 47.990341), + "Asia/Kuwait") + + def testing_find_closest_timezone_Shanghai(self): + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + + self.assertEqual( + temporal.find_closest_timezone(31.267401, 121.522179), + "Asia/Shanghai") + + def testing_create_netcdf_timezones(self): + import numpy as np + from hermesv3_gr.modules.grids.grid import Grid + from hermesv3_gr.tools.netcdf_tools import extract_vars + + aux_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing' + if not os.path.exists(aux_path): + os.makedirs(aux_path) + + grid = Grid('global', aux_path) + 
grid.center_latitudes = np.array([[41.390205, 19.451054], [29.378586, 31.267401]]) + grid.center_longitudes = np.array([[2.154007, -99.125519], [47.990341, 121.522179]]) + + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + aux_path) + + self.assertTrue(temporal.create_netcdf_timezones(grid)) + + [timezones] = extract_vars(temporal.netcdf_timezones, ['timezone_id']) + timezones = list(timezones['data'][0, :].astype(int).flatten()) + + self.assertEqual(timezones, + [335, 147, 247, 268]) + + def testing_calculate_timezones(self): + self.testing_create_netcdf_timezones() + + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + self.assertEqual(temporal.calculate_timezones().tolist(), + [['Europe/Madrid', "America/Mexico_City"], ["Asia/Kuwait", "Asia/Shanghai"]]) + + def testing_calculate_2d_temporal_factors(self): + self.testing_create_netcdf_timezones() + + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + timezones = temporal.calculate_timezones() + + temporal.monthly_profile = {1: 1., + 2: 1., + 3: 1., + 4: 1., + 5: 1., + 6: 1., + 7: 1., + 8: 1., + 9: 1., + 10: 1., + 11: 1., + 12: 1.} + temporal.daily_profile_id = {0: 1., + 1: 1., + 2: 1., + 3: 1., + 4: 1., + 5: 1., + 6: 1.} + temporal.hourly_profile = {0: 1., + 1: 1., + 2: 1., + 3: 1., + 4: 1., + 5: 1., + 6: 1., + 7: 1., + 8: 1., + 9: 1., + 10: 1., + 11: 1., + 12: 1., + 13: 20., + 14: 1., + 15: 1., + 16: 1., + 17: 1., + 18: 1., + 19: 1., + 20: 1., + 21: 1., + 22: 1., + 23: 1.} + + self.assertEqual(temporal.calculate_2d_temporal_factors(datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), timezones).tolist(), + [[20., 1.], [1., 1.]]) + + def testing_do_temporal(self): + import numpy as np + from hermesv3_gr.modules.grids.grid import Grid + self.testing_create_netcdf_timezones() + + aux_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing' + + temporal = TemporalDistribution( + datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), 'hourly', 1, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + aux_path) + temporal.monthly_profile = {1: 1., + 2: 1., + 3: 1., + 4: 1., + 5: 1., + 6: 1., + 7: 1., + 8: 1., + 9: 1., + 10: 1., + 11: 1., + 12: 1.} + temporal.daily_profile_id = {0: 1., + 1: 1., + 2: 1., + 3: 1., + 4: 1., + 5: 1., + 6: 1.} + 
temporal.hourly_profile = {0: 1., + 1: 1., + 2: 1., + 3: 1., + 4: 1., + 5: 1., + 6: 1., + 7: 1., + 8: 1., + 9: 1., + 10: 1., + 11: 1., + 12: 1., + 13: 20., + 14: 1., + 15: 1., + 16: 1., + 17: 1., + 18: 1., + 19: 1., + 20: 1., + 21: 1., + 22: 1., + 23: 1.} + + grid = Grid('global', aux_path) + grid.center_latitudes = np.array([[41.390205, 19.451054], [29.378586, 31.267401]]) + grid.center_longitudes = np.array([[2.154007, -99.125519], [47.990341, 121.522179]]) + data_in = [{'data': np.array([[10., 10.], [10., 10.]])}] + # data_out = [{'data': np.array([[200., 10.], [10., 10.]])}] + data_out = temporal.do_temporal(data_in, grid) + + self.assertEqual(data_out[0]['data'].tolist(), [[[200., 10.], [10., 10.]]]) + + def testing_calculate_weekdays_no_leap_year(self): + from datetime import datetime + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + self.assertEqual(temporal.calculate_weekdays(datetime(year=2017, month=02, day=1)), + {0: 4, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}) + + def testing_calculate_weekdays_leap_year(self): + from datetime import datetime + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + 
'/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + self.assertEqual(temporal.calculate_weekdays(datetime(year=2016, month=02, day=1)), + {0: 5, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}) + + def testing_calculate_weekdays_factors_full_month(self): + from datetime import datetime + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + + self.assertEqual(round(temporal.calculate_weekday_factor_full_month( + {0: 0.8, 1: 1.2, 2: 0.5, 3: 1.5, 4: 0.9, 5: 0.9, 6: 1.2}, {0: 5, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}), 5), + round(0.2/29, 5)) + + def testing_calculate_rebalance_factor(self): + from datetime import datetime + temporal = TemporalDistribution( + datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + + self.assertEqual(round(temporal.calculate_rebalance_factor( + {0: 0.8, 1: 1.2, 2: 0.5, 3: 1.5, 4: 0.9, 5: 0.9, 6: 1.2}, datetime(year=2016, month=02, day=1)), 5), + round(0.2/29, 5)) + + # def testing_get_temporal_daily_profile(self): + # from datetime import datetime + # from calendar import monthrange + # date = datetime(year=2016, month=02, day=1) + # temporal = 
TemporalDistribution( + # date, 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # + # print temporal.get_temporal_daily_profile(date) + -- GitLab From 83ad29bb56a2fafb6d84e78e242cb01e1eee4d1d Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 6 Sep 2018 18:23:12 +0200 Subject: [PATCH 03/51] updating scripts --- preproc/gfas12_preproc.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/preproc/gfas12_preproc.py b/preproc/gfas12_preproc.py index 51b918b..301c65f 100755 --- a/preproc/gfas12_preproc.py +++ b/preproc/gfas12_preproc.py @@ -40,9 +40,6 @@ import pandas as pd import datetime import cdo -parentPath = os.path.abspath(os.path.join('..', '..')) -if parentPath not in sys.path: - sys.path.insert(0, parentPath) from hermesv3_gr.tools.coordinates_tools import * -- GitLab From e2714f797321c866acb3dd46b5efb1a2681473e3 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 11:07:23 +0200 Subject: [PATCH 04/51] Cleaning code --- hermesv3_gr/modules/vertical/vertical.py | 2 +- preproc/ceds_preproc.py | 122 +++++++++++++++++---- preproc/eclipsev5a_preproc.py | 99 ++++++++++++++--- preproc/edgarv432_ap_preproc.py | 123 +++++++++++++++++----- preproc/edgarv432_voc_preproc.py | 118 +++++++++++++++++---- preproc/emep_preproc.py | 47 +++++---- preproc/gfas12_preproc.py | 106 +++++++++++++++---- preproc/htapv2_preproc.py | 24 +++-- preproc/tno_mac_iii_preproc.py | 68 +++++++++--- preproc/tno_mac_iii_preproc_voc_ratios.py | 61 +++++------ preproc/wiedinmyer_preproc.py | 27 +++-- 11 files changed, 605 insertions(+), 192 deletions(-) diff --git 
a/hermesv3_gr/modules/vertical/vertical.py b/hermesv3_gr/modules/vertical/vertical.py index bd7f3f8..b6c786b 100644 --- a/hermesv3_gr/modules/vertical/vertical.py +++ b/hermesv3_gr/modules/vertical/vertical.py @@ -182,7 +182,7 @@ class VerticalDistribution(object): Calculates the vertical distribution using the given data and weights. :param data: Emissions to be vertically distributed. - :type data: numpy.ndarray + :type data: numpy.array :param weights: Weights of each layer. :type weights: list of float diff --git a/preproc/ceds_preproc.py b/preproc/ceds_preproc.py index 79f4f67..068c261 100644 --- a/preproc/ceds_preproc.py +++ b/preproc/ceds_preproc.py @@ -18,14 +18,20 @@ # along with HERMESv3_GR. If not, see . +import os +import sys + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/jgcri/ceds/original_files' output_path = '/esarchive/recon/jgcri/ceds' list_pollutants = ['BC', 'CO', 'NH3', 'NMVOC', 'NOx', 'OC', 'SO2'] -voc_pollutants = ['VOC01', 'VOC02', 'VOC03', 'VOC04', 'VOC05', 'VOC06', 'VOC07', 'VOC08', 'VOC09', 'VOC12', 'VOC13', 'VOC14', 'VOC15', 'VOC16', 'VOC17', 'VOC18', 'VOC19', 'VOC20', 'VOC21', 'VOC22', 'VOC23', 'VOC24', 'VOC25'] +voc_pollutants = ['VOC01', 'VOC02', 'VOC03', 'VOC04', 'VOC05', 'VOC06', 'VOC07', 'VOC08', 'VOC09', 'VOC12', 'VOC13', + 'VOC14', 'VOC15', 'VOC16', 'VOC17', 'VOC18', 'VOC19', 'VOC20', 'VOC21', 'VOC22', 'VOC23', 'VOC24', + 'VOC25'] list_sectors = ['agriculture', 'energy', 'industry', 'transport', 'residential', 'solvents', 'waste', 'ships'] -#list_years = from 1950 to 2014 +# list_years = from 1950 to 2014 list_years = [2010] input_name = '-em-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim_gr_01-12.nc' voc_input_name = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim-supplemental-data_gr_01-12.nc' @@ -33,11 +39,17 @@ do_air = True air_input_name = '-em-AIR-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26_gr_01-12.nc' # 
============================================================== -import sys -import os - def voc_to_vocname(voc): + """ + Gets the voc complete name from the VOCXX format + + :param voc: Voc number in the format VOCXX + :type voc: str + + :return: Voc name + :rtype:str + """ voc_dict = { 'VOC01': 'alcohols', 'VOC02': 'ethane', @@ -68,6 +80,15 @@ def voc_to_vocname(voc): def sector_to_index(sector): + """ + Gets the index where are allocated the emissions for the selected sector. + + :param sector: Name to the sector to get the index position. + :type sector: str + + :return: Index of the position of the current sector + :rtype: int + """ sector_dict = { 'agriculture': 0, 'energy': 1, @@ -83,6 +104,21 @@ def sector_to_index(sector): def get_input_name(pollutant, year, air=False): + """ + Gets the path for the input file name + + :param pollutant: Name of the pollutant + :type pollutant: str + + :param year: Year to extract + :type year: int + + :param air: Indicates if the input file is related with air emissions + :type air: bool + + :return: Path to the input file + :rtype: str + """ if air: file_name = air_input_name.replace('', pollutant) elif pollutant in list_pollutants: @@ -107,6 +143,27 @@ def get_input_name(pollutant, year, air=False): def get_full_year_data(file_name, pollutant, sector, year, air=False): + """ + Gets the needed date in the input format. + + :param file_name: path to the input file. + :type file_name: str + + :param pollutant: Name of the pollutant. + :type pollutant: str + + :param sector: Name of the sector. + :type sector: str + + :param year: Year to calculate. + :type year: int + + :param air: Indicates if the input file is related with air emissions + :type air: bool + + :return: Data of the selected emission. 
+ :rtype: numpy.array + """ from netCDF4 import Dataset from datetime import datetime import cf_units @@ -125,13 +182,25 @@ def get_full_year_data(file_name, pollutant, sector, year, air=False): elif pollutant in list_pollutants: data = nc.variables['{0}_em_anthro'.format(pollutant)][i_time:i_time+12, sector_to_index(sector), :, :] elif pollutant in voc_pollutants: - data = nc.variables['{0}-{1}_em_speciated_VOC'.format(pollutant, voc_to_vocname(pollutant).replace('-', '_'))][i_time:i_time+12, sector_to_index(sector), :, :] + data = nc.variables['{0}-{1}_em_speciated_VOC'.format( + pollutant, voc_to_vocname(pollutant).replace('-', '_'))][i_time:i_time+12, sector_to_index(sector), :, :] + else: + data = None nc.close() return data def get_global_attributes(file_name): + """ + Gets the global attributes of the input file. + + :param file_name: Path to the NetCDF file + :type file_name: str + + :return: Global attributes + :rtype: dict + """ from netCDF4 import Dataset nc = Dataset(file_name, mode='r') @@ -145,6 +214,12 @@ def get_global_attributes(file_name): def do_transformation(year): + """ + Does the transformation for the selected year + + :param year: Year to calculate + :type year: int + """ from datetime import datetime from hermesv3_gr.tools.netcdf_tools import extract_vars, get_grid_area, write_netcdf for pollutant in list_pollutants + voc_pollutants: @@ -172,16 +247,23 @@ def do_transformation(year): 'units': 'kg.m-2.s-1', 'data': data[month - 1, :, :].reshape((1,) + cell_area.shape) } - write_netcdf(os.path.join(file_path, '{0}_{1}{2}.nc'.format(pollutant_name, year, str(month).zfill(2))), - c_lats['data'], c_lons['data'], [emission], - date=datetime(year=year, month=month, day=1), - boundary_latitudes=b_lats['data'], boundary_longitudes=b_lons['data'], - cell_area=cell_area, global_attributes=global_attributes) + write_netcdf( + os.path.join(file_path, '{0}_{1}{2}.nc'.format(pollutant_name, year, str(month).zfill(2))), + c_lats['data'], c_lons['data'], 
[emission], date=datetime(year=year, month=month, day=1), + boundary_latitudes=b_lats['data'], boundary_longitudes=b_lons['data'], cell_area=cell_area, + global_attributes=global_attributes) else: raise IOError('File not found {0}'.format(file_name)) + return True def do_air_transformation(year): + """ + Does the transformations of the ari emissions for the selected year. + + :param year: Year to calculate + :type year: int + """ from datetime import datetime from hermesv3_gr.tools.netcdf_tools import extract_vars, get_grid_area, write_netcdf @@ -193,7 +275,7 @@ def do_air_transformation(year): global_attributes = get_global_attributes(file_name) - data = get_full_year_data(file_name, pollutant, None, year, air=True) + data = get_full_year_data(file_name, pollutant, '', year, air=True) if pollutant == 'NOx': pollutant_name = 'nox_no2' @@ -211,6 +293,9 @@ def do_air_transformation(year): data_aux = data[:, 2:14 + 1, :, :].sum(axis=1) elif sector == 'air_crs': data_aux = data[:, 15:24 + 1, :, :].sum(axis=1) + else: + print 'ERROR' + sys.exit(1) for month in xrange(1, 12 + 1, 1): emission = { @@ -218,13 +303,14 @@ def do_air_transformation(year): 'units': 'kg.m-2.s-1', 'data': data_aux[month - 1, :, :].reshape((1,) + cell_area.shape) } - write_netcdf(os.path.join(file_path, '{0}_{1}{2}.nc'.format(pollutant_name, year, str(month).zfill(2))), - c_lats['data'], c_lons['data'], [emission], - date=datetime(year=year, month=month, day=1), - boundary_latitudes=b_lats['data'], boundary_longitudes=b_lons['data'], - cell_area=cell_area, global_attributes=global_attributes) + write_netcdf( + os.path.join(file_path, '{0}_{1}{2}.nc'.format(pollutant_name, year, str(month).zfill(2))), + c_lats['data'], c_lons['data'], [emission], date=datetime(year=year, month=month, day=1), + boundary_latitudes=b_lats['data'], boundary_longitudes=b_lons['data'], cell_area=cell_area, + global_attributes=global_attributes) else: raise IOError('File not found {0}'.format(file_name)) + return True if 
__name__ == '__main__': @@ -232,5 +318,3 @@ if __name__ == '__main__': # do_transformation(y) if do_air: do_air_transformation(y) - - diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py index 197abb7..6a359b5 100644 --- a/preproc/eclipsev5a_preproc.py +++ b/preproc/eclipsev5a_preproc.py @@ -18,6 +18,13 @@ # along with HERMESv3_GR. If not, see . +import os +from datetime import datetime +from netCDF4 import Dataset +import numpy as np +from cf_units import Unit + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/iiasa/eclipsev5a/original_files' output_path = '/esarchive/recon/iiasa/eclipsev5a/original_files/test' @@ -29,21 +36,64 @@ list_years = [1990, 1995, 2000, 2005, 2010, 2015, 2020, 2025, 2030, 2040, 2050] list_pollutants = ['BC', 'CH4', 'CO', 'NH3', 'NOx', 'OC', 'OM', 'PM10', 'PM25', 'SO2', 'VOC'] # ============================================================== -import os -import sys - -from datetime import datetime -from netCDF4 import Dataset -import numpy as np -from cf_units import Unit -from hermesv3_gr.tools.coordinates_tools import * month_factor = 1000000. / (30. * 24. * 3600.) # To pass from kt/month to Kg/s year_factor = 1000000. / (365. * 24. * 3600.) # To pass from kt/year to Kg/s var_units = 'kg.m-2.s-1' +def get_grid_area(filename): + """ + Calculates the area for each cell of the grid using CDO + + :param filename: Path to the file to calculate the cell area + :type filename: str + + :return: Area of each cell of the grid. + :rtype: numpy.array + """ + from cdo import Cdo + from netCDF4 import Dataset + + cdo = Cdo() + s = cdo.gridarea(input=filename) + nc_aux = Dataset(s, mode='r') + grid_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + return grid_area + + +def create_bounds(coordinates, number_vertices=2): + """ + Calculates the vertices coordinates. 
+ + :param coordinates: Coordinates in degrees (latitude or longitude) + :type coordinates: numpy.ndarray + + :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. + (by default 2) + :type number_vertices: int + + :return: Array with as many elements as vertices for each value of coords. + :rtype: numpy.ndarray + """ + interval = coordinates[1] - coordinates[0] + + coords_left = coordinates - interval / 2 + coords_right = coordinates + interval / 2 + if number_vertices == 2: + bound_coords = np.dstack((coords_left, coords_right)) + elif number_vertices == 4: + bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + raise ValueError('The number of vertices of the boudaries must be 2 or 4') + + return bound_coords + + def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cell_area, date): + # TODO Documentation print output_name_path # Creating NetCDF & Dimensions nc_output = Dataset(output_name_path, mode='w', format="NETCDF4") @@ -122,7 +172,8 @@ def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cel def extract_sector_by_name(name): - sector_dict ={ + # TODO Documentation + sector_dict = { 'emis_agr': 'agriculture', 'emis_awb': 'agriculture_waste', 'emis_dom': 'residential', @@ -141,7 +192,8 @@ def extract_sector_by_name(name): def extract_month_profile_by_sector(sector, month, pollutant=None): - sector_dict ={ + # TODO Documentation + sector_dict = { 'residential': 'dom', 'energy': 'ene', 'agriculture_waste': 'agr_awb', @@ -166,14 +218,17 @@ def extract_month_profile_by_sector(sector, month, pollutant=None): def get_output_name(pollutant, sector, year, month): + # TODO Docuemtnation output_path_aux = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant, sector), ) if not(os.path.exists(output_path_aux)): os.makedirs(output_path_aux) - return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, 
datetime(year=year, month=month, day=1).strftime('%Y%m'))) + return os.path.join(output_path_aux, '{0}_{1}.nc'.format( + pollutant, datetime(year=year, month=month, day=1).strftime('%Y%m'))) def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area): + # TODO Docuemtnation for i in xrange(len(list_years)): for month in xrange(12): @@ -200,10 +255,12 @@ def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area) 'data': data_aux, 'units': Unit(var_units), }] - write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, datetime(year=list_years[i], month=month + 1, day=1)) + write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, + datetime(year=list_years[i], month=month + 1, day=1)) def do_transformation(): + # TODO Documentation for pollutant in list_pollutants: file_name = os.path.join(input_path, input_name.replace('', pollutant)) print file_name @@ -220,14 +277,17 @@ def do_transformation(): def get_flaring_output_name(pollutant, sector, year): + # TODO Docuemtnation output_path_aux = os.path.join(output_path, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) if not(os.path.exists(output_path_aux)): os.makedirs(output_path_aux) - return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, datetime(year=year, month=1, day=1).strftime('%Y'))) + return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, + datetime(year=year, month=1, day=1).strftime('%Y'))) def get_flaring_var_name(nc_var): + # TODO Docuemtnation nc_var_2_var = { 'emis_SO2_flr': 'so2', 'emis_NOx_flr': 'nox_no2', @@ -248,6 +308,7 @@ def get_flaring_var_name(nc_var): def do_flaring_transformation(): + # TODO Documentation nc_in = Dataset(os.path.join(input_path, input_name_flaring), mode='r') c_lats = nc_in.variables['lat'][:] c_lons = nc_in.variables['lon'][:] @@ -268,19 +329,23 @@ def do_flaring_transformation(): 'data': data_aux, 'units': Unit(var_units), }] - write_netcdf(output_name, data_list, c_lats, c_lons, 
cell_area, datetime(year=list_years[i], month=1, day=1)) + write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, + datetime(year=list_years[i], month=1, day=1)) nc_in.close() def get_ship_output_name(pollutant, sector, year): + # TODO Docuemntation output_path_aux = os.path.join(output_path, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) if not(os.path.exists(output_path_aux)): os.makedirs(output_path_aux) - return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, datetime(year=year, month=1, day=1).strftime('%Y'))) + return os.path.join(output_path_aux, '{0}_{1}.nc'.format(pollutant, + datetime(year=year, month=1, day=1).strftime('%Y'))) def get_ship_var_name(nc_var): + # TODO Documentation nc_var_2_var = { 'SO2': 'so2', 'NOx': 'nox_no2', @@ -298,8 +363,9 @@ def get_ship_var_name(nc_var): return_value = None return return_value -def do_ship_transformation(): +def do_ship_transformation(): + # TODO Documentation for year in list_years: in_path = os.path.join(input_path, input_name_ship.replace('', str(year))) nc_in = Dataset(in_path, mode='r') @@ -332,4 +398,3 @@ if __name__ == '__main__': do_transformation() do_flaring_transformation() do_ship_transformation() - diff --git a/preproc/edgarv432_ap_preproc.py b/preproc/edgarv432_ap_preproc.py index 70e9f9d..34f77af 100755 --- a/preproc/edgarv432_ap_preproc.py +++ b/preproc/edgarv432_ap_preproc.py @@ -18,11 +18,19 @@ # along with HERMESv3_GR. If not, see . 
+import os +from netCDF4 import Dataset +import numpy as np +from warnings import warn as warning + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/jrc/edgarv432_ap/original_files/' output_path = '/esarchive/recon/jrc/edgarv432_ap' list_pollutants = ['BC', 'CO', 'NH3', 'NOx', 'OC', 'PM10', 'PM2.5_bio', 'PM2.5_fossil', 'SO2', 'NMVOC'] -#list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] +# list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, +# 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, +# 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] list_years = [2012] # To do yearly emissions @@ -42,18 +50,11 @@ This script also calculates the boundaries of teh cells and teh cell area. Carles Tena Medina (carles.tena@bsc.es) from Barcelona Supercomputing Center (BSC-CNS). """ -import os -import timeit -from netCDF4 import Dataset -import numpy as np - -from hermesv3_gr.tools.coordinates_tools import * -from warnings import warn as warning def ipcc_to_sector_dict(): - ipcc_sector_dict = \ - { + # TODO Documentation + ipcc_sector_dict = { "IPCC_1A1a": "ENE", "IPCC_1A1b_1A1c_1A5b1_1B1b_1B2a5_1B2a6_1B2b5_2C1b": "REF_TRF", "IPCC_1A2": "IND", @@ -84,8 +85,59 @@ def ipcc_to_sector_dict(): return ipcc_sector_dict +def create_bounds(coordinates, number_vertices=2): + """ + Calculates the vertices coordinates. + + :param coordinates: Coordinates in degrees (latitude or longitude) + :type coordinates: numpy.array + + :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. 
+ (by default 2) + :type number_vertices: int + + :return: Array with as many elements as vertices for each value of coords. + :rtype: numpy.array + """ + interval = coordinates[1] - coordinates[0] + + coords_left = coordinates - interval / 2 + coords_right = coordinates + interval / 2 + if number_vertices == 2: + bound_coords = np.dstack((coords_left, coords_right)) + elif number_vertices == 4: + bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + raise ValueError('The number of vertices of the boudaries must be 2 or 4') + + return bound_coords + + +def get_grid_area(filename): + """ + Calculates the area for each cell of the grid using CDO + + :param filename: Path to the file to calculate the cell area + :type filename: str + + :return: Area of each cell of the grid. + :rtype: numpy.array + """ + from cdo import Cdo + from netCDF4 import Dataset + + cdo = Cdo() + s = cdo.gridarea(input=filename) + nc_aux = Dataset(s, mode='r') + grid_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + return grid_area + + def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, grid_cell_area, year, sector, month=None): + # TODO Documentation # Creating NetCDF & Dimensions print output_name_path nc_output = Dataset(output_name_path, mode='w', format="NETCDF4") @@ -152,8 +204,8 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr crs.semi_major_axis = 6371000.0 crs.inverse_flattening = 0 - nc_output.setncattr('title', 'EDGARv4.3.2_AP inventory for the sector {0} and pollutant {1}'.format(sector, data_atts[ - 'long_name']), ) + nc_output.setncattr('title', 'EDGARv4.3.2_AP inventory for the sector {0} and pollutant {1}'.format( + sector, data_atts['long_name']), ) nc_output.setncattr('Conventions', 'CF-1.6', ) nc_output.setncattr('institution', 'JRC', ) nc_output.setncattr('source', 'EDGARv4.3.2_AP', ) @@ -172,10 +224,13 @@ def write_netcdf(output_name_path, data, data_atts, 
center_lats, center_lons, gr def do_yearly_transformation(year): - print year + # TODO Documentation for pollutant in list_pollutants: for ipcc in ipcc_to_sector_dict().keys(): - file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + file_path = os.path.join( + input_path, + yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + ipcc)) if os.path.exists(file_path): grid_area = get_grid_area(file_path) @@ -185,6 +240,8 @@ def do_yearly_transformation(year): if pollutant in ['PM2.5_bio', 'PM2.5_fossil']: in_pollutant = pollutant pollutant = 'PM2.5' + else: + in_pollutant = None data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] @@ -213,14 +270,19 @@ def do_yearly_transformation(year): data, data_attributes, lats, lons, grid_area, year, sector.lower()) else: - warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format( + pollutant, ipcc, file_path)) + return True def do_monthly_transformation(year): - print year + # TODO Documentation for pollutant in list_pollutants: for ipcc in ipcc_to_sector_dict().keys(): - file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + file_path = os.path.join( + input_path, + yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + ipcc)) if os.path.exists(file_path): grid_area = get_grid_area(file_path) @@ -230,6 +292,8 @@ def do_monthly_transformation(year): if pollutant in ['PM2.5_bio', 'PM2.5_fossil']: in_pollutant = pollutant pollutant = 'PM2.5' + else: + in_pollutant = None data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] @@ -259,19 +323,26 @@ def do_monthly_transformation(year): month_factors = nc_month_factors.variables[sector][:] for month in xrange(1, 12 + 1, 1): 
data_aux = data * month_factors[month - 1, :, :] - write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), year, str(month).zfill(2))), + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), year, + str(month).zfill(2))), data_aux, data_attributes, lats, lons, grid_area, year, sector.lower()) else: warning( - "The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + "The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format( + pollutant, ipcc, file_path)) + return True def do_2010_monthly_transformation(): + # TODO Documentation for pollutant in list_pollutants: for ipcc in ipcc_to_sector_dict().keys(): for month in xrange(1, 12 + 1, 1): - file_path = os.path.join(input_path, monthly_input_name.replace('', pollutant).replace('', str(month)).replace('', ipcc)) + file_path = os.path.join( + input_path, + monthly_input_name.replace('', pollutant).replace('', + str(month)).replace('', ipcc)) if os.path.exists(file_path): grid_area = get_grid_area(file_path) @@ -281,6 +352,8 @@ def do_2010_monthly_transformation(): if pollutant in ['PM2.5_bio', 'PM2.5_fossil']: in_pollutant = pollutant pollutant = 'PM2.5' + else: + in_pollutant = None data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] data = np.array(data) @@ -304,11 +377,14 @@ def do_2010_monthly_transformation(): out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), year, str(month).zfill(2))), - data, data_attributes, lats, lons, grid_area, year, sector.lower()) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), 2010, + str(month).zfill(2))), + data, data_attributes, lats, lons, grid_area, 2010, sector.lower()) else: - warning("The 
pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format( + pollutant, ipcc, file_path)) + return True if __name__ == '__main__': @@ -323,4 +399,3 @@ if __name__ == '__main__': do_2010_monthly_transformation() else: do_monthly_transformation(y) - diff --git a/preproc/edgarv432_voc_preproc.py b/preproc/edgarv432_voc_preproc.py index 484c397..a42d8aa 100755 --- a/preproc/edgarv432_voc_preproc.py +++ b/preproc/edgarv432_voc_preproc.py @@ -18,13 +18,21 @@ # along with HERMESv3_GR. If not, see . +import os +from netCDF4 import Dataset +import numpy as np +from warnings import warn as warning + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/jrc/edgarv432_voc/original_files/' output_path = '/esarchive/recon/jrc/edgarv432_voc' list_pollutants = ['voc1', 'voc2', 'voc3', 'voc4', 'voc5', 'voc6', 'voc7', 'voc8', 'voc9', 'voc10', 'voc11', 'voc12', 'voc13', 'voc14', 'voc15', 'voc16', 'voc17', 'voc18', 'voc19', 'voc20', 'voc21', 'voc22', 'voc23', 'voc24', 'voc25'] -#list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] +# list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, +# 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, +# 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] list_years = [2010] # To do yearly emissions @@ -44,18 +52,61 @@ This script also calculates the boundaries of teh cells and teh cell area. Carles Tena Medina (carles.tena@bsc.es) from Barcelona Supercomputing Center (BSC-CNS). 
""" -import os -import timeit -from netCDF4 import Dataset -import numpy as np -from hermesv3_gr.tools.coordinates_tools import * -from warnings import warn as warning + +def create_bounds(coordinates, number_vertices=2): + """ + Calculates the vertices coordinates. + + :param coordinates: Coordinates in degrees (latitude or longitude) + :type coordinates: numpy.ndarray + + :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. + (by default 2) + :type number_vertices: int + + :return: Array with as many elements as vertices for each value of coords. + :rtype: numpy.ndarray + """ + interval = coordinates[1] - coordinates[0] + + coords_left = coordinates - interval / 2 + coords_right = coordinates + interval / 2 + if number_vertices == 2: + bound_coords = np.dstack((coords_left, coords_right)) + elif number_vertices == 4: + bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + raise ValueError('The number of vertices of the boudaries must be 2 or 4') + + return bound_coords + + +def get_grid_area(filename): + """ + Calculates the area for each cell of the grid using CDO + + :param filename: Path to the file to calculate the cell area + :type filename: str + + :return: Area of each cell of the grid. 
+ :rtype: numpy.array + """ + from cdo import Cdo + from netCDF4 import Dataset + + cdo = Cdo() + s = cdo.gridarea(input=filename) + nc_aux = Dataset(s, mode='r') + grid_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + return grid_area def ipcc_to_sector_dict(): - ipcc_sector_dict = \ - { + # TODO Documentation + ipcc_sector_dict = { "IPCC_4F": "AWB", "IPCC_1A1": "ENE", "IPCC_7A": "FFF", @@ -79,6 +130,7 @@ def ipcc_to_sector_dict(): def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, grid_cell_area, year, sector, month=None): + # TODO Documentation # Creating NetCDF & Dimensions print output_name_path nc_output = Dataset(output_name_path, mode='w', format="NETCDF4") @@ -145,8 +197,8 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr crs.semi_major_axis = 6371000.0 crs.inverse_flattening = 0 - nc_output.setncattr('title', 'EDGARv4.3.2_AP inventory for the sector {0} and pollutant {1}'.format(sector, data_atts[ - 'long_name']), ) + nc_output.setncattr('title', 'EDGARv4.3.2_AP inventory for the sector {0} and pollutant {1}'.format( + sector, data_atts['long_name']), ) nc_output.setncattr('Conventions', 'CF-1.6', ) nc_output.setncattr('institution', 'JRC', ) nc_output.setncattr('source', 'EDGARv4.3.2_AP', ) @@ -164,12 +216,18 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr nc_output.close() + return True + def do_yearly_transformation(year): + # TODO Documentation print year for pollutant in list_pollutants: for ipcc in ipcc_to_sector_dict().keys(): - file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + file_path = os.path.join( + input_path, + yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + ipcc)) if os.path.exists(file_path): grid_area = get_grid_area(file_path) @@ -203,14 +261,20 @@ def do_yearly_transformation(year): data, data_attributes, lats, lons, grid_area, 
year, sector.lower()) else: - warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format( + pollutant, ipcc, file_path)) + return True def do_monthly_transformation(year): + # TODO Documentation print year for pollutant in list_pollutants: for ipcc in ipcc_to_sector_dict().keys(): - file_path = os.path.join(input_path, yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', ipcc)) + file_path = os.path.join( + input_path, + yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + ipcc)) if os.path.exists(file_path): grid_area = get_grid_area(file_path) @@ -246,19 +310,26 @@ def do_monthly_transformation(year): month_factors = nc_month_factors.variables[sector][:] for month in xrange(1, 12 + 1, 1): data_aux = data * month_factors[month - 1, :, :] - write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant_aux.lower(), year, str(month).zfill(2))), + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format( + pollutant_aux.lower(), year, str(month).zfill(2))), data_aux, data_attributes, lats, lons, grid_area, year, sector.lower()) else: warning( - "The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + "The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format( + pollutant, ipcc, file_path)) + return True def do_2010_monthly_transformation(): + # TODO Documentation for pollutant in list_pollutants: for ipcc in ipcc_to_sector_dict().keys(): for month in xrange(1, 12 + 1, 1): - file_path = os.path.join(input_path, monthly_input_name.replace('', pollutant).replace('', str(month)).replace('', ipcc)) + file_path = os.path.join( + input_path, + monthly_input_name.replace('', pollutant).replace('', + str(month)).replace('', ipcc)) if 
os.path.exists(file_path): grid_area = get_grid_area(file_path) @@ -286,14 +357,18 @@ def do_2010_monthly_transformation(): 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) + out_path_aux = os.path.join( + output_path, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant_aux.lower(), year, str(month).zfill(2))), - data, data_attributes, lats, lons, grid_area, year, sector.lower()) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format( + pollutant_aux.lower(), 2010, str(month).zfill(2))), + data, data_attributes, lats, lons, grid_area, 2010, sector.lower()) else: - warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format(pollutant, ipcc, file_path)) + warning("The pollutant {0} for the IPCC sector {1} does not exist.\n File not found: {2}".format( + pollutant, ipcc, file_path)) + return True if __name__ == '__main__': @@ -308,4 +383,3 @@ if __name__ == '__main__': do_2010_monthly_transformation() else: do_monthly_transformation(y) - diff --git a/preproc/emep_preproc.py b/preproc/emep_preproc.py index 8ba58d3..e4f526a 100644 --- a/preproc/emep_preproc.py +++ b/preproc/emep_preproc.py @@ -18,6 +18,11 @@ # along with HERMESv3_GR. If not, see . 
+import os +from warnings import warn as warning +from datetime import datetime + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/ceip/emepv18/original_files' output_path = '/esarchive/recon/ceip/emepv18/yearly_mean' @@ -28,13 +33,8 @@ list_pollutants = ['NOx', 'NMVOC', 'SOx', 'NH3', 'PM2_5', 'PM10', 'CO'] # ============================================================== -import sys -import os -from warnings import warn as warning -from datetime import datetime - - def correct_input_error(df): + # TODO Documentation df.loc[df['LATITUDE'] == 36.14, 'LATITUDE'] = 36.15 df.loc[df['LONGITUDE'] == 29.58, 'LONGITUDE'] = 29.55 @@ -42,15 +42,18 @@ def correct_input_error(df): def get_sectors(): + # TODO Documentation return ['A_PublicPower', 'B_Industry', 'C_OtherStationaryComb', 'D_Fugitive', 'E_Solvents', 'F_RoadTransport', 'G_Shipping', 'H_Aviation', 'I_Offroad', 'J_Waste', 'K_AgriLivestock', 'L_AgriOther'] def calculate_grid_definition(in_path): + # TODO Documentation import pandas as pd import numpy as np - df = pd.read_table(in_path, sep=';', skiprows=[0, 1, 2, 3], names=['ISO2', 'YEAR', 'SECTOR', 'POLLUTANT', 'LONGITUDE', 'LATITUDE', 'UNIT', 'EMISSION']) + df = pd.read_table(in_path, sep=';', skiprows=[0, 1, 2, 3], names=[ + 'ISO2', 'YEAR', 'SECTOR', 'POLLUTANT', 'LONGITUDE', 'LATITUDE', 'UNIT', 'EMISSION']) df = correct_input_error(df) # Longitudes @@ -72,6 +75,7 @@ def calculate_grid_definition(in_path): def do_transformation(year): + # TODO Documentation from hermesv3_gr.tools.netcdf_tools import write_netcdf, get_grid_area from hermesv3_gr.tools.coordinates_tools import create_bounds import pandas as pd @@ -81,7 +85,9 @@ def do_transformation(year): for pollutant in list_pollutants: for sector in get_sectors(): - in_file = os.path.join(input_path, input_name.replace('', str(year)).replace('', sector).replace('', pollutant)) + in_file = os.path.join( + input_path, + input_name.replace('', str(year)).replace('', 
sector).replace('', pollutant)) if os.path.exists(in_file): print in_file @@ -102,8 +108,9 @@ def do_transformation(year): 'data': np.zeros((len(c_lats), len(c_lons))) } - df = pd.read_table(in_file, sep=';', skiprows=[0,1,2,3], - names=['ISO2','YEAR','SECTOR','POLLUTANT','LONGITUDE','LATITUDE','UNIT','EMISSION']) + df = pd.read_table( + in_file, sep=';', skiprows=[0, 1, 2, 3], + names=['ISO2', 'YEAR', 'SECTOR', 'POLLUTANT', 'LONGITUDE', 'LATITUDE', 'UNIT', 'EMISSION']) df = correct_input_error(df) @@ -125,15 +132,19 @@ def do_transformation(year): boundary_latitudes=b_lats, boundary_longitudes=b_lons) cell_area = get_grid_area(complete_output_dir) element['data'] = element['data'] * unit_factor / cell_area - write_netcdf(complete_output_dir, c_lats, c_lons, [element], date=datetime(year, month=1, day=1), - boundary_latitudes=b_lats, boundary_longitudes=b_lons, cell_area=cell_area, - global_attributes={ - 'references': 'web: web: http://www.ceip.at/ms/ceip_home1/ceip_home/webdab_emepdatabase/emissions_emepmodels/', - 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + - '(Barcelona Supercomputing Center)' - }) + write_netcdf( + complete_output_dir, c_lats, c_lons, [element], date=datetime(year, month=1, day=1), + boundary_latitudes=b_lats, boundary_longitudes=b_lons, cell_area=cell_area, + global_attributes={ + 'references': "web: http://www.ceip.at/ms/ceip_home1/ceip_home/webdab_emepdatabase/" + + "emissions_emepmodels/", + 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + + '(Barcelona Supercomputing Center)' + }) else: - warning("The pollutant {0} for the GNFR14 sector {1} does not exist.\n File not found: {2}".format(pollutant, sector, in_file)) + warning("The pollutant {0} for the GNFR14 sector {1} does not exist.\n File not found: {2}".format( + pollutant, sector, in_file)) + return True if __name__ == '__main__': diff --git a/preproc/gfas12_preproc.py b/preproc/gfas12_preproc.py index 
301c65f..ef2de31 100755 --- a/preproc/gfas12_preproc.py +++ b/preproc/gfas12_preproc.py @@ -17,6 +17,12 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . + +import os +from netCDF4 import Dataset +import cf_units +import pandas as pd +import datetime from datetime import datetime, timedelta # ============== CONFIGURATION PARAMETERS ====================== @@ -30,24 +36,60 @@ ending_date = datetime(year=2018, month=8, day=29) parameters_file = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/GFAS_Parameters.csv' # ============================================================== -import os -import sys -import timeit -import numpy as np -from netCDF4 import Dataset -import cf_units -import pandas as pd -import datetime -import cdo +def create_bounds(coords, number_vertices=2): + """ + Calculates the vertices coordinates. + + :param coords: Coordinates in degrees (latitude or longitude) + :type coords: numpy.ndarray + + :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. + (by default 2) + :type number_vertices: int + + :return: Array with as many elements as vertices for each value of coords. 
+ :rtype: numpy.ndarray + """ + import numpy as np + + interval = coords[1] - coords[0] -from hermesv3_gr.tools.coordinates_tools import * + coords_left = coords - interval / 2 + coords_right = coords + interval / 2 + if number_vertices == 2: + bound_coords = np.dstack((coords_left, coords_right)) + elif number_vertices == 4: + bound_coords = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + raise ValueError('The number of vertices of the boudaries must be 2 or 4') -cdo = cdo.Cdo() + return bound_coords + + +def get_grid_area(filename): + """ + Calculates the area for each cell of the grid using CDO + + :param filename: Path to the file to calculate the cell area + :type filename: str + + :return: Area of each cell of the grid. + :rtype: numpy.array + """ + from cdo import Cdo + from netCDF4 import Dataset + + cdo = Cdo() + s = cdo.gridarea(input=filename) + nc_aux = Dataset(s, mode='r') + grid_area = nc_aux.variables['cell_area'][:] + nc_aux.close() + + return grid_area def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cell_area, date): - # TODO Documentation """ Write a NetCDF with the given information. @@ -57,10 +99,13 @@ def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cel :param data_list :param center_lats: Latitudes of the center of each cell. - :type center_lats: numpy.ndarray + :type center_lats: numpy.array :param center_lons: Longitudes of the center of each cell. - :type center_lons: numpy.ndarray + :type center_lons: numpy.array + + :param grid_cell_area: Area of each cell of the grid. + :type grid_cell_area: numpy.array :param date: Date of the current netCDF. 
:type date: datetime.datetime @@ -145,19 +190,27 @@ def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cel '(Barcelona Supercomputing Center)', ) nc_output.close() + return True def do_transformation(input_file, date, output_dir, variables_list): """ + Transform the original file into a NEtCDF file that follows the conventions. :param input_file: - :param output_file: - :param date: + :type input_file: str + + :param date: Date of the file to do the transformation. :type date: datetime.datetime - :param output_dir: - :param variables_list: - :return: + + :param output_dir: Path where have to be stored the output file. + :type output_dir: str + + :param variables_list: LIst of dictionaries with the information of each variable of the output files. + :type variables_list: list """ + from cdo import Cdo + cdo = Cdo() nc_temp = cdo.copy(input=input_file, options='-R -r -f nc4c -z zip_4') @@ -176,11 +229,22 @@ def do_transformation(input_file, date, output_dir, variables_list): out_path_aux = os.path.join(output_dir, 'daily_mean', 'multivar') if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - output_path = os.path.join(out_path_aux, 'ga_{0}.nc'.format(date.strftime('%Y%m%d'))) - write_netcdf(output_path, variables_list, lats, lons, cell_area, date) + out_path_aux = os.path.join(out_path_aux, 'ga_{0}.nc'.format(date.strftime('%Y%m%d'))) + write_netcdf(out_path_aux, variables_list, lats, lons, cell_area, date) + + return True def do_var_list(variables_file): + """ + Creates the List of dictionaries + + :param variables_file: CSV file with the information of each variable + :type variables_file: str + + :return: Dictionaries list with the information of each variable. 
+ :rtype: list + """ df = pd.read_csv(variables_file, sep=';') list_aux = [] for i, element in df.iterrows(): diff --git a/preproc/htapv2_preproc.py b/preproc/htapv2_preproc.py index 7d0a28d..d178c71 100755 --- a/preproc/htapv2_preproc.py +++ b/preproc/htapv2_preproc.py @@ -17,6 +17,11 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . + +import os +import sys + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/jrc/htapv2/original_files' output_path = '/esarchive/recon/jrc/htapv2' @@ -35,12 +40,8 @@ voc_ratio_path = '/esarchive/recon/jrc/htapv2/original_files/retro_nmvoc_ratio_2 voc_ratio_name = 'retro_nmvoc_ratio__2000_0.1deg.nc' voc_ratio_air_name = 'VOC_split_AIR.csv' voc_ratio_ships_name = 'VOC_split_SHIP.csv' - # ============================================================== -import os -import sys - def do_transformation_annual(filename, out_path, pollutant, sector, year): """ @@ -104,6 +105,7 @@ def do_transformation_annual(filename, out_path, pollutant, sector, year): write_netcdf(out_path, c_lats['data'], c_lons['data'], [data], boundary_latitudes=create_bounds(c_lats['data']), boundary_longitudes=create_bounds(c_lons['data']), cell_area=get_grid_area(filename), global_attributes=global_attributes,) + return True def do_transformation(filename_list, out_path, pollutant, sector, year): @@ -169,9 +171,11 @@ def do_transformation(filename_list, out_path, pollutant, sector, year): write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data], boundary_latitudes=create_bounds(c_lats['data']), boundary_longitudes=create_bounds(c_lons['data']), cell_area=get_grid_area(filename_list[month - 1]), global_attributes=global_attributes,) + return True def do_ratio_list(sector=None): + # TODO Documentation if sector == 'SHIPS': return {'all': os.path.join(voc_ratio_path, voc_ratio_ships_name)} elif sector == 'AIR_CDS': @@ -209,6 +213,7 @@ def 
do_ratio_list(sector=None): def do_nmvoc_month_transformation(filename_list, out_path, sector, year): + # TODO Docuemtnation from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area from hermesv3_gr.tools.coordinates_tools import create_bounds @@ -238,7 +243,6 @@ def do_nmvoc_month_transformation(filename_list, out_path, sector, year): nmvoc_ratio_list.pop('voc24', None) nmvoc_ratio_list.pop('voc25', None) - print type(nmvoc_ratio_list), nmvoc_ratio_list for month in xrange(1, 13): @@ -265,7 +269,10 @@ def do_nmvoc_month_transformation(filename_list, out_path, sector, year): 'source': 'HTAPv2', 'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' + '2017-04-28: ...', - 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + + 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and ' + + 'global emission grid maps for 2008 and 2010 to study hemispheric transport of air ' + + 'pollution, Atmos. Chem. 
Phys., 15, 11411-11432, ' + + 'https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + 'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)', @@ -278,8 +285,9 @@ def do_nmvoc_month_transformation(filename_list, out_path, sector, year): out_path_aux = os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant, year, str(month).zfill(2))) print out_path_aux write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data_aux], - boundary_latitudes=create_bounds(c_lats['data']), boundary_longitudes=create_bounds(c_lons['data']), - global_attributes=global_attributes,) + boundary_latitudes=create_bounds(c_lats['data']), + boundary_longitudes=create_bounds(c_lons['data']),global_attributes=global_attributes,) + return True def do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year): diff --git a/preproc/tno_mac_iii_preproc.py b/preproc/tno_mac_iii_preproc.py index 47fe12f..b9841db 100644 --- a/preproc/tno_mac_iii_preproc.py +++ b/preproc/tno_mac_iii_preproc.py @@ -17,6 +17,10 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . 
+ +import os + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/tno/tno_macc_iii/original_files/ascii' output_path = '/esarchive/recon/tno/tno_macc_iii/yearly_mean' @@ -28,9 +32,6 @@ vor_ratio_name = 'ratio_.nc' # ============================================================== -import sys -import os - def get_pollutants(in_path): """ @@ -49,6 +50,7 @@ def get_pollutants(in_path): def calculate_grid_definition(in_path): + # TODO Documentation import pandas as pd import numpy as np @@ -74,6 +76,7 @@ def calculate_grid_definition(in_path): def create_pollutant_empty_list(in_path, len_c_lats, len_c_lons): + # TODO Documentation import numpy as np pollutant_list = [] @@ -95,6 +98,7 @@ def create_pollutant_empty_list(in_path, len_c_lats, len_c_lons): def do_transformation(year): + # TODO Docuemtnation from hermesv3_gr.tools.netcdf_tools import write_netcdf, get_grid_area from hermesv3_gr.tools.coordinates_tools import create_bounds from datetime import datetime @@ -154,16 +158,45 @@ def do_transformation(year): write_netcdf(aux_output_path, c_lats, c_lons, [pollutant_list[i]], date=datetime(year, month=1, day=1), boundary_latitudes=b_lats, boundary_longitudes=b_lons, cell_area=cell_area, global_attributes={ - 'references': 'J. J. P. Kuenen, A. J. H. Visschedijk, M. Jozwicka, and H. A. C. Denier van der Gon TNO-MACC_II emission inventory; a multi-year (2003–2009) consistent high-resolution European emission inventory for air quality modelling Atmospheric Chemistry and Physics 14 10963–10976 2014', + 'references': 'J. J. P. Kuenen, A. J. H. Visschedijk, M. Jozwicka, and H. A. C. 
' + + 'Denier van der Gon TNO-MACC_II emission inventory; a multi-year ' + + '(2003-2009) consistent high-resolution European emission inventory ' + + 'for air quality modelling Atmospheric Chemistry and Physics 14 ' + + '10963-10976 2014', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)' } ) + return True + + +def extract_vars(netcdf_path, variables_list, attributes_list=list()): + # TODO Documentation + from netCDF4 import Dataset + data_list = [] + # print netcdf_path + netcdf = Dataset(netcdf_path, mode='r') + for var in variables_list: + if var == 'emi_nox_no2': + var1 = var + var2 = 'emi_nox' + else: + var1 = var2 = var + dict_aux = \ + { + 'name': var1, + 'data': netcdf.variables[var2][:], + } + for attribute in attributes_list: + dict_aux.update({attribute: netcdf.variables[var2].getncattr(attribute)}) + data_list.append(dict_aux) + netcdf.close() + + return data_list def get_voc_ratio(ratio_path, snap): - import numpy as np - from hermesv3_gr.tools.netcdf_tools import extract_vars + # TODO Documentation if snap == 'snap34': snap = 'snap3' try: @@ -174,14 +207,18 @@ def get_voc_ratio(ratio_path, snap): def get_voc_list(): - return ['voc{0}'.format(str(x).zfill(2)) for x in [1,2,3,4,5,6,7,8,9,12,13,14,15,16,17,18,19,20,21,22,23,24,25]] + # TODO Documentation + return ['voc{0}'.format(str(x).zfill(2)) for x in [1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25]] def get_sector_list(): + # TODO Documentation return ['snap{0}'.format(x) for x in [1, 2, 34, 5, 6, 71, 72, 73, 74, 8, 9]] def do_voc_transformation(year): + # TODO Docuemtnation from hermesv3_gr.tools.netcdf_tools import write_netcdf, extract_vars from hermesv3_gr.tools.coordinates_tools import create_bounds from warnings import warn as warning @@ -212,19 +249,23 @@ def do_voc_transformation(year): c_lons['data'], [new_voc], boundary_latitudes=b_lats, boundary_longitudes=b_lons, 
cell_area=cell_area['data'], global_attributes={ - 'references': 'J. J. P. Kuenen, A. J. H. Visschedijk, M. Jozwicka, and H. A. C. Denier van der Gon TNO-MACC_II emission inventory; a multi-year (2003–2009) consistent high-resolution European emission inventory for air quality modelling Atmospheric Chemistry and Physics 14 10963–10976 2014', + 'references': 'J. J. P. Kuenen, A. J. H. Visschedijk, M. Jozwicka, and H. A. C. ' + + 'Denier van der Gon TNO-MACC_II emission inventory; a multi-year ' + + '(2003-2009) consistent high-resolution European emission inventory ' + + 'for air quality modelling Atmospheric Chemistry and Physics 14 ' + + '10963-10976 2014', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)' }) else: - warning("The pollutant {0} for the sector {1} does not exist.\n SNAP not found: {2}".format(voc, snap, ratio_path)) + warning("The pollutant {0} for the sector {1} does not exist.\n SNAP not found: {2}".format( + voc, snap, ratio_path)) return True def check_vocs(year): - from hermesv3_gr.tools.netcdf_tools import extract_vars - + # TODO Documentation for snap in get_sector_list(): nmvoc_path = os.path.join(output_path, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) [new_voc] = extract_vars(nmvoc_path, ['nmvoc']) @@ -237,8 +278,9 @@ def check_vocs(year): [new_voc] = extract_vars(voc_path, [voc]) voc_sum += new_voc['data'].sum() - print '{0} NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format(snap, nmvoc_sum, voc_sum, 100*(nmvoc_sum - voc_sum)/nmvoc_sum) - + print '{0} NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format( + snap, nmvoc_sum, voc_sum, 100*(nmvoc_sum - voc_sum)/nmvoc_sum) + return True if __name__ == '__main__': diff --git a/preproc/tno_mac_iii_preproc_voc_ratios.py b/preproc/tno_mac_iii_preproc_voc_ratios.py index 4814278..f1800a3 100644 --- a/preproc/tno_mac_iii_preproc_voc_ratios.py +++ b/preproc/tno_mac_iii_preproc_voc_ratios.py @@ -17,6 +17,11 @@ # You should 
have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . + +import sys +import os + + # ============== CONFIGURATION PARAMETERS ====================== output_path = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' world_info_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/temporal/tz_world_country_iso3166.csv' @@ -25,11 +30,9 @@ csv_path = '/esarchive/recon/tno/tno_macc_iii/original_files/TNO_MACC_NMVOC prof # ============================================================== -import sys -import os - -def extract_vars(netcdf_path, variables_list, attributes_list=[]): +def extract_vars(netcdf_path, variables_list, attributes_list=list()): + # TODO Docuemtnation from netCDF4 import Dataset data_list = [] # print netcdf_path @@ -59,29 +62,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, RegularLatLon=False, Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ + # TODO Docuemtnation from cf_units import Unit, encode_time from netCDF4 import Dataset @@ -140,6 +121,10 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.createDimension('x', len(lcc_x)) lon_dim = ('y', 'x',) + else: + lat_dim = None + lon_dim = None + var_dim = None # Levels if levels is not None: @@ -268,7 +253,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, # print 
var[:].shape, variable['data'].shape try: var[:] = variable['data'] - except: + except Exception: print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) # Grid mapping @@ -305,6 +290,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.setncatts(global_attributes) netcdf.close() + return True def get_grid_area(filename): @@ -318,6 +304,7 @@ def get_grid_area(filename): :rtype: numpy.ndarray """ from cdo import Cdo + from netCDF4 import Dataset cdo = Cdo() s = cdo.gridarea(input=filename) @@ -359,8 +346,8 @@ def create_bounds(coords, number_vertices=2): def create_voc_ratio(voc): + # TODO Docuemtnation import numpy as np - import pandas as pd country_values, lat, lon = extract_vars(tno_world_mask, ['timezone_id', 'lat', 'lon']) country_values = country_values['data'].reshape((country_values['data'].shape[1], country_values['data'].shape[1])) print output_path @@ -378,8 +365,7 @@ def create_voc_ratio(voc): for country_code, factor in get_country_code_and_factor(voc, snap).iteritems(): try: mask_factor[country_values == iso_codes[country_code]] = factor - except: - # print 'passed', country_code + except Exception: pass # To fulfill the blanks on the map mask_factor[mask_factor <= 0] = get_default_ratio(voc, snap) @@ -390,13 +376,13 @@ def create_voc_ratio(voc): 'data': mask_factor.reshape((1,) + mask_factor.shape) }) write_netcdf(complete_output_path, lat['data'], lon['data'], data_list) - # print '->', country_code, iso_codes[country_code], factor else: print 'Ratio file for {0} already created\npath: {1}'.format(voc, complete_output_path) return True def get_default_ratio(voc, snap): + # TODO Documentation import pandas as pd df = pd.read_csv(csv_path, sep=';') @@ -408,6 +394,7 @@ def get_default_ratio(voc, snap): def get_iso_codes(): + # TODO Documentation import pandas as pd # df = pd.read_csv(self.world_info, sep=';', index_col=False, names=["country", "country_code"]) @@ -422,6 
+409,7 @@ def get_iso_codes(): def get_voc_list(): + # TODO Documentation import pandas as pd df = pd.read_csv(csv_path, sep=';') @@ -434,6 +422,7 @@ def get_voc_list(): def get_sector_list(voc): + # TODO Documentation import pandas as pd voc = voc.replace('voc', 'v') df = pd.read_csv(csv_path, sep=';') @@ -444,8 +433,9 @@ def get_sector_list(voc): def get_sector_list_text(voc): + # TODO Documentation voc = voc.replace('voc', 'v') - sector_list = get_sector_list(csv_path, voc) + sector_list = get_sector_list(voc) new_list = [] for int_sector in sector_list: new_list.append('snap{0}'.format(int_sector)) @@ -453,6 +443,7 @@ def get_sector_list_text(voc): def get_country_code_and_factor(voc, snap): + # TODO Documentation import pandas as pd voc = voc.replace('voc', 'v') df = pd.read_csv(csv_path, sep=';') @@ -469,5 +460,5 @@ def get_country_code_and_factor(voc, snap): if __name__ == '__main__': - for voc in get_voc_list(): - create_voc_ratio(voc) + for voc_name in get_voc_list(): + create_voc_ratio(voc_name) diff --git a/preproc/wiedinmyer_preproc.py b/preproc/wiedinmyer_preproc.py index f0616d0..069ca06 100755 --- a/preproc/wiedinmyer_preproc.py +++ b/preproc/wiedinmyer_preproc.py @@ -17,6 +17,12 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . 
+ +import os +import timeit +from netCDF4 import Dataset + + # ============== CONFIGURATION PARAMETERS ====================== input_path = '/esarchive/recon/ucar/wiedinmyer/original_files/' output_path = '/esarchive/recon/ucar/wiedinmyer/' @@ -27,16 +33,9 @@ input_name = 'ALL_Emiss_04282014.nc' year = 2010 # ============================================================== -import sys -import os -import timeit -from netCDF4 import Dataset -import cf_units - -# from hermesv3_gr.tools.coordinates_tools import * - def out_pollutant_to_in_pollutant(out_p): + # TODO Documentation pollutant_dict = { 'co2': 'CO2grid', 'co': 'COgrid', @@ -120,7 +119,7 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr :type output_name_path: str :param data: Data of the variable to be stored. - :type data: numpy.ndarray + :type data: numpy.array :param data_atts: Information of the data to fill the data attributes of the NetCDF variable. 'long_name': Name of the pollutant. @@ -130,19 +129,19 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr :type data_atts: dict :param center_lats: Latitudes of the center of each cell. - :type center_lats: numpy.ndarray + :type center_lats: numpy.array :param center_lons: Longitudes of the center of each cell. - :type center_lons: numpy.ndarray + :type center_lons: numpy.array + + :param grid_cell_area: Area of each cell of the grid. + :type: numpy.array :param time_year: Year. :type time_year: int :param time_month: Number of the month. :type time_month: int - - :param sector: Sector name of the pollutant. 
- :type sector: str """ from hermesv3_gr.tools.coordinates_tools import create_bounds -- GitLab From 4cff5b68c1d451198a9882eacc56fc8777d8aedf Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 11:19:10 +0200 Subject: [PATCH 05/51] Cleaned config.py --- hermesv3_gr/config/config.py | 168 +++++++++++++++++++++-------------- setup.py | 1 + 2 files changed, 102 insertions(+), 67 deletions(-) diff --git a/hermesv3_gr/config/config.py b/hermesv3_gr/config/config.py index dae6c2a..c2b1ad2 100644 --- a/hermesv3_gr/config/config.py +++ b/hermesv3_gr/config/config.py @@ -19,9 +19,6 @@ from configargparse import ArgParser -from hermesv3_gr.config import settings -import os -import sys class Config(ArgParser): @@ -43,77 +40,114 @@ class Config(ArgParser): """ # p = ArgParser(default_config_files=['/home/Earth/mguevara/HERMES/HERMESv3/IN/conf/hermes.conf']) p = ArgParser() - p.add('-c', '--my-config', required=False, is_config_file=True, help='Path to the configuration file.') + p.add_argument('-c', '--my-config', required=False, is_config_file=True, help='Path to the configuration file.') # TODO Detallar mas que significan 1, 2 y 3 los log_level - p.add('--log_level', required=True, help='Level of detail of the running process information.', - type=int, choices=[1, 2, 3]) - # p.add('--do_fix_part', required=False, help='Indicates if is needed to do the fix part', - # type=str, default='True') - p.add('--input_dir', required=True, help='Path to the input directory of the model.') - p.add('--data_path', required=True, help='Path to the data necessary for the model.') - p.add('--output_dir', required=True, help='Path to the output directory of the model.') - p.add('--output_name', required=True, help="Name of the output file. 
You can add the string '' that " + - "will be substitute by the starting date of the simulation day.") - p.add('--start_date', required=True, help='Starting Date to simulate (UTC)') - p.add('--end_date', required=False, help='If you want to simulate more than one day you have to specify the ' + - 'ending date of simulation in this parameter. ' + - 'If it is not set end_date = start_date.') - - p.add('--output_timestep_type', required=True, help='Type of timestep.', - type=str, choices=['hourly', 'daily', 'monthly', 'yearly']) - p.add('--output_timestep_num', required=True, help='Number of timesteps to simulate.', type=int) - p.add('--output_timestep_freq', required=True, help='Frequency between timesteps.', type=int) - - p.add('--output_model', required=True, help='Name of the output model.', choices=['MONARCH', 'CMAQ', 'WRF_CHEM']) - p.add('--output_attributes', required=False, help='Path to the file that contains the global attributes.') - - p.add('--domain_type', required=True, help='Type of domain to simulate.', - choices=['global', 'lcc', 'rotated', 'mercator']) - p.add('--auxiliar_files_path', required=True, help='Path to the directory where the necessary auxiliary ' + - 'files will be created if them are not created yet.') - - p.add('--vertical_description', required=True, help='Path to the file that contains the vertical description ' + - 'of the desired output.') + p.add_argument('--log_level', required=True, help='Level of detail of the running process information.', + type=int, choices=[1, 2, 3]) + + p.add_argument('--input_dir', required=True, help='Path to the input directory of the model.') + p.add_argument('--data_path', required=True, help='Path to the data necessary for the model.') + p.add_argument('--output_dir', required=True, help='Path to the output directory of the model.') + p.add_argument('--output_name', required=True, + help="Name of the output file. 
You can add the string '' that will be substitute by the " + + "starting date of the simulation day.") + p.add_argument('--start_date', required=True, help='Starting Date to simulate (UTC)') + p.add_argument('--end_date', required=False, + help='If you want to simulate more than one day you have to specify the ending date of ' + + 'simulation in this parameter. If it is not set end_date = start_date.') + + p.add_argument('--output_timestep_type', required=True, help='Type of timestep.', + type=str, choices=['hourly', 'daily', 'monthly', 'yearly']) + p.add_argument('--output_timestep_num', required=True, help='Number of timesteps to simulate.', type=int) + p.add_argument('--output_timestep_freq', required=True, help='Frequency between timesteps.', type=int) + + p.add_argument('--output_model', required=True, help='Name of the output model.', + choices=['MONARCH', 'CMAQ', 'WRF_CHEM']) + p.add_argument('--output_attributes', required=False, + help='Path to the file that contains the global attributes.') + + p.add_argument('--domain_type', required=True, help='Type of domain to simulate.', + choices=['global', 'lcc', 'rotated', 'mercator']) + p.add_argument('--auxiliar_files_path', required=True, + help='Path to the directory where the necessary auxiliary files will be created if them are ' + + 'not created yet.') + + p.add_argument('--vertical_description', required=True, + help='Path to the file that contains the vertical description of the desired output.') # Global options - p.add('--inc_lat', required=False, help='Latitude resolution for a global domain.', type=float) - p.add('--inc_lon', required=False, help='Longitude resolution for a global domain.', type=float) + p.add_argument('--inc_lat', required=False, help='Latitude resolution for a global domain.', type=float) + p.add_argument('--inc_lon', required=False, help='Longitude resolution for a global domain.', type=float) # Rotated options - p.add('--centre_lat', required=False, help='Central geographic 
latitude of grid (non-rotated degrees). Corresponds to the TPH0D parameter in NMMB-MONARCH.', type=float) - p.add('--centre_lon', required=False, help='Central geographic longitude of grid (non-rotated degrees, positive east). Corresponds to the TLM0D parameter in NMMB-MONARCH.', type=float) - p.add('--west_boundary', required=False, help="Grid's western boundary from center point (rotated degrees). Corresponds to the WBD parameter in NMMB-MONARCH.", type=float) - p.add('--south_boundary', required=False, help="Grid's southern boundary from center point (rotated degrees). Corresponds to the SBD parameter in NMMB-MONARCH.", type=float) - p.add('--inc_rlat', required=False, help='Latitudinal grid resolution (rotated degrees). Corresponds to the DPHD parameter in NMMB-MONARCH.', type=float) - p.add('--inc_rlon', required=False, help='Longitudinal grid resolution (rotated degrees). Corresponds to the DLMD parameter in NMMB-MONARCH.', type=float) + p.add_argument('--centre_lat', required=False, + help='Central geographic latitude of grid (non-rotated degrees). Corresponds to the TPH0D ' + + 'parameter in NMMB-MONARCH.', type=float) + p.add_argument('--centre_lon', required=False, + help='Central geographic longitude of grid (non-rotated degrees, positive east). Corresponds ' + + 'to the TLM0D parameter in NMMB-MONARCH.', type=float) + p.add_argument('--west_boundary', required=False, + help="Grid's western boundary from center point (rotated degrees). Corresponds to the WBD " + + "parameter in NMMB-MONARCH.", type=float) + p.add_argument('--south_boundary', required=False, + help="Grid's southern boundary from center point (rotated degrees). Corresponds to the SBD " + + "parameter in NMMB-MONARCH.", type=float) + p.add_argument('--inc_rlat', required=False, + help='Latitudinal grid resolution (rotated degrees). 
Corresponds to the DPHD parameter in ' + + 'NMMB-MONARCH.', type=float) + p.add_argument('--inc_rlon', required=False, + help='Longitudinal grid resolution (rotated degrees). Corresponds to the DLMD parameter ' + + 'in NMMB-MONARCH.', type=float) # Lambert conformal conic options - p.add('--lat_1', required=False, help='Standard parallel 1 (in deg). Corresponds to the P_ALP parameter of the GRIDDESC file.', type=float) - p.add('--lat_2', required=False, help='Standard parallel 2 (in deg). Corresponds to the P_BET parameter of the GRIDDESC file.', type=float) - p.add('--lon_0', required=False, help='Longitude of the central meridian (degrees). Corresponds to the P_GAM parameter of the GRIDDESC file.', type=float) - p.add('--lat_0', required=False, help='Latitude of the origin of the projection (degrees). Corresponds to the Y_CENT parameter of the GRIDDESC file.', type=float) - p.add('--nx', required=False, help='Number of grid columns. Corresponds to the NCOLS parameter of the GRIDDESC file.', type=float) - p.add('--ny', required=False, help='Number of grid rows. Corresponds to the NROWS parameter of the GRIDDESC file.', type=float) - p.add('--inc_x', required=False, help='X-coordinate cell dimension (meters). Corresponds to the XCELL parameter of the GRIDDESC file.', type=float) - p.add('--inc_y', required=False, help='Y-coordinate cell dimension (meters). Corresponds to the YCELL parameter of the GRIDDESC file.', type=float) - p.add('--x_0', required=False, help='X-coordinate origin of grid (meters). Corresponds to the XORIG parameter of the GRIDDESC file.', type=float) - p.add('--y_0', required=False, help='Y-coordinate origin of grid (meters). Corresponds to the YORIG parameter of the GRIDDESC file.', type=float) + p.add_argument('--lat_1', required=False, + help='Standard parallel 1 (in deg). Corresponds to the P_ALP parameter of the GRIDDESC file.', + type=float) + p.add_argument('--lat_2', required=False, + help='Standard parallel 2 (in deg). 
Corresponds to the P_BET parameter of the GRIDDESC file.', + type=float) + p.add_argument('--lon_0', required=False, + help='Longitude of the central meridian (degrees). Corresponds to the P_GAM parameter of ' + + 'the GRIDDESC file.', type=float) + p.add_argument('--lat_0', required=False, + help='Latitude of the origin of the projection (degrees). Corresponds to the Y_CENT ' + + 'parameter of the GRIDDESC file.', type=float) + p.add_argument('--nx', required=False, + help='Number of grid columns. Corresponds to the NCOLS parameter of the GRIDDESC file.', + type=float) + p.add_argument('--ny', required=False, + help='Number of grid rows. Corresponds to the NROWS parameter of the GRIDDESC file.', + type=float) + p.add_argument('--inc_x', required=False, + help='X-coordinate cell dimension (meters). Corresponds to the XCELL parameter of the ' + + 'GRIDDESC file.', type=float) + p.add_argument('--inc_y', required=False, + help='Y-coordinate cell dimension (meters). Corresponds to the YCELL parameter of the ' + + 'GRIDDESC file.', type=float) + p.add_argument('--x_0', required=False, + help='X-coordinate origin of grid (meters). Corresponds to the XORIG parameter of the ' + + 'GRIDDESC file.', type=float) + p.add_argument('--y_0', required=False, + help='Y-coordinate origin of grid (meters). 
Corresponds to the YORIG parameter of the ' + + 'GRIDDESC file.', type=float) # Mercator - p.add('--lat_ts', required=False, help='...', type=float) - - p.add('--cross_table', required=True, help='Path to the file that contains the information ' + - 'of the datasets to use.') - p.add('--p_vertical', required=True, help='Path to the file that contains all the needed vertical profiles.') - p.add('--p_month', required=True, help='Path to the file that contains all the needed monthly profiles.') - p.add('--p_day', required=True, help='Path to the file that contains all the needed daily profiles.') - p.add('--p_hour', required=True, help='Path to the file that contains all the needed hourly profiles.') - p.add('--p_speciation', required=True, help='Path to the file that contains all the needed ' + - 'speciation profiles.') - p.add('--molecular_weights', required=True, help='Path to the file that contains the molecular weights ' + - 'of the input pollutants.') - p.add('--world_info', required=True, help='Path to the file that contains the world information ' + - 'like timezones, ISO codes, ...') + p.add_argument('--lat_ts', required=False, help='...', type=float) + + p.add_argument('--cross_table', required=True, + help='Path to the file that contains the information of the datasets to use.') + p.add_argument('--p_vertical', required=True, + help='Path to the file that contains all the needed vertical profiles.') + p.add_argument('--p_month', required=True, + help='Path to the file that contains all the needed monthly profiles.') + p.add_argument('--p_day', required=True, help='Path to the file that contains all the needed daily profiles.') + p.add_argument('--p_hour', required=True, help='Path to the file that contains all the needed hourly profiles.') + p.add_argument('--p_speciation', required=True, + help='Path to the file that contains all the needed speciation profiles.') + p.add_argument('--molecular_weights', required=True, + help='Path to the file that contains 
the molecular weights of the input pollutants.') + p.add_argument('--world_info', required=True, + help='Path to the file that contains the world information like timezones, ISO codes, ...') options = p.parse_args() for item in vars(options): @@ -132,8 +166,8 @@ class Config(ArgParser): exec("options.{0} = options.{0}.replace('', '{1}_{2}')".format( item, options.inc_x, options.inc_y)) - options.start_date = self._parse_start_date(options.start_date) - options.end_date = self._parse_end_date(options.end_date, options.start_date) + options.start_date = self._parse_start_date(str(options.start_date)) + options.end_date = self._parse_end_date(str(options.end_date), options.start_date) self.create_dir(options.output_dir) self.create_dir(options.auxiliar_files_path) diff --git a/setup.py b/setup.py index 8c1491b..91aeac8 100644 --- a/setup.py +++ b/setup.py @@ -62,6 +62,7 @@ setup( # 're', 'timezonefinder', # 'unittest' + 'mpi4py' ], packages=find_packages(), classifiers=[ -- GitLab From 72d620b7759ce463edda8fa08084dafbd55687db Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 11:21:53 +0200 Subject: [PATCH 06/51] medium clean settings.py --- hermesv3_gr/config/settings.py | 41 +++++----------------------------- 1 file changed, 6 insertions(+), 35 deletions(-) diff --git a/hermesv3_gr/config/settings.py b/hermesv3_gr/config/settings.py index 789a24d..d16ac27 100644 --- a/hermesv3_gr/config/settings.py +++ b/hermesv3_gr/config/settings.py @@ -19,12 +19,10 @@ import os -import sys import numpy as np global refresh_log - global precision precision = np.float64 @@ -48,16 +46,7 @@ global df_times def define_global_vars(in_log_level): - # TODO definir millor que significa cada log level - """ - Defines the global values of log_level. - - :param in_log_level: Level of detail of the information given on STDOUT [1, 2, 3]. - :type in_log_level: int - - :return: global values of Log Level. 
- :rtype: bool - """ + # TODO Documentation from mpi4py import MPI global icomm @@ -73,31 +62,9 @@ def define_global_vars(in_log_level): global log_level log_level = in_log_level - # global log_level_1 - # global log_level_2 - # global log_level_3 - # - # if log_level is 1 and rank == 0: - # log_level_1 = True - # log_level_2 = False - # log_level_3 = False - # elif log_level is 2 and rank == 0: - # log_level_1 = True - # log_level_2 = True - # log_level_3 = False - # elif log_level is 3 and rank == 0: - # log_level_1 = True - # log_level_2 = True - # log_level_3 = True - # else: - # log_level_1 = False - # log_level_2 = False - # log_level_3 = False - # - # return log_level_1, log_level_2, log_level_3 - def define_log_file(log_path, date): + # TODO Documentation log_path = os.path.join(log_path, 'logs') if not os.path.exists(log_path): os.makedirs(log_path) @@ -113,6 +80,7 @@ def define_log_file(log_path, date): def define_times_file(): + # TODO Documentation import pandas as pd global df_times @@ -120,18 +88,21 @@ def define_times_file(): def write_log(msg, level=1): + # TODO Documentation if log_level >= level: log_file.write(msg + '\n') log_file.flush() def write_time(module, func, time, level=1): + # TODO Documentation global df_times if log_level >= level: df_times = df_times.append({'Class': module, 'Function': func, rank: time}, ignore_index=True) def finish_logs(output_dir, date): + # TODO Documentation import pandas as pd from functools import reduce log_file.close() -- GitLab From fb706a87ce2101940cc5f6c84eaa783754935cf9 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 11:52:57 +0200 Subject: [PATCH 07/51] cleaning files --- hermesv3_gr/hermes.py | 12 +- .../emision_inventories/emission_inventory.py | 199 +- .../gfas_emission_inventory.py | 149 +- .../point_source_emission_inventory.py | 31 +- hermesv3_gr/modules/grids/grid.py | 56 +- hermesv3_gr/modules/grids/grid_global.py | 21 +- hermesv3_gr/modules/grids/grid_lcc.py | 30 +- 
hermesv3_gr/modules/grids/grid_mercator.py | 32 +- hermesv3_gr/modules/grids/grid_rotated.py | 41 +- hermesv3_gr/modules/masking/masking.py | 38 +- hermesv3_gr/modules/regrid/regrid.py | 26 +- .../modules/regrid/regrid_conservative.py | 22 +- hermesv3_gr/modules/speciation/speciation.py | 26 +- hermesv3_gr/modules/temporal/temporal.py | 78 +- hermesv3_gr/modules/vertical/vertical.py | 30 +- hermesv3_gr/modules/vertical/vertical_gfas.py | 26 +- hermesv3_gr/modules/writing/writer.py | 273 +- hermesv3_gr/modules/writing/writer_cmaq.py | 18 +- hermesv3_gr/modules/writing/writer_monarch.py | 20 +- .../modules/writing/writer_wrf_chem.py | 18 +- hermesv3_gr/tools/netcdf_tools.py | 3278 +---------------- 21 files changed, 596 insertions(+), 3828 deletions(-) diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index 2ebd719..9ab582e 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -22,7 +22,7 @@ import sys import os from mpi4py import MPI -from timeit import default_timer as gettime +import timeit from hermesv3_gr.config import settings from hermesv3_gr.config.config import Config @@ -45,7 +45,7 @@ class Hermes(object): from hermesv3_gr.modules.temporal.temporal import TemporalDistribution from hermesv3_gr.modules.writing.writer import Writer global full_time - st_time = full_time = gettime() + st_time = full_time = timeit.default_timer() self.config = config self.options = config.options @@ -88,7 +88,7 @@ class Hermes(object): parallel=not settings.writing_serial) settings.write_log('End of HERMESv3 initialization.') - settings.write_time('HERMES', 'Init', gettime() - st_time, level=1) + settings.write_time('HERMES', 'Init', timeit.default_timer() - st_time, level=1) # @profile def main(self): @@ -103,7 +103,7 @@ class Hermes(object): from datetime import timedelta from cf_units import Unit - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('') settings.write_log('***** Starting HERMESv3 *****') num = 1 @@ -137,8 +137,8 @@ 
class Hermes(object): self.writer.write(self.emission_list) settings.write_log("***** HERMESv3 simulation finished successfully *****") - settings.write_time('HERMES', 'main', gettime() - st_time) - settings.write_time('HERMES', 'TOTAL', gettime() - full_time) + settings.write_time('HERMES', 'main', timeit.default_timer() - st_time) + settings.write_time('HERMES', 'TOTAL', timeit.default_timer() - full_time) settings.finish_logs(self.options.output_dir, self.options.start_date) if self.options.start_date < self.options.end_date: diff --git a/hermesv3_gr/modules/emision_inventories/emission_inventory.py b/hermesv3_gr/modules/emision_inventories/emission_inventory.py index 2f90120..3259949 100644 --- a/hermesv3_gr/modules/emision_inventories/emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/emission_inventory.py @@ -19,7 +19,7 @@ import os import sys -from timeit import default_timer as get_time +import timeit import hermesv3_gr.config.settings as settings from hermesv3_gr.modules.regrid.regrid_conservative import ConservativeRegrid @@ -79,7 +79,7 @@ class EmissionInventory(object): p_vertical=None, p_month=None, p_day=None, p_hour=None, p_speciation=None): from hermesv3_gr.modules.masking.masking import Masking - st_time = get_time() + st_time = timeit.default_timer() settings.write_log('\t\tCreating area source emission inventory.', level=3) # Emission Inventory parameters @@ -141,7 +141,7 @@ class EmissionInventory(object): settings.write_log('\t\tNone temporal profile set.', level=2) self.temporal_factors = None - # Creating Speciation Object + # Creating Speciation Object if p_speciation is not None: self.speciation = Speciation(p_speciation, options.p_speciation, options.molecular_weights) else: @@ -152,11 +152,19 @@ class EmissionInventory(object): self.emissions = [] - settings.write_time('EmissionInventory', 'Init', get_time() - st_time, level=3) - - return None + settings.write_time('EmissionInventory', 'Init', timeit.default_timer() - 
st_time, level=3) def create_pollutants_dicts(self, pollutants): + """ + Creates a list of dictionaries with the information of the name, paht and Dataset of each pollutant + + :param pollutants: List of pollutants names + :type pollutants: list + + :return: List of dictionaries + :rtype: list + """ + pollutant_list = [] for pollutant_name in pollutants: @@ -192,8 +200,6 @@ class EmissionInventory(object): :param pollutant: Name of the pollutant. :type pollutant: str - :param extension: Extension of the input file - :return: Full path of the needed file. :rtype: str """ @@ -244,14 +250,14 @@ class EmissionInventory(object): def do_regrid(self): - st_time = get_time() + st_time = timeit.default_timer() settings.write_log("\tRegridding", level=2) regridded_emissions = self.regrid.start_regridding() for emission in regridded_emissions: dict_aux = {'name': emission['name'], 'data': emission['data'], 'units': 'm'} self.emissions.append(dict_aux) - settings.write_time('EmissionInventory', 'do_regrid', get_time() - st_time, level=2) + settings.write_time('EmissionInventory', 'do_regrid', timeit.default_timer() - st_time, level=2) @staticmethod def make_emission_list(options, grid, vertical_output_profile, date): @@ -260,6 +266,12 @@ class EmissionInventory(object): :param options: Full list of parameters given by passing argument or in the configuration file. :type options: Namespace + + :param grid: Grid to use. + :type grid: Grid + + :param vertical_output_profile: Path to eht file that contains the vertical profile. + :type vertical_output_profile: str :param date: Date to the day to simulate. 
:type date: datetime.datetime @@ -272,7 +284,7 @@ class EmissionInventory(object): from gfas_emission_inventory import GfasEmissionInventory from point_source_emission_inventory import PointSourceEmissionInventory - st_time = get_time() + st_time = timeit.default_timer() settings.write_log('Loading emissions') path = options.cross_table @@ -349,174 +361,15 @@ class EmissionInventory(object): else: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: - raise ValueError("ERROR: The emission inventory source type '{0}'".format(emission_inventory.source_type) + + raise ValueError("ERROR: The emission inventory source type '{0}'".format( + emission_inventory.source_type) + " is not implemented. Use 'area' or 'point'") sys.exit(1) settings.write_log('', level=2) - settings.write_time('EmissionInventory', 'make_emission_list', get_time() - st_time, level=3) + settings.write_time('EmissionInventory', 'make_emission_list', timeit.default_timer() - st_time, level=3) return emission_inventory_list - # @staticmethod - # def create_output_shape(vertical_description_path, timestep_num, grid): - # """ - # Create a shape with the output format - # - # :param vertical_description_path: path to the file that contain the vertical description of the required output - # file. - # :type vertical_description_path: str - # - # :param timestep_num: Number of time steps. - # :type timestep_num: int - # - # :param grid: 3D Grid (time, latitude, longitude) of the output file. - # :type grid: Grid - # - # :return: Shape of the output file (4D: time, level, latitude, longitude). 
- # :rtype: tuple - # """ - # from hermesv3_gr.modules.vertical.vertical import VerticalDistribution - # - # if settings.log_level_3: - # st_time = get_time() - # else: - # st_time = None - # if vertical_description_path is 1: - # levels = [0] - # else: - # levels = VerticalDistribution.get_vertical_output_profile(vertical_description_path) - # if grid.grid_type == 'lcc': - # shape = (timestep_num, len(levels), grid.center_latitudes.shape[0], grid.center_longitudes.shape[1]) - # elif grid.grid_type == 'rotated': - # shape = (timestep_num, len(levels), grid.center_latitudes.shape[0], grid.center_longitudes.shape[1]) - # else: - # shape = (timestep_num, len(levels), len(grid.center_latitudes), len(grid.center_longitudes)) - # - # if settings.log_level_3: - # print "TIME -> EmissionInventory.create_output_shape: {0} s".format(round(get_time() - st_time, 2)) - # - # return shape - # - # @staticmethod - # def create_output_shape_parallel(vertical_description_path, timestep_num, grid): - # """ - # Create a shape with the output format - # - # :param vertical_description_path: path to the file that contain the vertical description of the required output - # file. - # :type vertical_description_path: str - # - # :param timestep_num: Number of time steps. - # :type timestep_num: int - # - # :param grid: 3D Grid (time, latitude, longitude) of the output file. - # :type grid: Grid - # - # :return: Shape of the output file (4D: time, level, latitude, longitude). 
- # :rtype: tuple - # """ - # from hermesv3_gr.modules.vertical.vertical import VerticalDistribution - # - # if settings.log_level_3: - # st_time = get_time() - # else: - # st_time = None - # if vertical_description_path is 1: - # levels = [0] - # else: - # levels = VerticalDistribution.get_vertical_output_profile(vertical_description_path) - # if grid.grid_type == 'lcc': - # shape = (timestep_num, len(levels), grid.center_latitudes.shape[0], grid.center_longitudes.shape[1]) - # elif grid.grid_type == 'rotated': - # shape = (timestep_num, len(levels), grid.center_latitudes.shape[0], grid.center_longitudes.shape[1]) - # else: - # shape = (timestep_num, len(levels), grid.x_upper_bound - grid.x_lower_bound, grid.y_upper_bound - grid.y_lower_bound) - # - # if settings.log_level_3: - # print "TIME -> EmissionInventory.create_output_shape: {0} s".format(round(get_time() - st_time, 2)) - # - # return shape - - @staticmethod - def create_aux_output_emission_list(speciation_profile_path): - """ - Creates the list of output pollutants - - :param speciation_profile_path: Path to the file that contains all the speciation profiles. - :type speciation_profile_path: str - - :param shape: Shape of the output. - :type shape: tuple - - :return: Empty list of the output pollutants. 
- """ - # print 'OUT_PROFILE_SHAPE', shape - import pandas as pd - import numpy as np - - if settings.log_level_3: - st_time = get_time() - else: - st_time = None - - output_emission_list = [] - - df_speciation = pd.read_csv(speciation_profile_path, sep=';', nrows=2) - del df_speciation['ID'] - for column in df_speciation: - output_emission_list.append({ - 'name': column, - 'units': df_speciation[column][0], - 'long_name': df_speciation[column][1], - }) - - if settings.log_level_3: - print "TIME -> EmissionInventory.create_aux_output_emission_list: {0} s".format( - round(get_time() - st_time, 2)) - - return output_emission_list - - @staticmethod - def create_aux_output_emission_list_full(speciation_profile_path, shape): - """ - Creates the list of output pollutants - - :param speciation_profile_path: Path to the file that contains all the speciation profiles. - :type speciation_profile_path: str - - :param shape: Shape of the output. - :type shape: tuple - - :return: Empty list of the output pollutants. 
- """ - # print 'OUT_PROFILE_SHAPE', shape - import pandas as pd - import numpy as np - - if settings.log_level_3: - st_time = get_time() - else: - st_time = None - - output_emission_list = [] - - df_speciation = pd.read_csv(speciation_profile_path, sep=';', nrows=2) - del df_speciation['ID'] - for column in df_speciation: - output_emission_list.append({ - 'name': column, - 'units': df_speciation[column][0], - 'long_name': df_speciation[column][1], - 'data': np.zeros(shape) - # 'data': np.empty(shape) - }) - - if settings.log_level_3: - print "TIME -> EmissionInventory.create_aux_output_emission_list: {0} s".format( - round(get_time() - st_time, 2)) - - return output_emission_list - if __name__ == "__main__": pass diff --git a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py index 63243b1..e53c8da 100755 --- a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py @@ -18,8 +18,7 @@ # along with HERMESv3_GR. If not, see . 
import os -import sys -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings from emission_inventory import EmissionInventory @@ -75,25 +74,23 @@ class GfasEmissionInventory(EmissionInventory): p_hour=None, p_speciation=None): from hermesv3_gr.modules.vertical.vertical_gfas import GfasVerticalDistribution - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating GFAS emission inventory.', level=3) super(GfasEmissionInventory, self).__init__( options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, frequency, vertical_output_profile, - reference_year=reference_year, factors=factors, regrid_mask=regrid_mask, p_vertical=None, + reference_year=reference_year, factors=factors, regrid_mask=regrid_mask, p_vertical='', p_month=p_month, p_day=p_day, p_hour=p_hour, p_speciation=p_speciation) self.approach = self.get_approach(p_vertical) self.method = self.get_method(p_vertical) - # self.altitude = self.get_altitude() + self.altitude = self.get_altitude() self.vertical = GfasVerticalDistribution(vertical_output_profile, self.approach, self.get_altitude()) - settings.write_time('GFAS_EmissionInventory', 'Init', gettime() - st_time, level=3) - - return None + settings.write_time('GFAS_EmissionInventory', 'Init', timeit.default_timer() - st_time, level=3) def get_input_path(self, pollutant=None, extension='nc'): """ @@ -102,15 +99,18 @@ class GfasEmissionInventory(EmissionInventory): :param pollutant: Name of the pollutant of the NetCDF. :type pollutant: str + :param extension: Extension of the input file. + :type: str + :return: Full path of the needed NetCDF. 
:rtype: str """ - st_time = gettime() + st_time = timeit.default_timer() netcdf_path = os.path.join(self.inputs_path, 'multivar', 'ga_{0}.{1}'.format( self.date.strftime('%Y%m%d'), extension)) - settings.write_time('GfasEmissionInventory', 'get_input_path', gettime() - st_time, level=3) + settings.write_time('GfasEmissionInventory', 'get_input_path', timeit.default_timer() - st_time, level=3) return netcdf_path @@ -123,7 +123,7 @@ class GfasEmissionInventory(EmissionInventory): """ from hermesv3_gr.tools.netcdf_tools import extract_vars - st_time = gettime() + st_time = timeit.default_timer() if self.method == 'sovief': alt_var = 'apt' @@ -138,10 +138,11 @@ class GfasEmissionInventory(EmissionInventory): alt = alt['data'] - settings.write_time('GfasEmissionInventory', 'get_altitude', gettime() - st_time, level=3) + settings.write_time('GfasEmissionInventory', 'get_altitude', timeit.default_timer() - st_time, level=3) return alt - def get_approach(self, p_vertical): + @ staticmethod + def get_approach(p_vertical): """ Extracts the given approach value. @@ -150,7 +151,7 @@ class GfasEmissionInventory(EmissionInventory): """ import re - st_time = gettime() + st_time = timeit.default_timer() return_value = None aux_list = re.split(', |,| , | ,', p_vertical) @@ -159,11 +160,12 @@ class GfasEmissionInventory(EmissionInventory): if aux_value[0] == 'approach': return_value = aux_value[1] - settings.write_time('GfasEmissionInventory', 'get_approach', gettime() - st_time, level=3) + settings.write_time('GfasEmissionInventory', 'get_approach', timeit.default_timer() - st_time, level=3) return return_value - def get_method(self, p_vertical): + @ staticmethod + def get_method(p_vertical): """ Extracts the given method value. 
@@ -172,7 +174,7 @@ class GfasEmissionInventory(EmissionInventory): """ import re - st_time = gettime() + st_time = timeit.default_timer() return_value = None aux_list = re.split(', |,| , | ,', p_vertical) @@ -181,7 +183,7 @@ class GfasEmissionInventory(EmissionInventory): if aux_value[0] == 'method': return_value = aux_value[1] - settings.write_time('GfasEmissionInventory', 'get_method', gettime() - st_time, level=3) + settings.write_time('GfasEmissionInventory', 'get_method', timeit.default_timer() - st_time, level=3) return return_value @@ -195,17 +197,18 @@ class GfasEmissionInventory(EmissionInventory): :return: Emissions already allocated on the top altitude of each fire. :rtype: numpy.array """ - st_time = gettime() + st_time = timeit.default_timer() return_value = self.vertical.do_vertical_interpolation_allocation(values, self.altitude) - settings.write_time('GfasEmissionInventory', 'do_vertical_allocation', gettime() - st_time, level=3) + settings.write_time('GfasEmissionInventory', 'do_vertical_allocation', timeit.default_timer() - st_time, + level=3) return return_value def do_regrid(self): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tRegridding", level=2) for i in xrange(len(self.emissions)): @@ -219,108 +222,8 @@ class GfasEmissionInventory(EmissionInventory): self.emissions.append(dict_aux) self.vertical = None - settings.write_time('GfasEmissionInventory', 'do_regrid', gettime() - st_time, level=2) - - # def do_vertical(self, values): - # """ - # Distribute the emissions on the top layer into their low layers depending on the method - # - # :param values: Emissions to distribute. - # :type values: numpy.ndarray - # - # :return: Emission already distributed. 
- # :rtype: numpy.ndarray - # """ - # if settings.log_level_3: - # st_time = gettime() - # else: - # st_time = None - # if settings.log_level_1: - # print "\tVertical interpolation (Inventory: {0}; Sector: {1})".format(self.inventory_name, self.sector) - # - # return_value = self.vertical.do_vertical_interpolation(values) - # - # if settings.log_level_3: - # print "TIME -> EmissionInventory.do_vertical: {0} s".format(round(gettime() - st_time, 2)) - # - # return return_value - # - # def process_emissions(self, auxiliary_files_path, grid_type, vertical_description_path, vertical_profile_path, - # p_month, p_day, p_hour, date, timestep_type, timestep_num, timestep_freq, world_info, - # speciation_profile_path, molecular_weights_path, fluxes=False, - # inc_lat=None, inc_lon=None, - # new_pole_longitude_degrees=None, new_pole_latitude_degrees=None, centre_lat=None, centre_lon=None, - # west_boundary=None, south_boundary=None, inc_rlat=None, inc_rlon=None, - # lat_1=None, lat_2=None, lon_0=None, lat_0=None, nx=None, ny=None, inc_x=None, inc_y=None, - # x_0=None, y_0=None, - # is_nmmb=False): - # """ - # Manages all the process to get the desired output. - # - # :param auxiliary_files_path: Path to the folder where will be stored the weight matrix files if them are not created yet. - # :type auxiliary_files_path: str - # - # :param grid_type: Type of the destination grid - # :type grid_type: str - # - # :param inc_lat: Resolution of the latitude coordinates. - # :type inc_lat: float - # - # :param inc_lon: Resolution of the longitude coordinates. - # :type inc_lon: float - # - # :param vertical_profile_path: Path to the file that contains all the vertical profiles. - # :type vertical_profile_path: str - # - # :param vertical_description_path: path to the file that contain the vertical description of the required output - # file. - # :type vertical_description_path: str - # - # :param speciation_profile_path: Path to the file that contains all the speciation profiles. 
- # :type speciation_profile_path: str - # - # :param molecular_weights_path: Path to the file that contains all the needed molecular weights. - # :type molecular_weights_path: str - # - # :param is_nmmb: Indicates if the required ouptut is for NMMB-MONARCH - # :type is_nmmb: bool - # """ - # from hermesv3_gr.modules.vertical.vertical_gfas import GfasVerticalDistribution - # from hermesv3_gr.modules.regrid.regrid_conservative import ConservativeRegrid - # - # if settings.log_level_3: - # st_time = gettime() - # else: - # st_time = None - # - # self.emissions = self.read_emissions() - # self.masking.check_regrid_mask(self.get_input_path(pollutant=self.pollutant_dicts[0])) - # - # self.vertical = GfasVerticalDistribution(vertical_description_path, self.approach) - # - # weight_matrix_file = os.path.join(auxiliary_files_path, "Weight_Matrix_{0}.nc".format(self.inventory_name)) - # self.regrid = ConservativeRegrid(self.get_input_path(), weight_matrix_file, grid_type, auxiliary_files_path, - # masking=self.masking, inc_lat=inc_lat, inc_lon=inc_lon, - # centre_lat=centre_lat, - # centre_lon=centre_lon, west_boundary=west_boundary, - # south_boundary=south_boundary, inc_rlat=inc_rlat, inc_rlon=inc_rlon, - # lat_1=lat_1, lat_2=lat_2, lon_0=lon_0, lat_0=lat_0, nx=nx, ny=ny, inc_x=inc_x, - # inc_y=inc_y, x_0=x_0, y_0=y_0, - # is_nmmb=is_nmmb) - # - # for i in xrange(len(self.emissions)): - # self.emissions[i]["data"] = self.do_vertical_allocation(self.emissions[i]["data"]) - # self.emissions[i]["data"] = self.do_regrid(self.emissions[i]["data"]) - # self.emissions[i]["data"] = self.do_vertical(self.emissions[i]["data"]) - # self.do_temporal(date, timestep_type, timestep_num, timestep_freq, p_month, p_day, p_hour, world_info, - # auxiliary_files_path, self.regrid.grid) - # self.do_speciation(speciation_profile_path, molecular_weights_path, self.regrid.grid.cell_area, fluxes) - # if is_nmmb and self.regrid.grid.grid_type == 'global': - # self.unify_borders() - # 
self.add_ghost_cell() - # - # if settings.log_level_3: - # print "TIME -> EmissionInventory.process_emissions: {0} s".format(round(gettime() - st_time, 2)) + settings.write_time('GfasEmissionInventory', 'do_regrid', timeit.default_timer() - st_time, level=2) + if __name__ == "__main__": pass diff --git a/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py index f9098a4..2238a73 100755 --- a/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py @@ -17,10 +17,8 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . -import os -import sys -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings from emission_inventory import EmissionInventory @@ -47,9 +45,6 @@ class PointSourceEmissionInventory(EmissionInventory): :param pollutants: List of the pollutant name to take into account. :type pollutants: list of str - :param path: Path where are stored all the datasets to use. eg: /esarchive/recon/jrc/htapv2/monthly_mean - :type path: str - :param frequency: Frequency of the inputs. 
[yearly, monthly, daily] :type frequency: str @@ -82,7 +77,7 @@ class PointSourceEmissionInventory(EmissionInventory): frequency, vertical_output_profile, reference_year=2010, factors=None, regrid_mask=None, p_vertical=None, p_month=None, p_day=None, p_hour=None, p_speciation=None): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating point source emission inventory.', level=3) super(PointSourceEmissionInventory, self).__init__( @@ -95,9 +90,7 @@ class PointSourceEmissionInventory(EmissionInventory): self.area = None self.vertical = 'custom' - settings.write_time('PointSourceEmissionInventory', 'Init', gettime() - st_time, level=3) - - return None + settings.write_time('PointSourceEmissionInventory', 'Init', timeit.default_timer() - st_time, level=3) def do_regrid(self): """ @@ -110,7 +103,7 @@ class PointSourceEmissionInventory(EmissionInventory): import geopandas as gpd from shapely.geometry import Point - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tAllocating point sources on grid:", level=2) num = 1 @@ -144,7 +137,7 @@ class PointSourceEmissionInventory(EmissionInventory): } self.emissions.append(dict_aux) - settings.write_time('PointSourceEmissionInventory', 'do_regrid', gettime() - st_time, level=2) + settings.write_time('PointSourceEmissionInventory', 'do_regrid', timeit.default_timer() - st_time, level=2) return True def calculate_altitudes(self, vertical_description_path): @@ -159,7 +152,7 @@ class PointSourceEmissionInventory(EmissionInventory): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\t\tCalculating vertical allocation.", level=3) df = pd.read_csv(vertical_description_path, sep=';') # df.sort_values(by='height_magl', ascending=False, inplace=True) @@ -170,7 +163,8 @@ class PointSourceEmissionInventory(EmissionInventory): self.location.loc[self.location['Alt_Injection'] <= line['height_magl'], 'Alt_Injection'] = None del 
self.location['Alt_Injection'] - settings.write_time('PointSourceEmissionInventory', 'calculate_altitudes', gettime() - st_time, level=2) + settings.write_time('PointSourceEmissionInventory', 'calculate_altitudes', timeit.default_timer() - st_time, + level=2) return True @@ -180,12 +174,9 @@ class PointSourceEmissionInventory(EmissionInventory): :return: None """ - import pandas as pd - import geopandas as gpd - from shapely.geometry import Point - - st_time = gettime() + st_time = timeit.default_timer() + aux_df = None for emission in self.emissions: aux_df = self.location.copy() @@ -196,7 +187,7 @@ class PointSourceEmissionInventory(EmissionInventory): self.location = aux_df.loc[:, ['FID', 'layer']] - settings.write_time('PointSourceEmissionInventory', 'Init', gettime() - st_time, level=3) + settings.write_time('PointSourceEmissionInventory', 'Init', timeit.default_timer() - st_time, level=3) return None diff --git a/hermesv3_gr/modules/grids/grid.py b/hermesv3_gr/modules/grids/grid.py index 55af335..250ef8d 100644 --- a/hermesv3_gr/modules/grids/grid.py +++ b/hermesv3_gr/modules/grids/grid.py @@ -17,12 +17,12 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . + import os import sys -from timeit import default_timer as gettime +import timeit import numpy as np import ESMF - import hermesv3_gr.config.settings as settings @@ -38,12 +38,9 @@ class Grid(object): :param temporal_path: Path to the temporal folder. :type temporal_path: str - - :param nprocs: Number of processors to slice the map. 
- :type nprocs: int """ def __init__(self, grid_type, vertical_description_path, temporal_path): - st_time = gettime() + st_time = timeit.default_timer() # settings.write_log('Creating Grid...', level=1) # Defining class atributes @@ -75,15 +72,15 @@ class Grid(object): self.y_upper_bound = None self.shape = None - settings.write_time('Grid', 'Init', gettime() - st_time, level=1) + self.crs = None - return None + settings.write_time('Grid', 'Init', timeit.default_timer() - st_time, level=1) @staticmethod def create_esmf_grid_from_file(file_name, sphere=True): import ESMF - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating ESMF grid from file {0}'.format(file_name), level=3) # ESMF.Manager(debug=True) @@ -91,10 +88,9 @@ class Grid(object): grid = ESMF.Grid(filename=file_name, filetype=ESMF.FileFormat.GRIDSPEC, is_sphere=sphere, add_corner_stagger=True) - settings.write_time('Grid', 'create_esmf_grid_from_file', gettime() - st_time, level=3) + settings.write_time('Grid', 'create_esmf_grid_from_file', timeit.default_timer() - st_time, level=3) return grid - @staticmethod def select_grid(grid_type, vertical_description_path, timestep_num, temporal_path, inc_lat, inc_lon, centre_lat, centre_lon, west_boundary, south_boundary, inc_rlat, inc_rlon, @@ -176,7 +172,7 @@ class Grid(object): :rtype: Grid """ - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('Selecting grid', level=1) # Creating a different object depending on the grid type @@ -205,7 +201,7 @@ class Grid(object): + " Use 'global', 'rotated' or 'lcc'.") sys.exit(1) - settings.write_time('Grid', 'select_grid', gettime() - st_time, level=3) + settings.write_time('Grid', 'select_grid', timeit.default_timer() - st_time, level=3) return grid @@ -223,14 +219,14 @@ class Grid(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tSetting vertical levels', level=3) df = 
pd.read_csv(vertical_description_path, sep=';') heights = df.height_magl.values - settings.write_time('Grid', 'set_vertical_levels', gettime() - st_time, level=3) + settings.write_time('Grid', 'set_vertical_levels', timeit.default_timer() - st_time, level=3) return heights @@ -242,7 +238,7 @@ class Grid(object): # TODO Not to write two NetCDF. Open one and modify it. from hermesv3_gr.tools.netcdf_tools import write_netcdf - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\twrite_coords_netcdf', level=3) if not self.chech_coords_file(): @@ -263,7 +259,7 @@ class Grid(object): else: self.cell_area = self.get_cell_area() - settings.write_time('Grid', 'write_coords_netcdf', gettime() - st_time, level=3) + settings.write_time('Grid', 'write_coords_netcdf', timeit.default_timer() - st_time, level=3) def get_cell_area(self): """ @@ -275,7 +271,7 @@ class Grid(object): from cdo import Cdo from netCDF4 import Dataset - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tGetting cell area from {0}'.format(self.coords_netcdf_file), level=3) # Initialises the CDO @@ -287,7 +283,7 @@ class Grid(object): cell_area = nc_aux.variables['cell_area'][:] nc_aux.close() - settings.write_time('Grid', 'get_cell_area', gettime() - st_time, level=3) + settings.write_time('Grid', 'get_cell_area', timeit.default_timer() - st_time, level=3) return cell_area @@ -309,7 +305,7 @@ class Grid(object): :rtype: numpy.array """ - st_time = gettime() + st_time = timeit.default_timer() # Calculates first center point. 
origin = center - abs(boundary) @@ -318,7 +314,7 @@ class Grid(object): # Calculates all the values values = np.arange(origin + inc, origin + (n * inc) - inc + inc / 2, inc, dtype=np.float) - settings.write_time('Grid', 'create_regular_grid_1d_array', gettime() - st_time, level=3) + settings.write_time('Grid', 'create_regular_grid_1d_array', timeit.default_timer() - st_time, level=3) return values @@ -343,7 +339,7 @@ class Grid(object): :return: Array with as many elements as vertices for each value of coords. :rtype: numpy.array """ - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\t\tCreating boundaries.', level=3) # Creates new arrays moving the centers half increment less and more. @@ -369,7 +365,7 @@ class Grid(object): settings.write_log('ERROR: Check the .err file to get more info.') sys.exit(1) - settings.write_time('Grid', 'create_bounds', gettime() - st_time, level=3) + settings.write_time('Grid', 'create_bounds', timeit.default_timer() - st_time, level=3) return bound_coords @@ -384,13 +380,13 @@ class Grid(object): coordinate. 
:rtype: tuple """ - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tGetting 2D coordinates from ESMPy Grid', level=3) lat = self.esmf_grid.get_coords(1, ESMF.StaggerLoc.CENTER).T lon = self.esmf_grid.get_coords(0, ESMF.StaggerLoc.CENTER).T - settings.write_time('Grid', 'get_coordinates_2d', gettime() - st_time, level=3) + settings.write_time('Grid', 'get_coordinates_2d', timeit.default_timer() - st_time, level=3) return lat, lon @@ -400,11 +396,9 @@ class Grid(object): def to_shapefile(self, full_grid=True): import geopandas as gpd import pandas as pd - import numpy as np - import pyproj - from shapely.geometry import Polygon, Point + from shapely.geometry import Polygon - st_time = gettime() + st_time = timeit.default_timer() # settings.write_log('\t\tGetting grid shapefile', level=3) if full_grid: @@ -436,7 +430,6 @@ class Grid(object): x = x.reshape((x.shape[1], x.shape[2])) y = y.reshape((y.shape[1], y.shape[2])) - # x_aux = np.empty((x.shape[0], y.shape[0], 4)) # x_aux[:, :, 0] = x[:, np.newaxis, 0] # x_aux[:, :, 1] = x[:, np.newaxis, 1] @@ -516,7 +509,6 @@ class Grid(object): list_points = df.as_matrix() del df['p1'], df['p2'], df['p3'], df['p4'] - # List of polygons from the list of points geometry = [Polygon(list(points)) for points in list_points] # geometry = [] @@ -537,7 +529,7 @@ class Grid(object): settings.write_log('\t\tGrid shapefile already done. 
Lets try to read it.', level=3) gdf = gpd.read_file(self.shapefile_path) - settings.write_time('Grid', 'to_shapefile', gettime() - st_time, level=1) + settings.write_time('Grid', 'to_shapefile', timeit.default_timer() - st_time, level=1) return gdf diff --git a/hermesv3_gr/modules/grids/grid_global.py b/hermesv3_gr/modules/grids/grid_global.py index 28e563f..ebbd97e 100644 --- a/hermesv3_gr/modules/grids/grid_global.py +++ b/hermesv3_gr/modules/grids/grid_global.py @@ -20,9 +20,7 @@ import os import sys -from mpi4py import MPI - -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings from grid import Grid @@ -59,7 +57,7 @@ class GlobalGrid(Grid): center_longitude=float(0)): import ESMF - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\tCreating Global grid.', level=2) # Initialize the class using parent @@ -85,13 +83,13 @@ class GlobalGrid(Grid): self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] - self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) - - self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, + self.y_upper_bound-self.y_lower_bound) - settings.write_time('GlobalGrid', 'Init', gettime() - st_time, level=1) + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, + self.y_lower_bound:self.y_upper_bound] - return None + settings.write_time('GlobalGrid', 'Init', timeit.default_timer() - st_time, level=1) def create_coords(self): """ @@ -99,7 +97,7 @@ class GlobalGrid(Grid): """ import numpy as np - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating global coordinates', 
level=3) self.center_latitudes = self.create_regular_grid_1d_array(self.center_lat, self.inc_lat, -90) @@ -139,9 +137,8 @@ class GlobalGrid(Grid): self.boundary_latitudes = self.boundary_latitudes.reshape((1,) + self.boundary_latitudes.shape) self.boundary_longitudes = self.boundary_longitudes.reshape((1,) + self.boundary_longitudes.shape) - settings.write_time('GlobalGrid', 'create_coords', gettime() - st_time, level=2) + settings.write_time('GlobalGrid', 'create_coords', timeit.default_timer() - st_time, level=2) if __name__ == '__main__': pass - diff --git a/hermesv3_gr/modules/grids/grid_lcc.py b/hermesv3_gr/modules/grids/grid_lcc.py index ca553b6..43c535f 100644 --- a/hermesv3_gr/modules/grids/grid_lcc.py +++ b/hermesv3_gr/modules/grids/grid_lcc.py @@ -20,10 +20,7 @@ import os import sys -from mpi4py import MPI -from timeit import default_timer as gettime - - +import timeit import hermesv3_gr.config.settings as settings from grid import Grid @@ -82,7 +79,7 @@ class LccGrid(Grid): def __init__(self, grid_type, vertical_description_path, timestep_num, temporal_path, lat_1, lat_2, lon_0, lat_0, nx, ny, inc_x, inc_y, x_0, y_0, earth_radius=6370000.000): import ESMF - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\tCreating Lambert Conformal Conic (LCC) grid.', level=2) # Initialises with parent class @@ -106,8 +103,8 @@ class LccGrid(Grid): self.y = None # Creating coordinates - self.crs = "+proj=lcc +lat_1={0} +lat_2={1} +lat_0={2} +lon_0={3} +x_0={4} +y_0={5} +datum=WGS84 +units=m".format( - self.lat_1, self.lat_2, self.lat_0, self.lon_0, 0, 0) + self.crs = "+proj=lcc +lat_1={0} +lat_2={1} +lat_0={2} +lon_0={3} +x_0={4} +y_0={5} +datum=WGS84".format( + self.lat_1, self.lat_2, self.lat_0, self.lon_0, 0, 0) + " +units=m" self.create_coords() if not os.path.exists(self.coords_netcdf_file): @@ -123,15 +120,15 @@ class LccGrid(Grid): self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] self.y_upper_bound = 
self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] - self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, + self.y_upper_bound-self.y_lower_bound) # print 'Rank {0} _3_\n'.format(settings.rank) settings.comm.Barrier() # print 'Rank {0} _4_\n'.format(settings.rank) - self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, + self.y_lower_bound:self.y_upper_bound] - settings.write_time('LccGrid', 'Init', gettime() - st_time, level=1) - - return None + settings.write_time('LccGrid', 'Init', timeit.default_timer() - st_time, level=1) def write_coords_netcdf(self): """ @@ -140,7 +137,7 @@ class LccGrid(Grid): """ from hermesv3_gr.tools.netcdf_tools import write_netcdf - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\tWriting {0} file.'.format(self.coords_netcdf_file), level=3) if not self.chech_coords_file(): @@ -164,7 +161,7 @@ class LccGrid(Grid): else: self.cell_area = self.get_cell_area() - settings.write_time('LccGrid', 'write_coords_netcdf', gettime() - st_time, level=3) + settings.write_time('LccGrid', 'write_coords_netcdf', timeit.default_timer() - st_time, level=3) def create_coords(self): """ @@ -173,7 +170,7 @@ class LccGrid(Grid): import numpy as np from pyproj import Proj - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating lcc coordinates', level=3) # Creates a regular grid in metres (Two 1D arrays) @@ -213,9 +210,8 @@ class LccGrid(Grid): self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) - settings.write_time('LccGrid', 'create_coords', gettime() - 
st_time, level=2) + settings.write_time('LccGrid', 'create_coords', timeit.default_timer() - st_time, level=2) if __name__ == '__main__': pass - diff --git a/hermesv3_gr/modules/grids/grid_mercator.py b/hermesv3_gr/modules/grids/grid_mercator.py index d3fc6e1..f537130 100644 --- a/hermesv3_gr/modules/grids/grid_mercator.py +++ b/hermesv3_gr/modules/grids/grid_mercator.py @@ -20,10 +20,7 @@ import os import sys -from mpi4py import MPI -from timeit import default_timer as gettime - - +import timeit import hermesv3_gr.config.settings as settings from grid import Grid @@ -44,18 +41,9 @@ class MercatorGrid(Grid): :param temporal_path: Path to the temporal folder. :type temporal_path: str - :param lat_1: Value of the Lat1 for the LCC grid type. - :type lat_1: float - - :param lat_2: Value of the Lat2 for the LCC grid type. - :type lat_2: float - :param lon_0: Value of the Lon0 for the LCC grid type. :type lon_0: float - :param lat_0: Value of the Lat0 for the LCC grid type. - :type lat_0: float - :param nx: Number of cells on the x dimension. 
:type nx: int @@ -82,7 +70,7 @@ class MercatorGrid(Grid): def __init__(self, grid_type, vertical_description_path, timestep_num, temporal_path, lat_ts, lon_0, nx, ny, inc_x, inc_y, x_0, y_0, earth_radius=6370000.000): import ESMF - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\tCreating Mercator grid.', level=2) # Initialises with parent class @@ -120,13 +108,15 @@ class MercatorGrid(Grid): self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] - self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, + self.y_upper_bound-self.y_lower_bound) # print 'Rank {0} _3_\n'.format(settings.rank) settings.comm.Barrier() # print 'Rank {0} _4_\n'.format(settings.rank) - self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, + self.y_lower_bound:self.y_upper_bound] - settings.write_time('MercatorGrid', 'Init', gettime() - st_time, level=1) + settings.write_time('MercatorGrid', 'Init', timeit.default_timer() - st_time, level=1) def write_coords_netcdf(self): """ @@ -135,7 +125,7 @@ class MercatorGrid(Grid): """ from hermesv3_gr.tools.netcdf_tools import write_netcdf - st_time = gettime() + st_time = timeit.default_timer() if not self.chech_coords_file(): # Writes an auxiliary empty NetCDF only with the coordinates and an empty variable. 
@@ -160,7 +150,7 @@ class MercatorGrid(Grid): else: self.cell_area = self.get_cell_area() - settings.write_time('MercatorGrid', 'write_coords_netcdf', gettime() - st_time, level=3) + settings.write_time('MercatorGrid', 'write_coords_netcdf', timeit.default_timer() - st_time, level=3) def create_coords(self): """ @@ -169,7 +159,7 @@ class MercatorGrid(Grid): import numpy as np from pyproj import Proj - st_time = gettime() + st_time = timeit.default_timer() # Creates a regular grid in metres (Two 1D arrays) self.x = np.arange(self.x_0, self.x_0 + self.inc_x * self.nx, self.inc_x, dtype=np.float) @@ -196,7 +186,7 @@ class MercatorGrid(Grid): self.center_longitudes, self.center_latitudes = projection(x, y, inverse=True) self.boundary_longitudes, self.boundary_latitudes = projection(x_b, y_b, inverse=True) - settings.write_time('MercatorGrid', 'create_coords', gettime() - st_time, level=3) + settings.write_time('MercatorGrid', 'create_coords', timeit.default_timer() - st_time, level=3) if __name__ == '__main__': diff --git a/hermesv3_gr/modules/grids/grid_rotated.py b/hermesv3_gr/modules/grids/grid_rotated.py index f945612..02dc6e4 100644 --- a/hermesv3_gr/modules/grids/grid_rotated.py +++ b/hermesv3_gr/modules/grids/grid_rotated.py @@ -17,11 +17,9 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . 
-from timeit import default_timer as gettime +import timeit import sys import os -from mpi4py import MPI - import hermesv3_gr.config.settings as settings from grid import Grid @@ -44,7 +42,7 @@ class RotatedGrid(Grid): west_boundary, south_boundary, inc_rlat, inc_rlon): import ESMF - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\tCreating Rotated grid.', level=2) # Initialises with parent class @@ -85,13 +83,13 @@ class RotatedGrid(Grid): self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] - self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, self.y_upper_bound-self.y_lower_bound) - - self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, self.y_lower_bound:self.y_upper_bound] + self.shape = (timestep_num, len(self.vertical_description), self.x_upper_bound-self.x_lower_bound, + self.y_upper_bound-self.y_lower_bound) - settings.write_time('RotatedGrid', 'Init', gettime() - st_time, level=1) + self.cell_area = self.get_cell_area()[self.x_lower_bound:self.x_upper_bound, + self.y_lower_bound:self.y_upper_bound] - return None + settings.write_time('RotatedGrid', 'Init', timeit.default_timer() - st_time, level=1) def create_coords(self): """ @@ -100,7 +98,7 @@ class RotatedGrid(Grid): from hermesv3_gr.tools.coordinates_tools import create_regular_rotated import numpy as np - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating rotated coordinates.', level=3) # Create rotated coordinates @@ -124,7 +122,7 @@ class RotatedGrid(Grid): self.boundary_longitudes, self.boundary_latitudes = self.rotated2latlon(b_lons, b_lats) self.center_longitudes, self.center_latitudes = self.rotated2latlon(c_lons, c_lats) - settings.write_time('RotatedGrid', 'create_coords', gettime() - st_time, level=2) + settings.write_time('RotatedGrid', 'create_coords', 
timeit.default_timer() - st_time, level=2) def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180): """ @@ -145,7 +143,7 @@ class RotatedGrid(Grid): import numpy as np import math - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\t\tTransforming rotated coordinates to latitude, longitude coordinates.', level=3) # TODO Document this function @@ -193,7 +191,7 @@ class RotatedGrid(Grid): almd[almd > (lon_min + 360)] -= 360 almd[almd < lon_min] += 360 - settings.write_time('RotatedGrid', 'rotated2latlon', gettime() - st_time, level=3) + settings.write_time('RotatedGrid', 'rotated2latlon', timeit.default_timer() - st_time, level=3) return almd, aphd @@ -204,16 +202,18 @@ class RotatedGrid(Grid): """ from hermesv3_gr.modules.writing.writer import Writer - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\tWriting {0} file.'.format(self.coords_netcdf_file), level=3) if not self.chech_coords_file(): # Writes an auxiliary empty NetCDF only with the coordinates and an empty variable. 
Writer.write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, [{'name': 'var_aux', 'units': '', 'data': 0}], - boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, + boundary_latitudes=self.boundary_latitudes, + boundary_longitudes=self.boundary_longitudes, Rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, - north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) + north_pole_lat=self.new_pole_latitude_degrees, + north_pole_lon=self.new_pole_longitude_degrees) # Calculates the cell area of the auxiliary NetCDF file self.cell_area = self.get_cell_area() @@ -221,14 +221,15 @@ class RotatedGrid(Grid): # Re-writes the NetCDF adding the cell area Writer.write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, [{'name': 'var_aux', 'units': '', 'data': 0}], - boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, - cell_area=self.cell_area, + boundary_latitudes=self.boundary_latitudes, + boundary_longitudes=self.boundary_longitudes, cell_area=self.cell_area, Rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, - north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) + north_pole_lat=self.new_pole_latitude_degrees, + north_pole_lon=self.new_pole_longitude_degrees) else: self.cell_area = self.get_cell_area() - settings.write_time('RotatedGrid', 'write_coords_netcdf', gettime() - st_time, level=3) + settings.write_time('RotatedGrid', 'write_coords_netcdf', timeit.default_timer() - st_time, level=3) if __name__ == '__main__': diff --git a/hermesv3_gr/modules/masking/masking.py b/hermesv3_gr/modules/masking/masking.py index d8c7d24..744a145 100644 --- a/hermesv3_gr/modules/masking/masking.py +++ b/hermesv3_gr/modules/masking/masking.py @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. 
If not, see . -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings import os @@ -29,7 +29,7 @@ class Masking(object): def __init__(self, world_info, factors_mask_values, regrid_mask_values, grid, world_mask_file=None): from timezonefinder import TimezoneFinder - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tCreating mask.', level=2) self.adding = None @@ -44,14 +44,14 @@ class Masking(object): self.grid = grid - settings.write_time('Masking', 'Init', gettime() - st_time, level=3) + settings.write_time('Masking', 'Init', timeit.default_timer() - st_time, level=3) return None def get_country_codes(self): import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() # settings.write_log('\t\t\tGetting country codes.', level=3) # df = pd.read_csv(self.world_info, sep=';', index_col=False, names=["country", "country_code"]) @@ -62,7 +62,7 @@ class Masking(object): countries_dict = df.to_dict() countries_dict = countries_dict['country_code'] - settings.write_time('Masking', 'get_country_codes', gettime() - st_time, level=3) + settings.write_time('Masking', 'get_country_codes', timeit.default_timer() - st_time, level=3) return countries_dict @staticmethod @@ -81,7 +81,7 @@ class Masking(object): from hermesv3_gr.tools.netcdf_tools import extract_vars from hermesv3_gr.modules.writing.writer import Writer - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\t\tCreating {0} file.'.format(self.world_mask_file), level=2) # output_path = os.path.join(output_dir, 'iso.nc') @@ -118,13 +118,13 @@ class Masking(object): Writer.write_netcdf(self.world_mask_file, lat, lon, data, RegularLatLon=True) settings.comm.Barrier() - settings.write_time('Masking', 'create_country_iso', gettime() - st_time, level=3) + settings.write_time('Masking', 'create_country_iso', timeit.default_timer() - st_time, level=3) return True def find_timezone(self, latitude, 
longitude): - st_time = gettime() + st_time = timeit.default_timer() if longitude < -180: longitude += 360 @@ -133,14 +133,14 @@ class Masking(object): tz = self.tf.timezone_at(lng=longitude, lat=latitude) - settings.write_time('Masking', 'find_timezone', gettime() - st_time, level=3) + settings.write_time('Masking', 'find_timezone', timeit.default_timer() - st_time, level=3) return tz def get_iso_code_from_tz(self, tz): import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() zero_values = [None, ] if tz in zero_values: @@ -149,14 +149,14 @@ class Masking(object): df = pd.read_csv(self.world_info, sep=';') code = df.country_code[df.time_zone == tz].values - settings.write_time('Masking', 'get_iso_code_from_tz', gettime() - st_time, level=3) + settings.write_time('Masking', 'get_iso_code_from_tz', timeit.default_timer() - st_time, level=3) return code[0] def parse_factor_values(self, values): import re - st_time = gettime() + st_time = timeit.default_timer() if type(values) != str: return None @@ -167,7 +167,7 @@ class Masking(object): scale_dict[int(self.country_codes[element[0]])] = element[1] settings.write_log('\t\t\tApplying scaling factors for {0}.'.format(values), level=3) - settings.write_time('Masking', 'parse_factor_values', gettime() - st_time, level=3) + settings.write_time('Masking', 'parse_factor_values', timeit.default_timer() - st_time, level=3) return scale_dict @@ -180,7 +180,7 @@ class Masking(object): """ import re - st_time = gettime() + st_time = timeit.default_timer() if type(values) != str: return None @@ -203,7 +203,7 @@ class Masking(object): settings.write_log("\t\t\tCreating mask to do {0} countries.".format(values[1:]), level=3) else: settings.write_log("\t\t\tCreating mask to avoid {0} countries.".format(values[1:]), level=3) - settings.write_time('Masking', 'parse_masking_values', gettime() - st_time, level=3) + settings.write_time('Masking', 'parse_masking_values', timeit.default_timer() - st_time, level=3) return 
code_list @@ -222,7 +222,7 @@ class Masking(object): import numpy as np from netCDF4 import Dataset - st_time = gettime() + st_time = timeit.default_timer() netcdf = Dataset(self.world_mask_file, mode='r') values = netcdf.variables['timezone_id'][:] @@ -237,7 +237,7 @@ class Masking(object): for code in self.regrid_mask_values: mask[values == code] = 0 - settings.write_time('Masking', 'custom_regrid_mask', gettime() - st_time, level=3) + settings.write_time('Masking', 'custom_regrid_mask', timeit.default_timer() - st_time, level=3) return mask @@ -245,7 +245,7 @@ class Masking(object): import numpy as np from hermesv3_gr.tools.netcdf_tools import extract_vars - st_time = gettime() + st_time = timeit.default_timer() [values] = extract_vars(self.world_mask_file, ['timezone_id']) @@ -254,7 +254,7 @@ class Masking(object): for code, factor in self.factors_mask_values.iteritems(): mask[values == code] = factor - settings.write_time('Masking', 'custom_scale_mask', gettime() - st_time, level=3) + settings.write_time('Masking', 'custom_scale_mask', timeit.default_timer() - st_time, level=3) return mask diff --git a/hermesv3_gr/modules/regrid/regrid.py b/hermesv3_gr/modules/regrid/regrid.py index b8c8273..c26f113 100644 --- a/hermesv3_gr/modules/regrid/regrid.py +++ b/hermesv3_gr/modules/regrid/regrid.py @@ -20,7 +20,7 @@ import os import sys import numpy as np -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings @@ -46,7 +46,7 @@ class Regrid(object): :type masking: Masking """ def __init__(self, pollutant_dicts, weight_matrix_file, grid, masking=None): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\t\tInitializing Regrid.', level=3) self.grid = grid @@ -60,7 +60,7 @@ class Regrid(object): settings.comm.Barrier() self.create_weight_matrix() - settings.write_time('Regrid', 'Init', round(gettime() - st_time), level=3) + settings.write_time('Regrid', 'Init', round(timeit.default_timer() - 
st_time), level=3) return None @@ -83,7 +83,7 @@ class Regrid(object): """ from netCDF4 import Dataset - st_time = gettime() + st_time = timeit.default_timer() # Read weight matrix nc_weights = Dataset(self.weight_matrix_file, mode='r') @@ -112,7 +112,7 @@ class Regrid(object): # Reduce dst values dst_field = self.reduce_dst_field(dst_field_aux, dst_indices_counts, max_index) - settings.write_time('Regrid', 'apply_weights', gettime() - st_time, level=3) + settings.write_time('Regrid', 'apply_weights', timeit.default_timer() - st_time, level=3) return dst_field @@ -142,7 +142,7 @@ class Regrid(object): :return: :rtype: numpy.array """ - st_time = gettime() + st_time = timeit.default_timer() # Create new dst_field = np.zeros((dst_field_extended.shape[0], max_index), dtype=settings.precision) @@ -158,7 +158,7 @@ class Regrid(object): count += 1 previous = i - settings.write_time('Regrid', 'reduce_dst_field', gettime() - st_time, level=3) + settings.write_time('Regrid', 'reduce_dst_field', timeit.default_timer() - st_time, level=3) return dst_field @@ -168,7 +168,7 @@ class Regrid(object): # import numpy as np # # if settings.log_level_3: - # st_time = gettime() + # st_time = timeit.default_timer() # else: # st_time = None # @@ -188,7 +188,7 @@ class Regrid(object): # previous = i # # if settings.log_level_3: - # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(gettime() - st_time, 2)) + # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(timeit.default_timer() - st_time, 2)) # # return dst_field # @@ -197,7 +197,7 @@ class Regrid(object): # import numpy as np # # if settings.log_level_3: - # st_time = gettime() + # st_time = timeit.default_timer() # else: # st_time = None # @@ -236,7 +236,7 @@ class Regrid(object): # previous = i # # if settings.log_level_3: - # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(gettime() - st_time, 2)) + # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(timeit.default_timer() - st_time, 2)) # 
# return dst_field @@ -265,14 +265,14 @@ class Regrid(object): # :rtype: numpy.ndarray # """ # if settings.log_level_3: - # st_time = gettime() + # st_time = timeit.default_timer() # else: # st_time = None # # values = self.apply_weights(values) # # if settings.log_level_3: - # print 'TIME -> Regrid.start_regridding: {0} s'.format(round(gettime() - st_time, 2)) + # print 'TIME -> Regrid.start_regridding: {0} s'.format(round(timeit.default_timer() - st_time, 2)) # # return values diff --git a/hermesv3_gr/modules/regrid/regrid_conservative.py b/hermesv3_gr/modules/regrid/regrid_conservative.py index ee29353..c4fdeae 100644 --- a/hermesv3_gr/modules/regrid/regrid_conservative.py +++ b/hermesv3_gr/modules/regrid/regrid_conservative.py @@ -20,7 +20,7 @@ import os import sys import numpy as np -from timeit import default_timer as gettime +import timeit import ESMF import hermesv3_gr.config.settings as settings @@ -53,12 +53,12 @@ class ConservativeRegrid(Regrid): :type masking: Masking """ def __init__(self, pollutant_dicts, weight_matrix_file, grid, masking=None): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tInitializing Conservative regrid.', level=2) super(ConservativeRegrid, self).__init__(pollutant_dicts, weight_matrix_file, grid, masking=masking) - settings.write_time('ConservativeRegrid', 'Init', gettime() - st_time, level=2) + settings.write_time('ConservativeRegrid', 'Init', timeit.default_timer() - st_time, level=2) return None @@ -67,7 +67,7 @@ class ConservativeRegrid(Regrid): Calls to ESMF_RegridWeightGen to generate the weight matrix. 
""" - st_time = gettime() + st_time = timeit.default_timer() src_grid = self.grid.create_esmf_grid_from_file(self.pollutant_dicts[0]['path']) src_field = ESMF.Field(src_grid, name='my input field') @@ -78,12 +78,12 @@ class ConservativeRegrid(Regrid): regrid = ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.CONSERVE, )#src_mask_values=self.masking.regrid_mask) # regrid = ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.BILINEAR, )#src_mask_values=self.masking.regrid_mask) - settings.write_time('ConservativeRegrid', 'create_weight_matrix', gettime() - st_time, level=1) + settings.write_time('ConservativeRegrid', 'create_weight_matrix', timeit.default_timer() - st_time, level=1) def start_regridding(self, gfas=False, vertical=None): from hermesv3_gr.tools.netcdf_tools import extract_vars - st_time = gettime() + st_time = timeit.default_timer() weights = self.read_weight_matrix() @@ -129,7 +129,7 @@ class ConservativeRegrid(Regrid): dst_field_list.append({'data': dst_field, 'name': pollutant_single_dict['name']}) - settings.write_time('ConservativeRegrid', 'start_regridding', gettime() - st_time, level=3) + settings.write_time('ConservativeRegrid', 'start_regridding', timeit.default_timer() - st_time, level=3) return dst_field_list # def start_carles_regridding_1(self): @@ -251,7 +251,7 @@ class ConservativeRegrid(Regrid): import numpy as np if settings.log_level_3: - st_time = gettime() + st_time = timeit.default_timer() # Read ESMF Weight matrix NetCDF nc_weights = Dataset(self.weight_matrix_file, mode='r') @@ -295,7 +295,7 @@ class ConservativeRegrid(Regrid): nc_weights.close() if settings.log_level_3: - print 'TIME -> ConservativeRegrid.re_order_weight_matrix: {0} s'.format(round(gettime() - st_time, 2)) + print 'TIME -> ConservativeRegrid.re_order_weight_matrix: {0} s'.format(round(timeit.default_timer() - st_time, 2)) def apply_weights_test(self, values): """ 
@@ -309,7 +309,7 @@ class ConservativeRegrid(Regrid): """ from netCDF4 import Dataset if settings.log_level_3: - st_time = gettime() + st_time = timeit.default_timer() dst_field = super(ConservativeRegrid, self).apply_weights(values) nc_weights = Dataset(self.weight_matrix_file, mode='r') @@ -329,7 +329,7 @@ class ConservativeRegrid(Regrid): if frac_b[i] != 0: dst_field[:, i] = dst_field[:, i] / frac_b[i] if settings.log_level_3: - print 'TIME -> ConservativeRegrid.apply_weights: {0} s'.format(round(gettime() - st_time, 2)) + print 'TIME -> ConservativeRegrid.apply_weights: {0} s'.format(round(timeit.default_timer() - st_time, 2)) return dst_field def apply_weights(self, values): diff --git a/hermesv3_gr/modules/speciation/speciation.py b/hermesv3_gr/modules/speciation/speciation.py index d145c34..09676c8 100644 --- a/hermesv3_gr/modules/speciation/speciation.py +++ b/hermesv3_gr/modules/speciation/speciation.py @@ -20,7 +20,7 @@ import os import sys -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings from warnings import warn as warning @@ -40,7 +40,7 @@ class Speciation(object): :type molecular_weights_path: str """ def __init__(self, speciation_id, speciation_profile_path, molecular_weights_path): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tInitializing Speciation.', level=2) self.id = speciation_id @@ -48,7 +48,7 @@ class Speciation(object): self.molecular_weights_path = molecular_weights_path self.molecular_weights = self.extract_molecular_weights(molecular_weights_path) - settings.write_time('Speciation', 'Init', gettime() - st_time, level=2) + settings.write_time('Speciation', 'Init', timeit.default_timer() - st_time, level=2) return None @@ -64,7 +64,7 @@ class Speciation(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\t\t\tGetting speciation profile id '{0}' from {1} .".format( self.id, 
speciation_profile_path), level=3) @@ -93,7 +93,7 @@ class Speciation(object): 'long_name': long_name_dict[key] }) - settings.write_time('Speciation', 'get_speciation_profile', gettime() - st_time, level=3) + settings.write_time('Speciation', 'get_speciation_profile', timeit.default_timer() - st_time, level=3) return profile_list @staticmethod @@ -110,7 +110,7 @@ class Speciation(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() df = pd.read_csv(molecular_weights_path, sep=';') @@ -119,7 +119,7 @@ class Speciation(object): for i, element in df.iterrows(): dict_aux.update({element.Specie: element.MW}) - settings.write_time('Speciation', 'extract_molecular_weights', gettime() - st_time, level=3) + settings.write_time('Speciation', 'extract_molecular_weights', timeit.default_timer() - st_time, level=3) return dict_aux @@ -136,7 +136,7 @@ class Speciation(object): from cf_units import Unit import numpy as np - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tSpeciating", level=2) input_pollutants = [] @@ -200,31 +200,31 @@ class Speciation(object): if settings.rank == 0: warning("WARNING: The input pollutants {0} do not appear in the speciation profile {1}.".format( input_pollutants, self.id)) - settings.write_time('Speciation', 'do_speciation', gettime() - st_time, level=3) + settings.write_time('Speciation', 'do_speciation', timeit.default_timer() - st_time, level=3) return speciated_emissions def get_long_name(self, name): - st_time = gettime() + st_time = timeit.default_timer() value = '' for pollutant in self.speciation_profile: if pollutant['name'] == name: value = pollutant['long_name'] - settings.write_time('Speciation', 'get_long_name', gettime() - st_time, level=3) + settings.write_time('Speciation', 'get_long_name', timeit.default_timer() - st_time, level=3) return value def get_units(self, name): - st_time = gettime() + st_time = timeit.default_timer() value = '' for pollutant in 
self.speciation_profile: if pollutant['name'] == name: value = pollutant['units'] - settings.write_time('Speciation', 'get_units', gettime() - st_time, level=3) + settings.write_time('Speciation', 'get_units', timeit.default_timer() - st_time, level=3) return value diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index a56a348..7b552a2 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings import os @@ -75,7 +75,7 @@ class TemporalDistribution(object): import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tInitializing Temporal.', level=2) self.grid = grid @@ -140,7 +140,7 @@ class TemporalDistribution(object): self.create_netcdf_timezones(grid) self.timezones_array = self.calculate_timezones() - settings.write_time('TemporalDistribution', 'Init', gettime() - st_time, level=2) + settings.write_time('TemporalDistribution', 'Init', timeit.default_timer() - st_time, level=2) return None @@ -153,7 +153,7 @@ class TemporalDistribution(object): """ from datetime import timedelta - st_time = gettime() + st_time = timeit.default_timer() if self.timestep_type == 'hourly': end_date = self.starting_date + (self.timestep_num - 1) * timedelta(hours=self.timestep_freq) @@ -170,7 +170,7 @@ class TemporalDistribution(object): else: end_date = self.starting_date - settings.write_time('TemporalDistribution', 'calculate_ending_date', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_ending_date', timeit.default_timer() - st_time, level=3) return end_date @@ -187,7 +187,7 @@ class TemporalDistribution(object): from datetime import timedelta from calendar import monthrange, isleap - 
st_time = gettime() + st_time = timeit.default_timer() if self.timestep_type == 'hourly': delta = timedelta(hours=self.timestep_freq) @@ -204,7 +204,7 @@ class TemporalDistribution(object): else: delta = None - settings.write_time('TemporalDistribution', 'calculate_ending_date', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_ending_date', timeit.default_timer() - st_time, level=3) return delta @@ -302,7 +302,7 @@ class TemporalDistribution(object): :rtype: str """ - st_time = gettime() + st_time = timeit.default_timer() dg = 0 tz = None @@ -310,7 +310,7 @@ class TemporalDistribution(object): tz = self.tf.closest_timezone_at(lng=longitude, lat=latitude, delta_degree=dg) dg += 1 - settings.write_time('TemporalDistribution', 'find_closest_timezone', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'find_closest_timezone', timeit.default_timer() - st_time, level=3) return tz @@ -335,7 +335,7 @@ class TemporalDistribution(object): """ from hermesv3_gr.tools.netcdf_tools import write_netcdf - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\t\tCreating {0} file.".format(self.netcdf_timezones), level=2) lat, lon = grid.get_coordinates_2d() @@ -368,14 +368,14 @@ class TemporalDistribution(object): write_netcdf(self.netcdf_timezones, total_lat, total_lon, data, RegularLatLon=True) settings.comm.Barrier() - settings.write_time('TemporalDistribution', 'create_netcdf_timezones', gettime() - st_time, level=2) + settings.write_time('TemporalDistribution', 'create_netcdf_timezones', timeit.default_timer() - st_time, level=2) return True def read_gridded_profile(self, path, value): from netCDF4 import Dataset - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\t\tGetting gridded temporal monthly profile from {0} .'.format(path), level=3) @@ -386,7 +386,7 @@ class TemporalDistribution(object): profile[profile <= 0] = 1 - 
settings.write_time('TemporalDistribution', 'read_gridded_profile', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'read_gridded_profile', timeit.default_timer() - st_time, level=3) return profile @@ -400,7 +400,7 @@ class TemporalDistribution(object): from netCDF4 import Dataset from hermesv3_gr.tools.netcdf_tools import extract_vars - st_time = gettime() + st_time = timeit.default_timer() nc_in = Dataset(self.netcdf_timezones) timezones = nc_in.variables['timezone_id'][:, self.grid.x_lower_bound:self.grid.x_upper_bound, @@ -414,7 +414,7 @@ class TemporalDistribution(object): tz_list[timezones == id_aux] = tz except: pass - settings.write_time('TemporalDistribution', 'calculate_timezones', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_timezones', timeit.default_timer() - st_time, level=3) return tz_list @@ -431,7 +431,7 @@ class TemporalDistribution(object): import pytz import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() df = pd.DataFrame(self.timezones_array.flatten(), columns=['tz']) df['i'] = df.index @@ -506,7 +506,7 @@ class TemporalDistribution(object): factors = np.array(df['factor'].values).reshape((self.timezones_array.shape[1], self.timezones_array.shape[2])) del df - settings.write_time('TemporalDistribution', 'calculate_2d_temporal_factors', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_2d_temporal_factors', timeit.default_timer() - st_time, level=3) return factors @@ -523,7 +523,7 @@ class TemporalDistribution(object): :return: 3D array with the factors to correct the input data to the date of this timestep. 
:rtype: numpy.array """ - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tCalculating temporal factors.", level=2) factors = [] @@ -543,7 +543,7 @@ class TemporalDistribution(object): factors = np.array(factors) - settings.write_time('TemporalDistribution', 'calculate_3d_temporal_factors', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_3d_temporal_factors', timeit.default_timer() - st_time, level=3) return factors def parse_hourly_profile_id(self): @@ -577,7 +577,7 @@ class TemporalDistribution(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() # settings.write_log("\t\t\tGetting temporal hourly profile '{0}' from {1} .".format( # profile_id, self.hourly_profile_path), level=3) if date is None: @@ -594,7 +594,7 @@ class TemporalDistribution(object): else: # print self.hourly_profile profile = None - settings.write_time('TemporalDistribution', 'get_temporal_hourly_profile', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'get_temporal_hourly_profile', timeit.default_timer() - st_time, level=3) return profile @@ -611,7 +611,7 @@ class TemporalDistribution(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() if self.daily_profile_id is not None: df = pd.read_csv(self.daily_profile_path) @@ -629,7 +629,7 @@ class TemporalDistribution(object): else: profile = None - settings.write_time('TemporalDistribution', 'get_temporal_daily_profile', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'get_temporal_daily_profile', timeit.default_timer() - st_time, level=3) return profile @@ -647,12 +647,12 @@ class TemporalDistribution(object): :return: Rebalance factor to be sum to the daily factor. 
:rtype: float """ - st_time = gettime() + st_time = timeit.default_timer() weekdays = self.calculate_weekdays(date) rebalance_factor = self.calculate_weekday_factor_full_month(profile, weekdays) - settings.write_time('TemporalDistribution', 'calculate_rebalance_factor', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_rebalance_factor', timeit.default_timer() - st_time, level=3) return rebalance_factor @@ -665,7 +665,7 @@ class TemporalDistribution(object): :param weekdays: :return: """ - st_time = gettime() + st_time = timeit.default_timer() weekdays_factors = 0 num_days = 0 @@ -673,7 +673,7 @@ class TemporalDistribution(object): weekdays_factors += profile[day] * weekdays[day] num_days += weekdays[day] - settings.write_time('TemporalDistribution', 'calculate_weekday_factor_full_month', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_weekday_factor_full_month', timeit.default_timer() - st_time, level=3) return (num_days - weekdays_factors) / num_days @@ -681,7 +681,7 @@ class TemporalDistribution(object): def calculate_weekdays(date): from calendar import monthrange, weekday, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY - st_time = gettime() + st_time = timeit.default_timer() weekdays = [MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY] days = [weekday(date.year, date.month, d + 1) for d in xrange(monthrange(date.year, date.month)[1])] @@ -692,7 +692,7 @@ class TemporalDistribution(object): count += 1 - settings.write_time('TemporalDistribution', 'calculate_weekdays', gettime() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_weekdays', timeit.default_timer() - st_time, level=3) return weekdays_dict @staticmethod @@ -712,7 +712,7 @@ class TemporalDistribution(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\t\t\tGetting temporal monthly profile id '{0}' from {1} 
.".format( profile_id, profile_path), level=3) @@ -731,7 +731,7 @@ class TemporalDistribution(object): else: profile = None - settings.write_time('TemporalDistribution', 'get_temporal_monthly_profile', gettime() - st_time, level=2) + settings.write_time('TemporalDistribution', 'get_temporal_monthly_profile', timeit.default_timer() - st_time, level=2) return profile @@ -740,7 +740,7 @@ class TemporalDistribution(object): from datetime import timedelta from calendar import monthrange - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('Calculating time array of {0} time steps starting from {1}.'.format( time_step_num, st_date.strftime('%Y/%m/%d %H:%M:%S'))) @@ -783,7 +783,7 @@ class TemporalDistribution(object): date_aux = date_aux + delta - settings.write_time('TemporalDistribution', 'calculate_delta_hours', gettime() - st_time, level=2) + settings.write_time('TemporalDistribution', 'calculate_delta_hours', timeit.default_timer() - st_time, level=2) return hours_since @@ -804,7 +804,7 @@ class TemporalDistribution(object): # import copy # # if settings.log_level_3: - # st_time = gettime() + # st_time = timeit.default_timer() # else: # st_time = None # @@ -860,7 +860,7 @@ class TemporalDistribution(object): # date_aux = date_aux + self.calculate_timedelta(date_aux) # # if settings.log_level_3: - # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(gettime() - st_time, 2)) + # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(timeit.default_timer() - st_time, 2)) # # return data_to_fill # @@ -882,7 +882,7 @@ class TemporalDistribution(object): # import copy # # if settings.log_level_3: - # st_time = gettime() + # st_time = timeit.default_timer() # else: # st_time = None # @@ -942,7 +942,7 @@ class TemporalDistribution(object): # count += 1 # # if settings.log_level_3: - # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(gettime() - st_time, 2)) + # print 'TIME -> 
TemporalDistribution.do_temporal: {0} s'.format(round(timeit.default_timer() - st_time, 2)) # # # @profile # def do_time_step(self, date, data, grid): @@ -962,7 +962,7 @@ class TemporalDistribution(object): # import copy # # if settings.log_level_3: - # st_time = gettime() + # st_time = timeit.default_timer() # else: # st_time = None # @@ -983,7 +983,7 @@ class TemporalDistribution(object): # temporal_data = self.calculate_time_step(data, data_empty, date) # # if settings.log_level_3: - # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(gettime() - st_time, 2)) + # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(timeit.default_timer() - st_time, 2)) # return temporal_data # # def calculate_time_step(self, data, data_empty, date): diff --git a/hermesv3_gr/modules/vertical/vertical.py b/hermesv3_gr/modules/vertical/vertical.py index b6c786b..dea4cd5 100644 --- a/hermesv3_gr/modules/vertical/vertical.py +++ b/hermesv3_gr/modules/vertical/vertical.py @@ -20,7 +20,7 @@ import os import sys -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings @@ -40,7 +40,7 @@ class VerticalDistribution(object): :type vertical_output_profile: str """ def __init__(self, vertical_id, vertical_profile_path, vertical_output_profile): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('\t\tInitializing Vertical.', level=2) self.id = vertical_id @@ -48,7 +48,7 @@ class VerticalDistribution(object): self.output_heights = vertical_output_profile self.vertical_profile = self.get_vertical_profile(vertical_profile_path) - settings.write_time('VerticalDistribution', 'Init', gettime() - st_time, level=2) + settings.write_time('VerticalDistribution', 'Init', timeit.default_timer() - st_time, level=2) def get_vertical_profile(self, path): """ @@ -64,7 +64,7 @@ class VerticalDistribution(object): import pandas as pd import re - st_time = gettime() + st_time = timeit.default_timer() 
settings.write_log("\t\t\tGetting vertical profile id '{0}' from {1} .".format(self.id, path), level=3) df = pd.read_csv(path, sep=';') @@ -88,7 +88,7 @@ class VerticalDistribution(object): else: return_value = zip(v_profile['layers'], v_profile['weights']) - settings.write_time('VerticalDistribution', 'get_vertical_profile', gettime() - st_time, level=3) + settings.write_time('VerticalDistribution', 'get_vertical_profile', timeit.default_timer() - st_time, level=3) return return_value @@ -105,14 +105,14 @@ class VerticalDistribution(object): """ import pandas as pd - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('Calculating vertical levels from {0} .'.format(path)) df = pd.read_csv(path, sep=';') heights = df.height_magl.values - settings.write_time('VerticalDistribution', 'get_vertical_output_profile', gettime() - st_time, level=3) + settings.write_time('VerticalDistribution', 'get_vertical_output_profile', timeit.default_timer() - st_time, level=3) return heights @@ -131,7 +131,7 @@ class VerticalDistribution(object): :param output_vertical_profile: :return: """ - st_time = gettime() + st_time = timeit.default_timer() output_vertical_profile_aux = [s for s in output_vertical_profile if s >= prev_layer] output_vertical_profile_aux = [s for s in output_vertical_profile_aux if s < layer] @@ -146,7 +146,7 @@ class VerticalDistribution(object): weight_list.append({'index': index, 'weight': weight}) index += 1 - settings.write_time('VerticalDistribution', 'get_weights', gettime() - st_time, level=3) + settings.write_time('VerticalDistribution', 'get_weights', timeit.default_timer() - st_time, level=3) return weight_list @@ -159,7 +159,7 @@ class VerticalDistribution(object): """ import numpy as np - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\t\tCalculating vertical weights.", level=3) weights = np.zeros(len(self.output_heights)) @@ -171,7 +171,7 @@ class VerticalDistribution(object): prev_layer = layer - 
settings.write_time('VerticalDistribution', 'calculate_weights', gettime() - st_time, level=3) + settings.write_time('VerticalDistribution', 'calculate_weights', timeit.default_timer() - st_time, level=3) return weights @@ -192,11 +192,11 @@ class VerticalDistribution(object): """ import numpy as np - st_time = gettime() + st_time = timeit.default_timer() data_aux = np.multiply(weights.reshape(weights.shape + (1, 1)), data, dtype=settings.precision) - settings.write_time('VerticalDistribution', 'apply_weights', gettime() - st_time, level=3) + settings.write_time('VerticalDistribution', 'apply_weights', timeit.default_timer() - st_time, level=3) return data_aux @@ -204,13 +204,13 @@ class VerticalDistribution(object): def apply_weights_level(data, weight): import numpy as np - st_time = gettime() + st_time = timeit.default_timer() for emi in data: if emi['data'] is not 0: emi['data'] = emi['data'] * weight - settings.write_time('VerticalDistribution', 'apply_weights_level', gettime() - st_time, level=3) + settings.write_time('VerticalDistribution', 'apply_weights_level', timeit.default_timer() - st_time, level=3) return data diff --git a/hermesv3_gr/modules/vertical/vertical_gfas.py b/hermesv3_gr/modules/vertical/vertical_gfas.py index 14c6fc0..86fbfe8 100644 --- a/hermesv3_gr/modules/vertical/vertical_gfas.py +++ b/hermesv3_gr/modules/vertical/vertical_gfas.py @@ -20,7 +20,7 @@ import os import sys -from timeit import default_timer as gettime +import timeit import hermesv3_gr.config.settings as settings from vertical import VerticalDistribution @@ -37,13 +37,13 @@ class GfasVerticalDistribution(VerticalDistribution): :type approach: str """ def __init__(self, vertical_output_profile, approach, altitude): - st_time = gettime() + st_time = timeit.default_timer() self.altitude = altitude self.output_heights = vertical_output_profile self.approach = approach - settings.write_time('GfasVerticalDistribution', 'Init', gettime() - st_time, level=3) + 
settings.write_time('GfasVerticalDistribution', 'Init', timeit.default_timer() - st_time, level=3) return None @@ -58,7 +58,7 @@ class GfasVerticalDistribution(VerticalDistribution): :return: List with the width of each vertical level. :rtype: list """ - st_time = gettime() + st_time = timeit.default_timer() widths = [] for i in xrange(len(heights_list)): @@ -67,7 +67,7 @@ class GfasVerticalDistribution(VerticalDistribution): else: widths.append(heights_list[i] - heights_list[i - 1]) - settings.write_time('GfasVerticalDistribution', 'calculate_widths', gettime() - st_time, level=3) + settings.write_time('GfasVerticalDistribution', 'calculate_widths', timeit.default_timer() - st_time, level=3) return widths def get_weights(self, heights_list): @@ -80,7 +80,7 @@ class GfasVerticalDistribution(VerticalDistribution): :return: List of the weight to apply to each layer. :rtype: list """ - st_time = gettime() + st_time = timeit.default_timer() weights = [] width_list = self.calculate_widths(heights_list) @@ -100,7 +100,7 @@ class GfasVerticalDistribution(VerticalDistribution): else: weights.append(0.5) - settings.write_time('GfasVerticalDistribution', 'get_weights', gettime() - st_time, level=3) + settings.write_time('GfasVerticalDistribution', 'get_weights', timeit.default_timer() - st_time, level=3) return weights def apply_approach(self, top_fires): @@ -116,7 +116,7 @@ class GfasVerticalDistribution(VerticalDistribution): """ import numpy as np - st_time = gettime() + st_time = timeit.default_timer() fires = np.zeros(top_fires.shape) for i in xrange(len(self.output_heights)): @@ -125,7 +125,7 @@ class GfasVerticalDistribution(VerticalDistribution): for i_weight in xrange(len(weight_list)): fires[i_weight] += top_fires[i] * weight_list[i_weight] - settings.write_time('GfasVerticalDistribution', 'apply_approach', gettime() - st_time, level=3) + settings.write_time('GfasVerticalDistribution', 'apply_approach', timeit.default_timer() - st_time, level=3) return fires def 
do_vertical_interpolation_allocation(self, values, altitude): @@ -143,7 +143,7 @@ class GfasVerticalDistribution(VerticalDistribution): """ import numpy as np - st_time = gettime() + st_time = timeit.default_timer() fire_list = [] aux_var = values @@ -155,7 +155,7 @@ class GfasVerticalDistribution(VerticalDistribution): fire_list.append(aux_data) fire_list = np.array(fire_list).reshape((len(fire_list), values.shape[1], values.shape[2])) - settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation_allocation', gettime() - st_time, + settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation_allocation', timeit.default_timer() - st_time, level=3) return fire_list @@ -169,11 +169,11 @@ class GfasVerticalDistribution(VerticalDistribution): :return: Emissions already vertically distributed. :rtype: numpy.ndarray """ - st_time = gettime() + st_time = timeit.default_timer() fire_list = self.apply_approach(values) - settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation', gettime() - st_time, level=3) + settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation', timeit.default_timer() - st_time, level=3) return fire_list diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index 08e54e0..4fdf4ca 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -18,7 +18,7 @@ # along with HERMESv3_GR. If not, see . 
-from timeit import default_timer as gettime +import timeit from hermesv3_gr.config import settings import os @@ -46,7 +46,7 @@ class Writer(object): return None def write(self, inventory_list): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log('') settings.write_log("Writing netCDF output file {0} .".format(self.path)) @@ -59,7 +59,7 @@ class Writer(object): else: self.write_serial_netcdf(inventory_list) - settings.write_time('Writer', 'write', gettime() - st_time) + settings.write_time('Writer', 'write', timeit.default_timer() - st_time) return True def create_parallel_netcdf(self): @@ -81,7 +81,7 @@ class Writer(object): return None def set_variable_attributes(self, inventory_list): - st_time = gettime() + st_time = timeit.default_timer() empty_dict = {} for ei in inventory_list: for emi in ei.emissions: @@ -92,7 +92,7 @@ class Writer(object): self.variables_attributes = empty_dict.values() - settings.write_time('Writer', 'set_variable_attributes', gettime() - st_time, level=3) + settings.write_time('Writer', 'set_variable_attributes', timeit.default_timer() - st_time, level=3) return True @@ -105,10 +105,10 @@ class Writer(object): :param shape: :return: """ - from timeit import default_timer as gettime + import timeit import numpy as np - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\t\t\t\tGetting data for '{0}' pollutant.".format(variable), level=3) data = None @@ -117,7 +117,7 @@ class Writer(object): for emission in ei.emissions: if emission['name'] == variable: if emission['data'] is not 0: - vertical_time = gettime() + vertical_time = timeit.default_timer() if ei.source_type == 'area': if ei.vertical_factors is not None: aux_data = emission['data'][np.newaxis, :, :] * ei.vertical_factors[:, np.newaxis, np.newaxis] @@ -132,24 +132,24 @@ class Writer(object): aux_data[ei.location['layer'], ei.location['FID']] = emission['data'] aux_data = aux_data.reshape((shape[1], shape[2], shape[3])) - 
settings.write_time('VerticalDistribution', 'calculate_data_by_var', gettime() - vertical_time, + settings.write_time('VerticalDistribution', 'calculate_data_by_var', timeit.default_timer() - vertical_time, level=2) del emission['data'] - temporal_time = gettime() + temporal_time = timeit.default_timer() if data is None: data = np.zeros(shape) if ei.temporal_factors is not None: data += aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] else: data += aux_data[np.newaxis, :, :, :] - settings.write_time('TemporalDistribution', 'calculate_data_by_var', gettime() - temporal_time, + settings.write_time('TemporalDistribution', 'calculate_data_by_var', timeit.default_timer() - temporal_time, level=2) # Unit changes data = self.unit_change(variable, data) if data is not None: data[data < 0] = 0 - settings.write_time('Writer', 'calculate_data_by_var', gettime() - st_time, level=3) + settings.write_time('Writer', 'calculate_data_by_var', timeit.default_timer() - st_time, level=3) return data def unit_change(self, variable, data): @@ -160,7 +160,7 @@ class Writer(object): @staticmethod def calculate_displacements(counts): - st_time = gettime() + st_time = timeit.default_timer() new_list = [0] accum = 0 @@ -168,13 +168,13 @@ class Writer(object): accum += counter new_list.append(accum) - settings.write_time('Writer', 'calculate_displacements', gettime() - st_time, level=3) + settings.write_time('Writer', 'calculate_displacements', timeit.default_timer() - st_time, level=3) return new_list @staticmethod def tuple_to_index(tuple_list, bidimensional=False): from operator import mul - st_time = gettime() + st_time = timeit.default_timer() new_list = [] for tuple in tuple_list: @@ -182,7 +182,7 @@ class Writer(object): new_list.append(tuple[-1] * tuple[-2]) else: new_list.append(reduce(mul, tuple)) - settings.write_time('Writer', 'tuple_to_index', gettime() - st_time, level=3) + settings.write_time('Writer', 'tuple_to_index', timeit.default_timer() - st_time, 
level=3) return new_list @staticmethod @@ -225,3 +225,244 @@ class Writer(object): raise AttributeError("The desired '{0}' output model is not available. ".format(output_model) + "Only accepted 'MONARCH, CMAQ or WRF_CHEM.") sys.exit(1) + + @staticmethod + def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + RegularLatLon=False, + Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, + Mercator=False, lat_ts=None): + from netCDF4 import Dataset + from cf_units import Unit, encode_time + + if not (RegularLatLon or LambertConformalConic or Rotated or Mercator): + RegularLatLon = True + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") + + # ===== Dimensions ===== + if RegularLatLon: + var_dim = ('lat', 'lon',) + + # Latitude + if len(center_latitudes.shape) == 1: + netcdf.createDimension('lat', center_latitudes.shape[0]) + lat_dim = ('lat',) + elif len(center_latitudes.shape) == 2: + netcdf.createDimension('lat', center_latitudes.shape[0]) + lat_dim = ('lon', 'lat',) + else: + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) + sys.exit(1) + + # Longitude + if len(center_longitudes.shape) == 1: + netcdf.createDimension('lon', center_longitudes.shape[0]) + lon_dim = ('lon',) + elif len(center_longitudes.shape) == 2: + netcdf.createDimension('lon', center_longitudes.shape[1]) + lon_dim = ('lon', 'lat',) + else: + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format( + len(center_longitudes.shape)) + sys.exit(1) + elif Rotated: + var_dim = ('rlat', 'rlon',) + + # Rotated Latitude + if rotated_lats is None: + print 'ERROR: For rotated grids is needed the rotated latitudes.' 
+ sys.exit(1) + netcdf.createDimension('rlat', len(rotated_lats)) + lat_dim = ('rlat', 'rlon',) + + # Rotated Longitude + if rotated_lons is None: + print 'ERROR: For rotated grids is needed the rotated longitudes.' + sys.exit(1) + netcdf.createDimension('rlon', len(rotated_lons)) + lon_dim = ('rlat', 'rlon',) + elif LambertConformalConic or Mercator: + var_dim = ('y', 'x',) + + netcdf.createDimension('y', len(lcc_y)) + lat_dim = ('y', 'x',) + + netcdf.createDimension('x', len(lcc_x)) + lon_dim = ('y', 'x',) + + # Levels + if levels is not None: + netcdf.createDimension('lev', len(levels)) + + # Bounds + if boundary_latitudes is not None: + # print boundary_latitudes.shape + # print len(boundary_latitudes[0, 0]) + try: + netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) + except TypeError: + netcdf.createDimension('nv', boundary_latitudes.shape[1]) + + # sys.exit() + + # Time + netcdf.createDimension('time', None) + + # ===== Variables ===== + # Time + if date is None: + time = netcdf.createVariable('time', 'd', ('time',), zlib=True) + time.units = "months since 2000-01-01 00:00:00" + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = [0.] 
+ else: + time = netcdf.createVariable('time', 'd', ('time',), zlib=True) + u = Unit('hours') + # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) + # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') + time.units = str( + u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.standard_name = "time" + time.calendar = "gregorian" + time.long_name = "time" + time[:] = hours + + # Latitude + lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + lats[:] = center_latitudes + + if boundary_latitudes is not None: + lats.bounds = "lat_bnds" + lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) + # print lat_bnds[:].shape, boundary_latitudes.shape + lat_bnds[:] = boundary_latitudes + + # Longitude + lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) + + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + # print 'lons:', lons[:].shape, center_longitudes.shape + lons[:] = center_longitudes + if boundary_longitudes is not None: + lons.bounds = "lon_bnds" + lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) + # print lon_bnds[:].shape, boundary_longitudes.shape + lon_bnds[:] = boundary_longitudes + + if Rotated: + # Rotated Latitude + rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) + rlat.long_name = "latitude in rotated pole grid" + rlat.units = Unit("degrees").symbol + rlat.standard_name = "grid_latitude" + rlat[:] = rotated_lats + + # Rotated Longitude + rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) + rlon.long_name = "longitude in rotated pole grid" + rlon.units = Unit("degrees").symbol + rlon.standard_name = "grid_longitude" + rlon[:] = rotated_lons + if 
LambertConformalConic or Mercator: + x = netcdf.createVariable('x', 'd', ('x',), zlib=True) + x.units = Unit("km").symbol + x.long_name = "x coordinate of projection" + x.standard_name = "projection_x_coordinate" + x[:] = lcc_x + + y = netcdf.createVariable('y', 'd', ('y',), zlib=True) + y.units = Unit("km").symbol + y.long_name = "y coordinate of projection" + y.standard_name = "projection_y_coordinate" + y[:] = lcc_y + + cell_area_dim = var_dim + # Levels + if levels is not None: + var_dim = ('lev',) + var_dim + lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) + lev.units = Unit("m").symbol + lev.positive = 'up' + lev[:] = levels + + # All variables + if len(data_list) is 0: + var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) + var[:] = 0 + for variable in data_list: + # print ('time',) + var_dim + var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True) + var.units = Unit(variable['units']).symbol + if 'long_name' in variable: + var.long_name = str(variable['long_name']) + if 'standard_name' in variable: + var.standard_name = str(variable['standard_name']) + if 'cell_method' in variable: + var.cell_method = str(variable['cell_method']) + var.coordinates = "lat lon" + if cell_area is not None: + var.cell_measures = 'area: cell_area' + if RegularLatLon: + var.grid_mapping = 'crs' + elif Rotated: + var.grid_mapping = 'rotated_pole' + elif LambertConformalConic: + var.grid_mapping = 'Lambert_conformal' + elif Mercator: + var.grid_mapping = 'mercator' + try: + var[:] = variable['data'] + except: + print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) + + # Grid mapping + if RegularLatLon: + # CRS + mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + elif Rotated: + # Rotated pole + mapping = netcdf.createVariable('rotated_pole', 'c') + 
mapping.grid_mapping_name = 'rotated_latitude_longitude' + mapping.grid_north_pole_latitude = north_pole_lat + mapping.grid_north_pole_longitude = north_pole_lon + elif LambertConformalConic: + # CRS + mapping = netcdf.createVariable('Lambert_conformal', 'i') + mapping.grid_mapping_name = "lambert_conformal_conic" + mapping.standard_parallel = lat_1_2 + mapping.longitude_of_central_meridian = lon_0 + mapping.latitude_of_projection_origin = lat_0 + elif Mercator: + # Mercator + mapping = netcdf.createVariable('mercator', 'i') + mapping.grid_mapping_name = "mercator" + mapping.longitude_of_projection_origin = lon_0 + mapping.standard_parallel = lat_ts + + # Cell area + if cell_area is not None: + c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) + c_area.long_name = "area of the grid cell" + c_area.standard_name = "cell_area" + c_area.units = Unit("m2").symbol + # print c_area[:].shape, cell_area.shape + c_area[:] = cell_area + + if global_attributes is not None: + netcdf.setncatts(global_attributes) + + netcdf.close() diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 2eefb26..7b23615 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -19,7 +19,7 @@ from hermesv3_gr.modules.writing.writer import Writer -from timeit import default_timer as gettime +import timeit from hermesv3_gr.config import settings import os import sys @@ -394,7 +394,7 @@ class WriterCmaq(Writer): netcdf.close() def create_parallel_netcdf(self): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tCreating parallel NetCDF file.", level=2) # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) netcdf = Dataset(self.path, mode='w', format="NETCDF4") @@ -447,10 +447,10 @@ class WriterCmaq(Writer): netcdf.close() - settings.write_time('WriterCmaq', 'create_parallel_netcdf', gettime() - st_time, 
level=3) + settings.write_time('WriterCmaq', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_parallel_netcdf(self, emission_list): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tAppending data to parallel NetCDF file.", level=2) if settings.size > 1: @@ -474,10 +474,10 @@ class WriterCmaq(Writer): settings.write_log("\t\t\t'{0}' variable filled".format(variable['name'])) netcdf.close() - settings.write_time('WriterCmaq', 'write_parallel_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterCmaq', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_serial_netcdf(self, emission_list): - st_time = gettime() + st_time = timeit.default_timer() mpi_numpy = False mpi_vector = True @@ -579,7 +579,7 @@ class WriterCmaq(Writer): data = np.concatenate(data, axis=3) except: data = 0 - st_time = gettime() + st_time = timeit.default_timer() index += 1 var = netcdf.createVariable(variable['name'], 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=self.compress) @@ -608,7 +608,7 @@ class WriterCmaq(Writer): sys.exit(1) elif mpi_vector: - var_time = gettime() + var_time = timeit.default_timer() # data_list = []#np.empty(shape, dtype=np.float64) @@ -635,5 +635,5 @@ class WriterCmaq(Writer): netcdf.setncattr(attribute, global_attributes[attribute]) netcdf.close() - settings.write_time('WriterCmaq', 'write_serial_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterCmaq', 'write_serial_netcdf', timeit.default_timer() - st_time, level=3) return True diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index e45fc6d..f823928 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -19,7 +19,7 @@ from hermesv3_gr.modules.writing.writer import Writer -from timeit import default_timer as gettime +import timeit from hermesv3_gr.config import settings import os import sys 
@@ -39,7 +39,7 @@ class WriterMonarch(Writer): def unit_change(self, variable, data): from cf_units import Unit - st_time = gettime() + st_time = timeit.default_timer() if data is not None: units = None @@ -58,13 +58,13 @@ class WriterMonarch(Writer): raise TypeError("The unit '{0}' of specie {1} is not defined correctly. ".format(units, variable) + "Should be 'mol.s-1.m-2' or 'kg.s-1.m-2'") sys.exit(1) - settings.write_time('WriterMonarch', 'unit_change', gettime() - st_time, level=3) + settings.write_time('WriterMonarch', 'unit_change', timeit.default_timer() - st_time, level=3) return data def create_parallel_netcdf(self): from cf_units import Unit, encode_time - st_time = gettime() + st_time = timeit.default_timer() RegularLatLon = False Rotated = False @@ -332,11 +332,11 @@ class WriterMonarch(Writer): netcdf.close() - settings.write_time('WriterMonarch', 'create_parallel_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterMonarch', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_parallel_netcdf(self, emission_list): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tAppending data to parallel NetCDF file.", level=2) if settings.size > 1: @@ -349,7 +349,7 @@ class WriterMonarch(Writer): for variable in self.variables_attributes: data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) - st_time = gettime() + st_time = timeit.default_timer() index += 1 var = netcdf.variables[variable['name']] @@ -367,12 +367,12 @@ class WriterMonarch(Writer): c_area[self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = self.grid.cell_area netcdf.close() - settings.write_time('WriterMonarch', 'write_parallel_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterMonarch', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_serial_netcdf(self, emission_list,): from cf_units import Unit, 
encode_time - st_time = gettime() + st_time = timeit.default_timer() mpi_numpy = False mpi_vector = True @@ -747,5 +747,5 @@ class WriterMonarch(Writer): netcdf.setncatts(self.global_attributes) if settings.rank == 0: netcdf.close() - settings.write_time('WriterMonarch', 'write_serial_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterMonarch', 'write_serial_netcdf', timeit.default_timer() - st_time, level=3) diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index c9b4490..b42a0db 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -19,7 +19,7 @@ from hermesv3_gr.modules.writing.writer import Writer -from timeit import default_timer as gettime +import timeit from hermesv3_gr.config import settings import os import sys @@ -260,7 +260,7 @@ class WriterWrfChem(Writer): return str_out def create_parallel_netcdf(self): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tCreating parallel NetCDF file.", level=2) netcdf = Dataset(self.path, mode='w', format="NETCDF4") @@ -302,10 +302,10 @@ class WriterWrfChem(Writer): netcdf.close() - settings.write_time('WriterCmaq', 'create_parallel_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterCmaq', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_parallel_netcdf(self, emission_list): - st_time = gettime() + st_time = timeit.default_timer() settings.write_log("\tAppending data to parallel NetCDF file.", level=2) if settings.size > 1: @@ -329,10 +329,10 @@ class WriterWrfChem(Writer): settings.write_log("\t\t\t'{0}' variable filled".format(var_name)) netcdf.close() - settings.write_time('WriterCmaq', 'write_parallel_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterCmaq', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_serial_netcdf(self, emission_list): - st_time = 
gettime() + st_time = timeit.default_timer() # Gathering the index rank_position = np.array( @@ -406,13 +406,13 @@ class WriterWrfChem(Writer): data = np.concatenate(data, axis=3) except: data = 0 - st_time = gettime() + st_time = timeit.default_timer() index += 1 var = netcdf.createVariable(var_name, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east',), zlib=self.compress) var.setncatts(self.variables_attributes[var_name]) - var_time = gettime() + var_time = timeit.default_timer() # data_list = []#np.empty(shape, dtype=np.float64) @@ -437,7 +437,7 @@ class WriterWrfChem(Writer): netcdf.setncattr(attribute, global_attributes[attribute]) netcdf.close() - settings.write_time('WriterWrfChem', 'write_serial_netcdf', gettime() - st_time, level=3) + settings.write_time('WriterWrfChem', 'write_serial_netcdf', timeit.default_timer() - st_time, level=3) return True diff --git a/hermesv3_gr/tools/netcdf_tools.py b/hermesv3_gr/tools/netcdf_tools.py index 4adbd9b..d094233 100644 --- a/hermesv3_gr/tools/netcdf_tools.py +++ b/hermesv3_gr/tools/netcdf_tools.py @@ -18,13 +18,14 @@ # along with HERMESv3_GR. If not, see . 
-import os -from timeit import default_timer as gettime import sys -import numpy as np from netCDF4 import Dataset from mpi4py import MPI -from hermesv3_gr.config import settings + +icomm = MPI.COMM_WORLD +comm = icomm.Split(color=0, key=0) +rank = comm.Get_rank() +size = comm.Get_size() def open_netcdf(netcdf_path): @@ -58,7 +59,7 @@ def get_grid_area(filename): return grid_area -def extract_vars(netcdf_path, variables_list, attributes_list=[]): +def extract_vars(netcdf_path, variables_list, attributes_list=list()): data_list = [] # print netcdf_path netcdf = Dataset(netcdf_path, mode='r') @@ -81,7 +82,6 @@ def extract_vars(netcdf_path, variables_list, attributes_list=[]): return data_list - def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, @@ -89,29 +89,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, Mercator=False, lat_ts=None): - """ - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ from cf_units import Unit, encode_time if not (RegularLatLon or LambertConformalConic or Rotated or Mercator): @@ -342,63 +320,44 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.close() -def write_netcdf_parallel(netcdf_path, grid, data_list, + +def create_netcdf(netcdf_path, 
center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, RegularLatLon=False, Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ from cf_units import Unit, encode_time + import sys + from netCDF4 import Dataset + import numpy as np if not (RegularLatLon or LambertConformalConic or Rotated): RegularLatLon = True - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") # ===== Dimensions ===== if RegularLatLon: var_dim = ('lat', 'lon',) # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + if len(center_latitudes.shape) == 1: + netcdf.createDimension('lat', center_latitudes.shape[0]) lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) + elif len(center_latitudes.shape) == 2: + netcdf.createDimension('lat', center_latitudes.shape[0]) lat_dim = ('lon', 'lat', ) else: print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) sys.exit(1) # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) + if 
len(center_longitudes.shape) == 1: + netcdf.createDimension('lon', center_longitudes.shape[0]) lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) + elif len(center_longitudes.shape) == 2: + netcdf.createDimension('lon', center_longitudes.shape[1]) lon_dim = ('lon', 'lat', ) else: print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) @@ -461,8 +420,7 @@ def write_netcdf_parallel(netcdf_path, grid, data_list, time.standard_name = "time" time.calendar = "gregorian" time.long_name = "time" - if rank == 0: - time[:] = hours + time[:] = hours # Latitude lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) @@ -470,31 +428,27 @@ def write_netcdf_parallel(netcdf_path, grid, data_list, lats.axis = "Y" lats.long_name = "latitude coordinate" lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes + lats[:] = center_latitudes if boundary_latitudes is not None: lats.bounds = "lat_bnds" lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes + lat_bnds[:] = boundary_latitudes # Longitude lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) + lons.units = "degrees_east" lons.axis = "X" lons.long_name = "longitude coordinate" lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - + lons[:] = center_longitudes if boundary_longitudes is not None: lons.bounds = "lon_bnds" lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes + lon_bnds[:] = boundary_longitudes if Rotated: # Rotated Latitude @@ -502,30 +456,26 @@ def write_netcdf_parallel(netcdf_path, grid, data_list, rlat.long_name = "latitude in rotated pole grid" rlat.units = 
Unit("degrees").symbol rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats + rlat[:] = rotated_lats # Rotated Longitude rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) rlon.long_name = "longitude in rotated pole grid" rlon.units = Unit("degrees").symbol rlon.standard_name = "grid_longitude" - if rank == 0: - rlon[:] = rotated_lons + rlon[:] = rotated_lons if LambertConformalConic: x = netcdf.createVariable('x', 'd', ('x',), zlib=True) x.units = Unit("km").symbol x.long_name = "x coordinate of projection" x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x + x[:] = lcc_x y = netcdf.createVariable('y', 'd', ('y',), zlib=True) y.units = Unit("km").symbol y.long_name = "y coordinate of projection" y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y + y[:] = lcc_y cell_area_dim = var_dim # Levels @@ -534,16 +484,15 @@ def write_netcdf_parallel(netcdf_path, grid, data_list, lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) lev.units = Unit("m").symbol lev.positive = 'up' - if rank == 0: - lev[:] = levels + lev[:] = levels # All variables if len(data_list) is 0: var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) var[:] = 0 for variable in data_list: - print "Rank {0} var: {1}".format(rank, variable['name']) # print ('time',) + var_dim + # print variable var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True) var.units = Unit(variable['units']).symbol if 'long_name' in variable: @@ -561,11 +510,14 @@ def write_netcdf_parallel(netcdf_path, grid, data_list, var.grid_mapping = 'rotated_pole' elif LambertConformalConic: var.grid_mapping = 'Lambert_conformal' + # print 'HOURSSSSSSSSSSSSSSSSSSSSS:', hours # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - var[:, :, 
grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] + # print var[:].shape, variable['data'].shape, variable['data'].max() + shape = None + exec ("shape = (len(hours), {0}.size, {1}.size, {2}.size)".format(var_dim[0], var_dim[1], var_dim[2])) + # exit() + print shape + var[:] = np.zeros(shape) # Grid mapping if RegularLatLon: @@ -595,3159 +547,11 @@ def write_netcdf_parallel(netcdf_path, grid, data_list, c_area.standard_name = "cell_area" c_area.units = Unit("m2").symbol # print c_area[:].shape, cell_area.shape - c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area + c_area[:] = cell_area if global_attributes is not None: netcdf.setncatts(global_attributes) - - netcdf.close() - - -def write_simple_netcdf_parallel(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - from cf_units import Unit, encode_time - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - - # ===== Dimensions ===== - var_dim = ('lat', 'lon',) - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - # lat_dim = ('lat',) - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - # lon_dim = ('lon',) - netcdf.createDimension('lev', len(levels)) - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - time = netcdf.createVariable('time', 'd', ('time',))#, zlib=True) - u = Unit('hours') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = 
"gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', 'lat')# , zlib=True) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - - # Longitude - lons = netcdf.createVariable('lon', 'f', 'lon')# , zlib=True) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - cell_area_dim = var_dim - # Levels - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',))# , zlib=True) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - - index = 0 - for variable in data_list: - index += 1 - print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index,len(data_list)) - - # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim) - # chunksizes=(1, len(levels), - # grid.x_upper_bound - grid.x_lower_bound, - # grid.y_upper_bound - grid.y_lower_bound))# , zlib=True) - var.units = Unit(variable['units']).symbol - # print "Rank {0} in: {1}, out: {2}".format(rank, variable['data'].shape, var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape) - netcdf.close() - netcdf = Dataset(netcdf_path, mode='r+', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index,len(data_list)) - - var = netcdf.variables[variable['name']] - if index == 8: - pass - # print variable['data'].shape, var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape - else: - var[:, :, grid.x_lower_bound:grid.x_upper_bound, 
grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - netcdf.close() - -def write_chunked_netcdf_parallel(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', 
grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' - sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] 
- else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) - # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) - # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - 
if rank == 0: - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=True) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=True) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - - # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) - var[:] = 0 - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) - # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, - chunksizes=(1, len(levels), - grid.x_upper_bound - grid.x_lower_bound, - grid.y_upper_bound - grid.y_lower_bound), zlib=True) - var.units = Unit(variable['units']).symbol - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - var.coordinates = "lat lon" - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, 
variable['data'].shape) - # print variable - - var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - settings.comm.Barrier() - - print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - -def write_chunked_uncompressed_netcdf_parallel(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param 
center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' 
- sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] - else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim)#, zlib=True) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',))#, zlib=True) - # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', 
lon_dim)#, zlib=True) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',))#, zlib=True) - # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',))#, zlib=True) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',))#, zlib=True) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - if rank == 0: - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',))#, zlib=True) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',))#, zlib=True) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',))#, zlib=True) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - - # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim)#, zlib=True) - var[:] = 0 - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) 
- # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, - chunksizes=(1, len(levels), - grid.x_upper_bound - grid.x_lower_bound, - grid.y_upper_bound - grid.y_lower_bound))# , zlib=True) - var.units = Unit(variable['units']).symbol - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - var.coordinates = "lat lon" - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - settings.comm.Barrier() - - print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell 
area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - -def only_create_chunked_uncompressed_netcdf_parallel(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - compressed = False - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of 
{0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' - sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] 
- else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) - # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) - # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - 
rlon.standard_name = "grid_longitude" - if rank == 0: - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - - # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) - var[:] = 0 - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) - # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, - chunksizes=(1, len(levels), - grid.x_upper_bound - grid.x_lower_bound, - grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) - # print '1' - - var.units = Unit(variable['units']).symbol - # print '2' - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - # print '3' - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - # print '4' - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - # print '5' - var.coordinates = "lat lon" - # print '6' - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - # 
print '8' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - print "Rank {0} created var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - -def only_write_chunked_uncompressed_netcdf_parallel(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - 
LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='r+', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - # print "Rank {0} writing var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) - - var = netcdf.variables[variable['name']] - var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - # print "Rank {0} ---> 2".format(rank) - # settings.comm.Barrier() - print "TIME -> VarWritten Rank {0} {1} {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - c_area = netcdf.variables['cell_area'] - c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - netcdf.close() - -def write_netcdf_serie(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param 
boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' 
- sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - netcdf.createDimension('time', None) - # netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] - else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim)#, zlib=True) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',))#, zlib=True) - # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', 
lon_dim)#, zlib=True) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',))#, zlib=True) - # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',))#, zlib=True) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',))#, zlib=True) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - if rank == 0: - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',))#, zlib=True) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',))#, zlib=True) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',))#, zlib=True) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - - # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim)#, zlib=True) - var[:] = 0 - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - print "Rank {0} var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) 
- # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, - chunksizes=(1, len(levels), - grid.x_upper_bound - grid.x_lower_bound, - grid.y_upper_bound - grid.y_lower_bound))# , zlib=True) - var.units = Unit(variable['units']).symbol - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - var.coordinates = "lat lon" - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - print "Rank {0} var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not 
None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - - -def gather_netcdf(file_list, output_path, common_vars=[]): - c_lats, c_lons, levs = extract_vars(file_list[0], ['lat', 'lon', 'lev']) - - from timeit import default_timer as get_time - - st_time = get_time() - data_list = [] - - for file in file_list: - nc_in = Dataset(file, mode='r') - # variables = nc_in.variables - for i, var in nc_in.variables.iteritems(): - if var.name not in common_vars: - dict_aux = \ - { - 'name': var.name, - 'data': var[:], - 'units': var.units, - } - data_list.append(dict_aux) - - # print type(variables) - # print variables.popitem() - # OrderedDict. 
- nc_in.close() - write_netcdf(output_path, c_lats['data'], c_lons['data'], data_list, levels=levs['data']) - - print 'Time of gattering netcdf: {0}'.format(round(get_time() - st_time, 2)) - -# @profile -def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - from cf_units import Unit, encode_time - import sys - from netCDF4 import Dataset - import numpy as np - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(center_latitudes.shape) == 1: - netcdf.createDimension('lat', center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(center_latitudes.shape) == 2: - netcdf.createDimension('lat', center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(center_longitudes.shape) == 1: - netcdf.createDimension('lon', center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(center_longitudes.shape) == 2: - netcdf.createDimension('lon', center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' 
- sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' - sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - netcdf.createDimension('time', None) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',), zlib=True) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] 
- else: - time = netcdf.createVariable('time', 'd', ('time',), zlib=True) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - lats[:] = center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) - # print lat_bnds[:].shape, boundary_latitudes.shape - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) - - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - lons[:] = center_longitudes - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) - # print lon_bnds[:].shape, boundary_longitudes.shape - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), 
zlib=True) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=True) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) - lev.units = Unit("m").symbol - lev.positive = 'up' - lev[:] = levels - - # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) - var[:] = 0 - for variable in data_list: - # print ('time',) + var_dim - # print variable - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True) - var.units = Unit(variable['units']).symbol - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - var.coordinates = "lat lon" - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - # print 'HOURSSSSSSSSSSSSSSSSSSSSS:', hours - # if variable['data'] is not 0: - # print var[:].shape, variable['data'].shape, variable['data'].max() - shape = None - exec ("shape = (len(hours), {0}.size, {1}.size, {2}.size)".format(var_dim[0], var_dim[1], var_dim[2])) - # exit() - print shape - var[:] = np.zeros(shape) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - 
mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - c_area[:] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - return netcdf - - -def create_netcdf_close(netcdf_path, center_latitudes, center_longitudes, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - from cf_units import Unit, encode_time - import sys - from netCDF4 import Dataset - import numpy as np - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(center_latitudes.shape) == 1: - netcdf.createDimension('lat', center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(center_latitudes.shape) == 2: - netcdf.createDimension('lat', center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of 
{0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(center_longitudes.shape) == 1: - netcdf.createDimension('lon', center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(center_longitudes.shape) == 2: - netcdf.createDimension('lon', center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' - sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - netcdf.createDimension('time', None) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',), zlib=True) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] 
- else: - time = netcdf.createVariable('time', 'd', ('time',), zlib=True) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=True) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - lats[:] = center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=True) - # print lat_bnds[:].shape, boundary_latitudes.shape - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=True) - - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - lons[:] = center_longitudes - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=True) - # print lon_bnds[:].shape, boundary_longitudes.shape - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=True) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), 
zlib=True) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=True) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=True) - lev.units = Unit("m").symbol - lev.positive = 'up' - lev[:] = levels - - # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=True) - var[:] = 0 - for variable in data_list: - # print ('time',) + var_dim - # print variable - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=True) - var.units = Unit(variable['units']).symbol - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - var.coordinates = "lat lon" - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - # print 'HOURSSSSSSSSSSSSSSSSSSSSS:', hours - # if variable['data'] is not 0: - # print var[:].shape, variable['data'].shape, variable['data'].max() - shape = None - exec ("shape = (len(hours), {0}.size, {1}.size, {2}.size)".format(var_dim[0], var_dim[1], var_dim[2])) - # exit() - var[:] = np.zeros(shape) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = 
netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - c_area[:] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - netcdf.close - return True - - - - - -def fill_netcdf(tstep, nc, data): - import numpy as np - print tstep - - print 'Filling time step number {0}'.format(tstep) - - for pollutant in data: - # print pollutant - var = nc.variables[pollutant['name']] - # print pollutant['data'].shape - # print var.shape - # print 'isnan:', np.isnan(var[:].sum()) - # print 'var:', var[:].sum(), type(var[:].sum()) - # exit() - if pollutant['data'] is not 0: - if pollutant['data'].shape[1] is 1: - var[tstep, 0, :, :] += pollutant['data'][0, :, :] - else: - print var[tstep, :, :, :].shape, pollutant['data'].shape - var[tstep, :, :, :] += pollutant['data'][0, :, :, :] - # print var[:].shape - - -def fill_netcdf_level_tstep(nc, tstep, level, data): - # TODO Documentation - import numpy as np - from timeit import default_timer as gettime - - st_time = gettime() - - # print nc - print '\t\t\tFilling time step number {0}, level {1}'.format(tstep, level) - for pollutant in data: - if pollutant['data'] is not 0: - # Condition to do anything if is an empty layer - if float(round(pollutant['data'].sum(), 5)) != float(round(0, 5)): - var = 
nc.variables[pollutant['name']] - # print 'SUUUUUUUUUUUUUUM 0', var[:].flatten()[4425], pollutant['name'], tstep, level - # TODO differentiate this two conditions - if pollutant['data'].shape[1] is 1: - var[tstep, level, :, :] = pollutant['data'][0, 0, :, :] - else: - var[tstep, level, :, :] = pollutant['data'][0, :, :] - # print 'SUUUUUUUUUUUUUUM 1', var[:].flatten()[4425], pollutant['name'], tstep, level - - print 'TIME -> fill_netcdf_level_tstep: {0} s'.format(round(gettime() - st_time, 2)) - - - - - -def only_create_netcdf_old(netcdf_path, grid, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - compressed = False - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', 
grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' - sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] 
- else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) - # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) - # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - 
rlon.standard_name = "grid_longitude" - if rank == 0: - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - # print 'DATA LIIIIST {0}'.format(data_list) -# # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) - var[:] = 0 - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - # print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) - # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) - # chunksizes=(1, len(levels), - # grid.x_upper_bound - grid.x_lower_bound, - # grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) - # print '1' - - var.units = Unit(variable['units']).symbol - # print '2' - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - # print '3' - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - # print '4' - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - # print '5' - var.coordinates = "lat lon" - # print '6' - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif 
LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - - - # print '8' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - # print "Rank {0} created var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - -def only_create_netcdf(netcdf_path, grid, data_list, levels=None, date=None, hours=None, global_attributes=None): - from cf_units import Unit, encode_time - - # boundary_latitudes=self.grid.boundary_latitudes, - # boundary_longitudes=self.grid.boundary_longitudes, - # 
cell_area=self.grid.cell_area, - # RegularLatLon=True - - compressed = False - RegularLatLon = False - Rotated = False - LambertConformalConic = False - if grid.grid_type == 'global': - RegularLatLon = True - elif grid.grid_type == 'rotated': - Rotated = True - elif grid.grid_type == 'lcc': - LambertConformalConic = True - - # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - # print 'NETCDF PATH: {0}'.format(netcdf_path) - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if grid.rlat is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(grid.rlat)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if grid.rlon is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' 
- sys.exit(1) - netcdf.createDimension('rlon', len(grid.rlon)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if grid.boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(grid.boundary_latitudes[0, 0])) - # sys.exit() - - # Time - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] - else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if settings.rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if settings.rank == 0: - lats[:] = grid.center_latitudes - - if grid.boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) - # print lat_bnds[:].shape, boundary_latitudes.shape - if settings.rank == 0: - lat_bnds[:] = grid.boundary_latitudes - - # 
Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if settings.rank == 0: - lons[:] = grid.center_longitudes - - if grid.boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) - # print lon_bnds[:].shape, boundary_longitudes.shape - if settings.rank == 0: - lon_bnds[:] = grid.boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if settings.rank == 0: - rlat[:] = grid.rlat - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - if settings.rank == 0: - rlon[:] = grid.rlon - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if settings.rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if settings.rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) - lev.units = Unit("m").symbol - lev.positive = 'up' - if settings.rank == 0: - lev[:] = levels - # print 'DATA LIIIIST {0}'.format(data_list) -# # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, 
zlib=compressed) - var[:] = 0 - - index = 0 - for variable in data_list: - st_time = gettime() - index += 1 - # print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) - # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) - # chunksizes=(1, len(levels), - # grid.x_upper_bound - grid.x_lower_bound, - # grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) - # print '1' - - var.units = Unit(variable['units']).symbol - # print '2' - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - # print '3' - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - # print '4' - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - # print '5' - var.coordinates = "lat lon" - # print '6' - if grid.cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - - - # print '8' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - # print "Rank {0} created var: {1}; time: {2}".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = grid.new_pole_latitude_degrees - 
mapping.grid_north_pole_longitude = 90 - grid.new_pole_longitude_degrees - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if grid.cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - - -# @profile -def only_write_netcdf_parallel(netcdf_path, grid, var_names, emision_list): - - # print "Rank {0} 1".format(rank) - - netcdf = Dataset(netcdf_path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - index = 0 - # print "Rank {0} 2".format(rank) - for variable in var_names: - - data = calculate_data_by_var(variable, emision_list, grid.shape) - st_time = gettime() - index += 1 - print "Writing var {1} Rank {0} {2}/{3} ".format(settings.rank, variable, index, len(var_names)) - - var = netcdf.variables[variable] - var.set_collective(True) - var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = data - - # print "Rank {0} ---> 2".format(rank) - # settings.comm.Barrier() - print "TIME -> Written var {1} Rank {0} {2} s\n".format(settings.rank, variable, round(gettime() - st_time, 2)) - - if grid.cell_area is not None: - c_area = netcdf.variables['cell_area'] - c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = grid.cell_area - - netcdf.close() - - -def serial_netcdf(netcdf_path, grid, data_list, emision_list, levels=None, date=None, 
hours=None, global_attributes=None): - from cf_units import Unit, encode_time - - mpi_numpy = False - mpi_vector = True - - # Gathering the index - if mpi_numpy or mpi_vector: - rank_position = np.array([grid.x_lower_bound, grid.x_upper_bound, grid.y_lower_bound, grid.y_upper_bound], dtype='i') - full_position = None - if settings.rank == 0: - full_position = np.empty([settings.size, 4], dtype='i') - settings.comm.Gather(rank_position, full_position, root=0) - # print 'Rank {0} recv: {1} '.format(settings.rank, full_position) - # exit() - # if rank != 0: - # - # for variable in data_list: - # rank_data = calculate_data_by_var(variable['name'], emision_list, grid.shape) - # settings.comm.gather(rank_data, root=0) - - if settings.rank == 0: - compressed = True - - RegularLatLon = False - Rotated = False - LambertConformalConic = False - - LambertConformalConic = False - if grid.grid_type == 'global': - RegularLatLon = True - elif grid.grid_type == 'rotated': - Rotated = True - elif grid.grid_type == 'lcc': - LambertConformalConic = True - - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - # print 'NETCDF PATH: {0}'.format(netcdf_path) - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D 
array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if grid.rlat is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(grid.rlat)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if grid.rlon is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' - sys.exit(1) - netcdf.createDimension('rlon', len(grid.rlon)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if grid.boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(grid.boundary_latitudes[0, 0])) - # sys.exit() - - # Time - # netcdf.createDimension('time', None) - netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] 
- else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - lats[:] = grid.center_latitudes - - if grid.boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) - # print lat_bnds[:].shape, boundary_latitudes.shape - lat_bnds[:] = grid.boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', lon_dim, zlib=compressed) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - lons[:] = grid.center_longitudes - - if grid.boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) - # print lon_bnds[:].shape, boundary_longitudes.shape - lon_bnds[:] = grid.boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - rlat[:] = grid.rlat - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - rlon[:] = grid.rlon - if 
LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) - lev.units = Unit("m").symbol - lev.positive = 'up' - lev[:] = levels - # print 'DATA LIIIIST {0}'.format(data_list) - # # All variables - if len(data_list) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) - var[:] = 0 - - full_shape = None - index = 0 - for variable in data_list: - rank_data = calculate_data_by_var(variable['name'], emision_list, grid.shape) - if mpi_numpy or mpi_vector: - if rank_data is not None: - root_shape = settings.comm.bcast(rank_data.shape, root=0) - if full_shape is None: - # rank_shape = rank_data.shape - # full_shape = settings.comm.gather(rank_data.shape, root=0) - full_shape = settings.comm.allgather(rank_data.shape) - # print 'Rank {0} full_shape: {1}\n'.format(settings.rank, full_shape) - if mpi_numpy: - if settings.size != 1: - if settings.rank == 0: - recvbuf = np.empty((settings.size,) + rank_data.shape) - else: - recvbuf = None - if root_shape != rank_data.shape: - rank_data_aux = np.empty(root_shape) - rank_data_aux[:, :, :, :-1] = rank_data - rank_data = rank_data_aux - print 'Rank {0} data.shape {1}'.format(settings.rank, rank_data.shape) - settings.comm.Gather(rank_data, recvbuf, root=0) - else: - recvbuf = rank_data - elif mpi_vector: - if rank_data is not None: - counts_i = tuple_to_index(full_shape) - rank_buff = [rank_data, counts_i[settings.rank]] - if settings.rank == 0: - displacements = 
calculate_displacements(counts_i) - recvdata = np.empty(sum(counts_i), dtype=settings.precision) - else: - displacements = None - recvdata = None - if settings.precision == np.float32: - recvbuf = [recvdata, counts_i, displacements, MPI.FLOAT] - elif settings.precision == np.float64: - recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE] - else: - print 'ERROR: precission {0} unknown'.format(settings.precision) - - # print "Rank {0} sendbuf: {1}".format(settings.rank, sendbuf) - - settings.comm.Gatherv(rank_buff, recvbuf, root=0) - - else: - if settings.size != 1: - data = settings.comm.gather(rank_data, root=0) - else: - data = rank_data - - # print "Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(data_list)) - # print ('time',) + var_dim - # data = calculate_data_by_var(variable['name'], emision_list, grid.shape) - - if settings.rank == 0: - print full_shape - # print recvbuf.shape - if not (mpi_numpy or mpi_vector): - if settings.size != 1: - try: - data = np.concatenate(data, axis=3) - except: - print 'var: {0} data: {1}'.format(variable['name'], data) - data = 0 - # print 'data shape 0 : {0}'.format(np.concatenate(data, axis=0).shape) - # print 'data shape 1 : {0}'.format(np.concatenate(data, axis=1).shape) - # print 'data shape 2 : {0}'.format(np.concatenate(data, axis=2).shape) - # print 'data shape 3 : {0}'.format(np.concatenate(data, axis=3).shape) - # exit() - - # data = fix_gathered_data(data, (len(hours), len(levels), )) - st_time = gettime() - index += 1 - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) - # chunksizes=(1, len(levels), - # grid.x_upper_bound - grid.x_lower_bound, - # grid.y_upper_bound - grid.y_lower_bound), zlib=compressed) - # print '1' - - var.units = Unit(variable['units']).symbol - # print '2' - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - # print '3' - if 'standard_name' in variable: - var.standard_name = 
str(variable['standard_name']) - # print '4' - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - # print '5' - var.coordinates = "lat lon" - # print '6' - if grid.cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - - - # print '8' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - # - if mpi_numpy: - data = np.ones(var[:].shape, dtype=settings.precision) * 100 - for i in xrange(settings.size): - print i, full_shape[i] - # print recvbuf[i, :, :, :full_shape[i][-2], : full_shape[i][-1]].shape - # print i - if True: - try: - if i == 0: - # print full_position[i] - # data[:, :, :, :full_position[i][3]] = recvbuf[i] - var[:, :, :, :full_position[i][3]] = recvbuf[i] - elif i == settings.size - 1: - # data[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, : full_shape[i][-1]] - # data[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1 ] - var[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1] - else: - # data[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, : full_shape[i][-1]] - var[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, - : full_shape[i][-1]] - # data[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :,1:] - # data[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, : full_shape[i][-1]] - except: - print 'ERROR on i {0}'.format(i) - print 'data shape: {0} recvbuf shape {1}'.format(data[:, :, :, full_position[i][2]:].shape, recvbuf[i].shape) - # print 'data shape: {0} recvbuf shape {1}'.format(data[:, :, :, full_position[i][2]:full_position[i][3]].shape, recvbuf[i, :, :, :, : full_shape[i][-1]].shape) - - 
- # data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :full_shape[i][-2] +1, : full_shape[i][-1]+1] - # var[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :full_shape[i][-2], : full_shape[i][-1]] - # print data[:] - var[:] - # var[:] = data - elif mpi_vector: - var_time = gettime() - - # data_list = []#np.empty(shape, dtype=np.float64) - - if rank_data is not None: - data = np.empty(var[:].shape, dtype=settings.precision) - for i in xrange(settings.size): - # print 'Resizeing {0}'.format(i) - if not i == settings.size - 1: - # print dspls_i[i], dspls_i[i + 1], recvbuf[0][0:9], counts[i] - # data_aux = np.array(recvbuf[0][dspls_i[i]: dspls_i[i + 1]]).reshape(counts[i]) - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) - else: - # data_aux = np.array(recvbuf[0][dspls_i[i]:]).reshape(counts[i]) - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) - # data_list.append(data_aux) - # data = np.concatenate(data_list, axis=1) - else: - data = 0 - print "Var {0} comm time: {1}".format(variable['name'], round(gettime() - var_time, 2)) - var[:] = data - else: - var[:] = data - print "Var {0} writing time: {1}".format(variable['name'], round(gettime() - st_time, 2)) - # exit() - - - if settings.rank == 0: - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = 
grid.new_pole_latitude_degrees - mapping.grid_north_pole_longitude = 90 - grid.new_pole_longitude_degrees - elif LambertConformalConic: - # CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - if grid.cell_area is not None: - # counts_i = tuple_to_index(full_shape, bidimensional=True) - # # print grid.cell_area.shape - # rank_buff = [np.ascontiguousarray(grid.cell_area, dtype=np.float32), counts_i[settings.rank]] - # if settings.rank == 0: - # displacements = calculate_displacements(counts_i) - # recvdata = np.empty(sum(counts_i), dtype=settings.precision) - # else: - # displacements = None - # recvdata = None - # recvbuf = [recvdata, counts_i, displacements, MPI.DOUBLE] - # - # # print "Rank {0} sendbuf: {1}".format(settings.rank, sendbuf) - # - # settings.comm.Gatherv(rank_buff, recvbuf, root=0) - - cell_area = settings.comm.gather(grid.cell_area, root=0) - # cell_area = np.concatenate(cell_area, axis=1) - if settings.rank == 0: - # Cell area - if grid.cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - # cell_area = np.zeros(c_area[:].shape, dtype=settings.precision) - # # for i in xrange(settings.size): - # for i in xrange(1): - # print recvbuf[0][:10] - # print np.ascontiguousarray(grid.cell_area, dtype=np.float32).flatten()[:10] - # # print 'Resizeing {0}'.format(i) - # print full_shape[i] - # if not i == settings.size - 1: - # # print dspls_i[i], dspls_i[i + 1], recvbuf[0][0:9], counts[i] - # # data_aux = np.array(recvbuf[0][dspls_i[i]: dspls_i[i + 
1]]).reshape(counts[i]) - # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape((full_shape[i][-2], full_shape[i][-1])) - # # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = grid.cell_area - # else: - # # data_aux = np.array(recvbuf[0][dspls_i[i]:]).reshape(counts[i]) - # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape((full_shape[i][-2], full_shape[i][-1])) - # # cell_area[full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = grid.cell_area - # - cell_area = np.concatenate(cell_area, axis=1) - c_area[:] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - -def only_create_write_netcdf(netcdf_path, grid, var_names, emision_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): - """ - - :param netcdf_path: - :param center_latitudes: - :param center_longitudes: - :param data_list: - :param levels: - :param boundary_latitudes: - :param boundary_longitudes: - :param cell_area: - - :param global_attributes: - :type global_attributes: dict - - :param RegularLatLon: - :param Rotated: - :param rotated_lats: - :param rotated_lons: - :param north_pole_lat: - :param north_pole_lon: - :param LambertConformalConic: - :return: - """ - from cf_units import Unit, encode_time - - compressed = False - - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True - - netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, 
comm=settings.comm, info=MPI.Info()) - # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") - - # ===== Dimensions ===== - if RegularLatLon: - var_dim = ('lat', 'lon',) - - # Latitude - if len(grid.center_latitudes.shape) == 1: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lat',) - elif len(grid.center_latitudes.shape) == 2: - netcdf.createDimension('lat', grid.center_latitudes.shape[0]) - lat_dim = ('lon', 'lat', ) - else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(center_latitudes.shape)) - sys.exit(1) - - # Longitude - if len(grid.center_longitudes.shape) == 1: - netcdf.createDimension('lon', grid.center_longitudes.shape[0]) - lon_dim = ('lon',) - elif len(grid.center_longitudes.shape) == 2: - netcdf.createDimension('lon', grid.center_longitudes.shape[1]) - lon_dim = ('lon', 'lat', ) - else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) - sys.exit(1) - elif Rotated: - var_dim = ('rlat', 'rlon',) - - # Rotated Latitude - if rotated_lats is None: - print 'ERROR: For rotated grids is needed the rotated latitudes.' - sys.exit(1) - netcdf.createDimension('rlat', len(rotated_lats)) - lat_dim = ('rlat', 'rlon',) - - # Rotated Longitude - if rotated_lons is None: - print 'ERROR: For rotated grids is needed the rotated longitudes.' 
- sys.exit(1) - netcdf.createDimension('rlon', len(rotated_lons)) - lon_dim = ('rlat', 'rlon',) - - elif LambertConformalConic: - var_dim = ('y', 'x',) - - netcdf.createDimension('y', len(lcc_y)) - lat_dim = ('y', 'x', ) - - netcdf.createDimension('x', len(lcc_x)) - lon_dim = ('y', 'x', ) - - # Levels - if levels is not None: - netcdf.createDimension('lev', len(levels)) - - # Bounds - if boundary_latitudes is not None: - # print boundary_latitudes.shape - # print len(boundary_latitudes[0, 0]) - netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) - # sys.exit() - - # Time - netcdf.createDimension('time', None) - # netcdf.createDimension('time', len(hours)) - - # ===== Variables ===== - # Time - if date is None: - time = netcdf.createVariable('time', 'd', ('time',)) - time.units = "months since 2000-01-01 00:00:00" - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - time[:] = [0.] - else: - time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) - time.standard_name = "time" - time.calendar = "gregorian" - time.long_name = "time" - if rank == 0: - time[:] = hours - - # Latitude - lats = netcdf.createVariable('lat', 'f', lat_dim, zlib=compressed) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if rank == 0: - lats[:] = grid.center_latitudes - - if boundary_latitudes is not None: - lats.bounds = "lat_bnds" - lat_bnds = netcdf.createVariable('lat_bnds', 'f', lat_dim + ('nv',), zlib=compressed) - # print lat_bnds[:].shape, boundary_latitudes.shape - if rank == 0: - lat_bnds[:] = boundary_latitudes - - # Longitude - lons = netcdf.createVariable('lon', 'f', 
lon_dim, zlib=compressed) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if rank == 0: - lons[:] = grid.center_longitudes - - if boundary_longitudes is not None: - lons.bounds = "lon_bnds" - lon_bnds = netcdf.createVariable('lon_bnds', 'f', lon_dim + ('nv',), zlib=compressed) - # print lon_bnds[:].shape, boundary_longitudes.shape - if rank == 0: - lon_bnds[:] = boundary_longitudes - - if Rotated: - # Rotated Latitude - rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=compressed) - rlat.long_name = "latitude in rotated pole grid" - rlat.units = Unit("degrees").symbol - rlat.standard_name = "grid_latitude" - if rank == 0: - rlat[:] = rotated_lats - - # Rotated Longitude - rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=compressed) - rlon.long_name = "longitude in rotated pole grid" - rlon.units = Unit("degrees").symbol - rlon.standard_name = "grid_longitude" - if rank == 0: - rlon[:] = rotated_lons - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=compressed) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - if rank == 0: - x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=compressed) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - if rank == 0: - y[:] = lcc_y - - cell_area_dim = var_dim - # Levels - if levels is not None: - var_dim = ('lev',) + var_dim - lev = netcdf.createVariable('lev', 'f', ('lev',), zlib=compressed) - lev.units = Unit("m").symbol - lev.positive = 'up' - if rank == 0: - lev[:] = levels - # print 'DATA LIIIIST {0}'.format(data_list) -# # All variables - if len(var_names) is 0: - var = netcdf.createVariable('aux_var', 'f', ('time',) + var_dim, zlib=compressed) - var[:] = 0 - - index = 0 - for variable in var_names: - st_time = gettime() - index += 1 - print 
"Rank {0} creating var: {1}; {2}/{3}".format(rank, variable['name'], index, len(var_names)) - # print ('time',) + var_dim - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=compressed) - # print '1' - - var.units = Unit(variable['units']).symbol - # print '2' - if 'long_name' in variable: - var.long_name = str(variable['long_name']) - # print '3' - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) - # print '4' - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) - # print '5' - var.coordinates = "lat lon" - # print '6' - if cell_area is not None: - var.cell_measures = 'area: cell_area' - if RegularLatLon: - var.grid_mapping = 'crs' - elif Rotated: - var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: - var.grid_mapping = 'Lambert_conformal' - - var.set_collective(True) - - data = calculate_data_by_var(variable['name'], emision_list, grid.shape) - - var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = data - - # print '8' - # if variable['data'] is not 0: - # print '{0} {1}'.format(var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound].shape, variable['data'].shape) - # print variable - - # var[:, :, grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = variable['data'] - - print "Rank {0} created var: {1}; time: {2} s".format(rank, variable['name'], round(gettime() - st_time, 2)) - - # Grid mapping - if RegularLatLon: - # CRS - mapping = netcdf.createVariable('crs', 'i') - mapping.grid_mapping_name = "latitude_longitude" - mapping.semi_major_axis = 6371000.0 - mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole - mapping = netcdf.createVariable('rotated_pole', 'c') - mapping.grid_mapping_name = 'rotated_latitude_longitude' - mapping.grid_north_pole_latitude = north_pole_lat - mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: - 
# CRS - mapping = netcdf.createVariable('Lambert_conformal', 'i') - mapping.grid_mapping_name = "lambert_conformal_conic" - mapping.standard_parallel = lat_1_2 - mapping.longitude_of_central_meridian = lon_0 - mapping.latitude_of_projection_origin = lat_0 - - # Cell area - if cell_area is not None: - c_area = netcdf.createVariable('cell_area', 'f', cell_area_dim) - c_area.long_name = "area of the grid cell" - c_area.standard_name = "cell_area" - c_area.units = Unit("m2").symbol - # print c_area[:].shape, cell_area.shape - # c_area[grid.x_lower_bound:grid.x_upper_bound, grid.y_lower_bound:grid.y_upper_bound] = cell_area - - if global_attributes is not None: - netcdf.setncatts(global_attributes) - - netcdf.close() - -# @profile -def calculate_data_by_var(variable, inventory_list, shape): - # TODO Documentation - """ - - :param variable: - :param inventory_list: - :param shape: - :return: - """ - from timeit import default_timer as gettime - - st_time = gettime() - # print 'Getting Data' - # print variable, shape - - data = None - # data = np.zeros(shape) - - for ei in inventory_list: - for emission in ei.emissions: - if emission['name'] == variable: - if emission['data'] is not 0: - # print variable - - if ei.vertical_factors is not None: - aux_data = emission['data'] * ei.vertical_factors[:, np.newaxis, np.newaxis] - else: - if not len(emission['data'].shape) == 4: - aux_data = np.zeros((shape[1], shape[2], shape[3])) - aux_data[0, :, :] = emission['data'] - else: - aux_data = emission['data'] - - del emission['data'] - - if ei.temporal_factors is not None: - if data is None: - data = aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] - else: - data += aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] - else: - if data is None: - data = aux_data[np.newaxis, :, :, :] - else: - data += aux_data[np.newaxis, :, :, :] - - print "TIME -> get_data_by_var: Rank {2} {0} -> {1} s\n".format(variable, round(gettime() - st_time, 4), 
settings.rank) - - return data + return netcdf def tuple_to_index(tuple_list, bidimensional=False): -- GitLab From b76c301a181b03036357ee7a3ac70875dd710882 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 12:33:24 +0200 Subject: [PATCH 08/51] cleaning files --- hermesv3_gr/modules/masking/masking.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/hermesv3_gr/modules/masking/masking.py b/hermesv3_gr/modules/masking/masking.py index 744a145..98c113c 100644 --- a/hermesv3_gr/modules/masking/masking.py +++ b/hermesv3_gr/modules/masking/masking.py @@ -21,7 +21,6 @@ import timeit import hermesv3_gr.config.settings as settings import os -import sys from warnings import warn as warning @@ -46,8 +45,6 @@ class Masking(object): settings.write_time('Masking', 'Init', timeit.default_timer() - st_time, level=3) - return None - def get_country_codes(self): import pandas as pd @@ -163,7 +160,7 @@ class Masking(object): values = list(map(str, re.split(' , |, | ,|,', values))) scale_dict = {} for element in values: - element = list(map(str, re.split(' | ', element))) + element = list(map(str, re.split("{0}{0}|{0}".format(' '), element))) scale_dict[int(self.country_codes[element[0]])] = element[1] settings.write_log('\t\t\tApplying scaling factors for {0}.'.format(values), level=3) -- GitLab From c7518b57e274268da2da3cbb7ca43cbfc995d977 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 13:09:09 +0200 Subject: [PATCH 09/51] cleaning files --- hermesv3_gr/modules/regrid/regrid.py | 104 ---------- .../modules/regrid/regrid_conservative.py | 193 ++---------------- 2 files changed, 12 insertions(+), 285 deletions(-) diff --git a/hermesv3_gr/modules/regrid/regrid.py b/hermesv3_gr/modules/regrid/regrid.py index c26f113..2edbc06 100644 --- a/hermesv3_gr/modules/regrid/regrid.py +++ b/hermesv3_gr/modules/regrid/regrid.py @@ -21,7 +21,6 @@ import os import sys import numpy as np import timeit - import hermesv3_gr.config.settings as 
settings @@ -62,8 +61,6 @@ class Regrid(object): settings.write_time('Regrid', 'Init', round(timeit.default_timer() - st_time), level=3) - return None - def create_weight_matrix(self): """ This function is not used because all the child classes have to implement it. @@ -162,84 +159,6 @@ class Regrid(object): return dst_field - # @staticmethod - # def reduce_dst_field_test1(dst_field_aux, dst_indices_counts, max_values): - # import numexpr as ne - # import numpy as np - # - # if settings.log_level_3: - # st_time = timeit.default_timer() - # else: - # st_time = None - # - # dst_indices_counts_2 = np.cumsum(dst_indices_counts) - # - # dst_field = np.zeros((dst_field_aux.shape[0], max_values), dtype=settings.precision) - # previous = 0 - # count = 0 - # a = 0 - # # print dst_indices_counts_2 - # for i in dst_indices_counts_2: - # # print count - # # dst_field[:, count] = ne.evaluate('sum(dst_field_aux[:, {0}:{1}], 0)'.format(previous, i, 0), out= a) - # dst_field[:, count] = ne.evaluate ('sum(u)', {'u': dst_field_aux[:, previous:i]}) - # - # count += 1 - # previous = i - # - # if settings.log_level_3: - # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - # - # return dst_field - # - # @staticmethod - # def reduce_dst_field_test2(dst_field_aux, dst_indices, max_values): - # import numpy as np - # - # if settings.log_level_3: - # st_time = timeit.default_timer() - # else: - # st_time = None - # - # # print dst_field_aux[0: 10], len(dst_field_aux), dst_field_aux.shape - # # print dst_indices[0: 10], len(dst_indices), dst_indices.shape - # print '1' - # unique, counts = np.unique(dst_indices, return_counts=True) - # print '2' - # counts_2 = np.cumsum(counts) - # print '3' - # - # # print unique[0: 10], len(unique), unique.shape - # # print counts[0: 10], len(counts), counts.shape, type(counts) - # - # dst_field_aux_2 = np.array_split(dst_field_aux, counts_2, axis=1) - # print '4' - # - # print dst_field_aux_2[0: 10], 
len(dst_field_aux_2), dst_field_aux_2.shape - # - # exit() - # - # dst_indices_counts_2 = np.cumsum(dst_indices_counts) - # print 'max values', max_values - # print dst_indices_counts[0:10], len(dst_indices_counts) - # print dst_indices_counts_2[0:10], len(dst_indices_counts_2) - # exit() - # dst_field = np.zeros((dst_field_aux.shape[0], max_values), dtype=settings.precision) - # previous = 0 - # count = 0 - # # print dst_indices_counts_2 - # for i in dst_indices_counts_2: - # # print count - # dst_field[:, count] = dst_field_aux[:, previous:i].sum(axis=1, dtype=settings.precision) - # - # count += 1 - # previous = i - # - # if settings.log_level_3: - # print 'TIME -> Regrid.reduce_dst_field: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - # - # return dst_field - def is_created_weight_matrix(self, erase=False): """ Checks if the weight matrix is created @@ -253,29 +172,6 @@ class Regrid(object): return os.path.exists(self.weight_matrix_file) - # def start_regridding(self, values): - # """ - # Start the regridding process for the emission. - # It will create the weight matrix if it is not already created. - # - # :param values: Input values to regrid. - # :type values: numpy.ndarray - # - # :return: Values already regridded. - # :rtype: numpy.ndarray - # """ - # if settings.log_level_3: - # st_time = timeit.default_timer() - # else: - # st_time = None - # - # values = self.apply_weights(values) - # - # if settings.log_level_3: - # print 'TIME -> Regrid.start_regridding: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - # - # return values - if __name__ == '__main__': pass diff --git a/hermesv3_gr/modules/regrid/regrid_conservative.py b/hermesv3_gr/modules/regrid/regrid_conservative.py index c4fdeae..ff8d509 100644 --- a/hermesv3_gr/modules/regrid/regrid_conservative.py +++ b/hermesv3_gr/modules/regrid/regrid_conservative.py @@ -18,7 +18,6 @@ # along with HERMESv3_GR. If not, see . 
import os -import sys import numpy as np import timeit import ESMF @@ -26,32 +25,9 @@ import ESMF import hermesv3_gr.config.settings as settings from regrid import Regrid -# from mpi4py import MPI -# icomm = MPI.COMM_WORLD -# comm = icomm.Split(color=0, key=0) -# nprocs = comm.Get_size() -# rank = comm.Get_rank() - class ConservativeRegrid(Regrid): - """ - ConservativeRegrid class that contains all the needed information to do the conservative regrid. - - :param input_file: Path to the input emission file. - :type input_file: str - - :param pollutants: Pollutants available of the input file. - :type pollutants: list - - :param weight_matrix_file: Path to the weight matrix that will be created if it is not created yet. - :type weight_matrix_file: str - - :param grid: ... - :type grid: Grid - - :param masking: ... - :type masking: Masking - """ + # TODO Documentation def __init__(self, pollutant_dicts, weight_matrix_file, grid, masking=None): st_time = timeit.default_timer() settings.write_log('\t\tInitializing Conservative regrid.', level=2) @@ -60,8 +36,6 @@ class ConservativeRegrid(Regrid): settings.write_time('ConservativeRegrid', 'Init', timeit.default_timer() - st_time, level=2) - return None - def create_weight_matrix(self): """ Calls to ESMF_RegridWeightGen to generate the weight matrix. 
@@ -71,16 +45,17 @@ class ConservativeRegrid(Regrid): src_grid = self.grid.create_esmf_grid_from_file(self.pollutant_dicts[0]['path']) src_field = ESMF.Field(src_grid, name='my input field') - src_field.read(filename=self.pollutant_dicts[0]['path'], variable=self.pollutant_dicts[0]['name'], timeslice=0) + src_field.read(filename=self.pollutant_dicts[0]['path'], variable=self.pollutant_dicts[0]['name'], + timeslice=[0]) dst_grid = self.grid.esmf_grid dst_field = ESMF.Field(dst_grid, name='my outut field') - regrid = ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.CONSERVE, )#src_mask_values=self.masking.regrid_mask) - # regrid = ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.BILINEAR, )#src_mask_values=self.masking.regrid_mask) + ESMF.Regrid(src_field, dst_field, filename=self.weight_matrix_file, regrid_method=ESMF.RegridMethod.CONSERVE,) settings.write_time('ConservativeRegrid', 'create_weight_matrix', timeit.default_timer() - st_time, level=1) def start_regridding(self, gfas=False, vertical=None): + # TODO Documentation from hermesv3_gr.tools.netcdf_tools import extract_vars st_time = timeit.default_timer() @@ -88,7 +63,7 @@ class ConservativeRegrid(Regrid): weights = self.read_weight_matrix() dst_field_list = [] - num =1 + num = 1 for pollutant_single_dict in self.pollutant_dicts: settings.write_log('\t\tPollutant {0} ({1}/{2})'.format( pollutant_single_dict['name'], num, len(self.pollutant_dicts)), level=3) @@ -100,10 +75,10 @@ class ConservativeRegrid(Regrid): values = vertical.do_vertical_interpolation_allocation(values, vertical.altitude) # Do masking if self.masking.regrid_mask is not None: - values = np.multiply(values, self.masking.regrid_mask, dtype=settings.precision) + values = np.multiply(values, self.masking.regrid_mask) # Do scalling if self.masking.scale_mask is not None: - values = np.multiply(values, self.masking.scale_mask, 
dtype=settings.precision) + values = np.multiply(values, self.masking.scale_mask) if gfas: values = values.reshape((values.shape[-3], values.shape[-2] * values.shape[-1],)) else: @@ -116,7 +91,7 @@ class ConservativeRegrid(Regrid): src_aux = np.take(values, weights['col'], axis=1) # Apply weights - dst_field_aux = np.multiply(src_aux, weights['S'], dtype=settings.precision) + dst_field_aux = np.multiply(src_aux, weights['S']) # Reduce dst values dst_field = self.reduce_dst_field(dst_field_aux, new_dst_indices, self.grid.shape[-1] * self.grid.shape[-2]) @@ -132,49 +107,6 @@ class ConservativeRegrid(Regrid): settings.write_time('ConservativeRegrid', 'start_regridding', timeit.default_timer() - st_time, level=3) return dst_field_list - # def start_carles_regridding_1(self): - # from hermesv3_gr.tools.netcdf_tools import extract_vars - # - # weights = self.read_weight_matrix() - # print 'Weights Rank {3} col: {0} row: {1} S: {2}'.format(weights['col'].shape, weights['row'].shape, - # weights['S'].shape, settings.rank) - # - # dst_field_list = [] - # for pollutant_single_dict in self.pollutant_dicts: - # if settings.log_level_2: - # print '\t\t {0}'.format(pollutant_single_dict['name']) - # # dst_field = np.zeros(((self.grid.x_upper_bound - self.grid.x_lower_bound)*(self.grid.y_upper_bound - self.grid.y_lower_bound),)) - # dst_field = np.zeros((weights['max'],)) - # - # [values] = extract_vars(pollutant_single_dict['path'], [pollutant_single_dict['name']]) - # values = values['data'].reshape((values['data'].shape[-1]*values['data'].shape[-2],)) - # # print 'VALUES SUM {0}'.format(values.sum()) - # # print dst_field.shape, weights['row'].max() - # # print values.shape - # # exit() - # # print 'Weights Rank {0} dst_field: {1} '.format(rank, dst_field.shape) - # # if rank == 1: - # if True: - # for i in xrange(len(weights['S'])): - # try: - # dst_field[weights['row'][i] - 1] += weights['S'][i] * values[weights['col'][i] - 1] - # # print '{0}/{1} {2}%'.format(i+1, 
len(weights['S']), round((i+1)*100/len(weights['S'])),2) - # # print values[weights['col'][i] - 1], weights['col'][i] - 1 - # # print dst_field[weights['row'][i] - 1], weights['S'][i], values[weights['col'][i] - 1] - # except: - # print values[weights['col'][i] - 1] - # print weights['S'][i] - # print dst_field[weights['row'][i] - 1] - # dst_field = dst_field.reshape(len(self.grid.center_latitudes),len(self.grid.center_longitudes)) - # dst_field = dst_field[self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] - # - # print 'RESHAPE: {0}'.format((self.grid.x_upper_bound - self.grid.x_lower_bound, self.grid.y_upper_bound - self.grid.y_lower_bound)) - # dst_field_list.append({'data': dst_field, 'name': pollutant_single_dict['name']}) - # - # # dst_field_list.append({'data': dst_field.reshape((self.grid.x_upper_bound - self.grid.x_lower_bound, self.grid.y_upper_bound - self.grid.y_lower_bound)), 'name': pollutant_single_dict['name']}) - # # dst_field_list.append({'data': dst_field.reshape((self.grid.y_upper_bound - self.grid.y_lower_bound,self.grid.x_upper_bound - self.grid.x_lower_bound,)), 'name': pollutant_single_dict['name']}) - # return dst_field_list - def read_weight_matrix(self): from netCDF4 import Dataset dict_aux = {} @@ -196,9 +128,9 @@ class ConservativeRegrid(Regrid): try: if settings.rank != 0: - dict_aux['col'] = dict_aux['col'][index[settings.rank] +1: index[settings.rank + 1] + 1] - dict_aux['row'] = dict_aux['row'][index[settings.rank] +1: index[settings.rank + 1] + 1] - dict_aux['S'] = dict_aux['S'][index[settings.rank] +1: index[settings.rank + 1] + 1] + dict_aux['col'] = dict_aux['col'][index[settings.rank] + 1: index[settings.rank + 1] + 1] + dict_aux['row'] = dict_aux['row'][index[settings.rank] + 1: index[settings.rank + 1] + 1] + dict_aux['S'] = dict_aux['S'][index[settings.rank] + 1: index[settings.rank + 1] + 1] else: dict_aux['col'] = dict_aux['col'][: index[settings.rank + 1] + 1] 
dict_aux['row'] = dict_aux['row'][: index[settings.rank + 1] + 1] @@ -232,106 +164,6 @@ class ConservativeRegrid(Regrid): time.sleep(5) print "I'm {0}".format(settings.rank), 'Waiting Weight Matrix' - def re_order_weight_matrix(self): - """ - Takes the ESMF Weight Matrix and re-order it in our way to re-write it. - - It will re-order and re-name the variables: - - dst_indices = ordered (row) - src_indices = ordered (col) - weights = ordered (S) - - dst_indices_count: - dst_indices = [0, 0, 0, 1, 1, 1, 2, 2, 2, 2] - dst_indices_count = [ 3, 6, 10] - - """ - from netCDF4 import Dataset - import numpy as np - - if settings.log_level_3: - st_time = timeit.default_timer() - - # Read ESMF Weight matrix NetCDF - nc_weights = Dataset(self.weight_matrix_file, mode='r') - - row = nc_weights.variables['row'][:] - col = nc_weights.variables['col'][:] - s = nc_weights.variables['S'][:] - - nc_weights.close() - - # Re-order and re-name values - indices = np.argsort(row) - dst_indices = np.array(row)[indices] - 1 - weights = np.array(s, dtype=settings.precision)[indices] - src_indices = np.array(col)[indices] - 1 - - # Short indices - # dst_indices = [0, 0, 0, 1, 1, 1, 2, 2, 2, 2] - # new_dst_indices = [3, 6, 10] - unique, counts = np.unique(dst_indices, return_counts=True) - new_dst_indices = np.cumsum(counts) - - # Create new weight matrix NetCDF - nc_weights = Dataset(self.weight_matrix_file, mode='w') - - nc_weights.createDimension('dim', len(indices)) - nc_weights.createDimension('dim2', new_dst_indices.shape[0]) - - nc_dst_indices = nc_weights.createVariable('dst_indices', 'i', ('dim',), zlib=True) - nc_dst_indices[:] = dst_indices - - nc_dst_indices_count = nc_weights.createVariable('dst_indices_count', 'i', ('dim2',), zlib=True) - nc_dst_indices_count[:] = new_dst_indices - - nc_src_indices = nc_weights.createVariable('src_indices', 'i', ('dim',), zlib=True) - nc_src_indices[:] = src_indices - - weights_var = nc_weights.createVariable('weights', 'd', ('dim',), zlib=True) 
- weights_var[:] = weights - - nc_weights.close() - - if settings.log_level_3: - print 'TIME -> ConservativeRegrid.re_order_weight_matrix: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - - def apply_weights_test(self, values): - """ - Calculates the regridded values using the ESMF algorithm for a 3D array specifically for a conservative regrid. - - :param values: Input values to regrid. - :type values: numpy.ndarray - - :return: Values already regridded. - :rtype: numpy.ndarray - """ - from netCDF4 import Dataset - if settings.log_level_3: - st_time = timeit.default_timer() - dst_field = super(ConservativeRegrid, self).apply_weights(values) - - nc_weights = Dataset(self.weight_matrix_file, mode='r') - - n_b = nc_weights.dimensions['n_b'].size - frac_b = nc_weights.variables['frac_b'][:] - - nc_weights.close() - - # ! Adjust destination field by fraction - # do i=1, n_b - # if (frac_b(i) .ne. 0.0) then - # dst_field(i)=dst_field(i)/frac_b(i) - # endif - # enddo - for i in xrange(n_b): - if frac_b[i] != 0: - dst_field[:, i] = dst_field[:, i] / frac_b[i] - if settings.log_level_3: - print 'TIME -> ConservativeRegrid.apply_weights: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - return dst_field - def apply_weights(self, values): """ Calculates the regridded values using the ESMF algorithm for a 3D array specifically for a conservative regrid. 
@@ -350,4 +182,3 @@ class ConservativeRegrid(Regrid): if __name__ == '__main__': pass - -- GitLab From 4626c016ff1979d7087ddabc299bd630b6ba9259 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 13:22:55 +0200 Subject: [PATCH 10/51] Added codacy badge and some cleaning --- README.md | 3 ++- hermesv3_gr/hermes.py | 7 +------ setup.py | 22 ++++++++-------------- 3 files changed, 11 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 9146f78..ca8c806 100644 --- a/README.md +++ b/README.md @@ -1 +1,2 @@ -# HERMESv3 Global/Regional \ No newline at end of file +# HERMESv3 Global/Regional +[![Codacy Badge](https://api.codacy.com/project/badge/Grade/34fc5d6c803444178034b99dd28c7e3c)](https://www.codacy.com/app/carlestena/hermesv3_gr?utm_source=earth.bsc.es&utm_medium=referral&utm_content=gitlab/es/hermesv3_gr&utm_campaign=Badge_Grade) \ No newline at end of file diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index 9ab582e..2ca7ab6 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -18,24 +18,19 @@ # along with HERMESv3_GR. If not, see . -import sys -import os -from mpi4py import MPI - import timeit from hermesv3_gr.config import settings from hermesv3_gr.config.config import Config from hermesv3_gr.modules.emision_inventories.emission_inventory import EmissionInventory from hermesv3_gr.modules.vertical.vertical import VerticalDistribution -from hermesv3_gr.modules.temporal.temporal import TemporalDistribution -from hermesv3_gr.modules.writing.writer_cmaq import WriterCmaq from hermesv3_gr.tools.netcdf_tools import * # import pyextrae.sequential as pyextrae global full_time + class Hermes(object): """ Interface class for HERMESv3. 
diff --git a/setup.py b/setup.py index 91aeac8..ede6bbd 100644 --- a/setup.py +++ b/setup.py @@ -55,13 +55,10 @@ setup( 'pyproj', 'configargparse', 'cf_units>=1.1.3', - # 'calendar', 'ESMPy>=7.1.0', 'holidays', 'pytz', - # 're', 'timezonefinder', - # 'unittest' 'mpi4py' ], packages=find_packages(), @@ -72,17 +69,14 @@ setup( "Topic :: Scientific/Engineering :: Atmospheric Science" ], include_package_data=True, - # package_data={'hermes_v3': [ - # 'autosubmit/config/files/autosubmit.conf', - # 'autosubmit/config/files/expdef.conf', - # 'autosubmit/database/data/autosubmit.sql', - # 'README', - # 'CHANGELOG', - # 'VERSION', - # 'LICENSE', - # 'docs/autosubmit.pdf' - # ] - # }, + package_data={'hermesv3_gr': [ + 'data/*', + 'README', + 'CHANGELOG', + 'VERSION', + 'LICENSE', + ] + }, scripts=['bin/hermes_gr'] ) -- GitLab From 52fb1206210a3f110eacd33f2a998d4134f1f13f Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 15:00:24 +0200 Subject: [PATCH 11/51] Some cleaning and first version of conda environment --- .codacy.yml | 21 + .gitlab-ci.yml | 48 +++ .prospector.yml | 36 ++ .pylintrc | 407 ++++++++++++++++++ environment.yml | 22 + hermesv3_gr/modules/regrid/regrid.py | 35 +- hermesv3_gr/modules/speciation/speciation.py | 5 - hermesv3_gr/modules/temporal/temporal.py | 275 ++---------- hermesv3_gr/modules/vertical/vertical.py | 12 +- hermesv3_gr/modules/vertical/vertical_gfas.py | 14 +- hermesv3_gr/modules/writing/writer.py | 15 +- 11 files changed, 583 insertions(+), 307 deletions(-) create mode 100644 .codacy.yml create mode 100644 .gitlab-ci.yml create mode 100644 .prospector.yml create mode 100644 .pylintrc create mode 100644 environment.yml diff --git a/.codacy.yml b/.codacy.yml new file mode 100644 index 0000000..3e730e5 --- /dev/null +++ b/.codacy.yml @@ -0,0 +1,21 @@ +# codacy configuration file + +--- + +engines: + coverage: + enabled: true + metrics: + enabled: true + duplication: + enabled: true + prospector: + enabled: true + pylint: + enabled: 
true + python_version: 3 + +exclude_paths: [ + 'doc/**', + 'earthdiagnostics/cmor_tables/**', +] diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..87db4e1 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,48 @@ +before_script: + - export GIT_SSL_NO_VERIFY=1 + - export PATH="$HOME/miniconda2/bin:$PATH" + +stages: + - prepare + - test + - report + - clean + +cache: + paths: + - test/report + +prepare: + stage: prepare + script: + - conda update conda + +test_python2: + stage: test + script: + - conda env update -f environment.yml -n hermesv3_gr python=2.7 + - source activate hermesv3_gr + - python run_test.py + +#test_python3: +# stage: test +# script: +# - git submodule sync --recursive +# - git submodule update --init --recursive +# - conda env update -f environment.yml -n earthdiagnostics3 python=3.6 +# - source activate earthdiagnostics3 +# - python run_test.py + +report_codacy: + stage: report + script: + - source activate hermesv3_gr + - pip install codacy-coverage --upgrade + - python-codacy-coverage -r test/report/python2/coverage.xml + +clean: + stage: clean + script: + - conda clean --all --yes + + diff --git a/.prospector.yml b/.prospector.yml new file mode 100644 index 0000000..b9c6fa9 --- /dev/null +++ b/.prospector.yml @@ -0,0 +1,36 @@ +# prospector configuration file + +--- + +output-format: grouped + +strictness: veryhigh +doc-warnings: true +test-warnings: true +member-warnings: false + +pyroma: + run: true + +pydocroma: + run: true + +pep8: + disable: [ + E501, # Line-length, already controlled by pylint + ] + +pep257: + run: true + # see http://pep257.readthedocs.io/en/latest/error_codes.html + disable: [ + # For short descriptions it makes sense not to end with a period: + D400, # First line should end with a period + # Disable because not part of PEP257 official convention: + D203, # 1 blank line required before class docstring + D212, # Multi-line docstring summary should start at the first line + D213, # Multi-line 
docstring summary should start at the second line + D404, # First word of the docstring should not be This + D107, # We are using numpy style and constructor should be documented in class docstring + D105, # Docstring in magic methods should not be required: we all now what they are for + ] diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..4094c1f --- /dev/null +++ b/.pylintrc @@ -0,0 +1,407 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Allow optimization of some AST trees. This will activate a peephole AST +# optimizer, which will apply various small optimizations. For instance, it can +# be used to obtain the result of joining multiple strings with the addition +# operator. Joining a lot of strings can lead to a maximum recursion error in +# Pylint and this flag can prevent that. It has one side effect, the resulting +# AST will be different than the one from reality. This option is deprecated +# and it will be removed in Pylint 2.0. 
+optimize-ast=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". 
If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". This option is deprecated +# and it will be removed in Pylint 2.0. +files-output=no + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). 
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=79 + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format=LF + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,FIX-ME,XXX,TODO + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# List of strings which can identify a callback function by name. 
A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + + +[BASIC] + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_,logger + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=yes + +# List of decorators that produce properties, such as abc.abstractproperty. 
Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Regular expression matching correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for function names +function-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for variable names +variable-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for attribute names +attr-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for argument names +argument-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for method names +method-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match function or class 
names that do +# not require a docstring. +no-docstring-rgx=^_ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[ELIF] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). 
+max-public-methods=20 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec,optparse + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000..168dd4e --- /dev/null +++ b/environment.yml @@ -0,0 +1,22 @@ +--- + +name: hermesv3_gr + +channels: + - conda-forge + +dependencies: + - numpy + - netcdf4 >= 1.3.1 + - python-cdo >= 1.3.4 + - geopandas + - pyproj + - configargparse + - cf_units >= 1.1.3 + - esmpy >= 7.1.0r + - pytz + - timezonefinder + - mpi4py + + - pip: + - holidays diff --git a/hermesv3_gr/modules/regrid/regrid.py b/hermesv3_gr/modules/regrid/regrid.py index 2edbc06..7304084 100644 --- a/hermesv3_gr/modules/regrid/regrid.py +++ b/hermesv3_gr/modules/regrid/regrid.py @@ -18,32 +18,13 @@ # along with HERMESv3_GR. If not, see . import os -import sys import numpy as np import timeit import hermesv3_gr.config.settings as settings class Regrid(object): - """ - Regrid class that contains all the needed information to do the regrid. - - :param input_file: Path to the input emission file. - :type input_file: str - - :param pollutants: Pollutants available of the input file. - :type pollutants: list - - :param weight_matrix_file: Path to the weight matrix. - :type weight_matrix_file: str - - :param grid: Grid object. - :type grid: Grid - - :param masking: Masking object. 
- Default = None - :type masking: Masking - """ + # TODO Documentation def __init__(self, pollutant_dicts, weight_matrix_file, grid, masking=None): st_time = timeit.default_timer() settings.write_log('\t\t\tInitializing Regrid.', level=3) @@ -94,17 +75,17 @@ class Regrid(object): # Do masking if self.masking.regrid_mask is not None: - values = np.multiply(values, self.masking.regrid_mask, dtype=settings.precision) + values = np.multiply(values, self.masking.regrid_mask) # Do scalling if self.masking.scale_mask is not None: - values = np.multiply(values, self.masking.scale_mask, dtype=settings.precision) + values = np.multiply(values, self.masking.scale_mask) values = values.reshape(values.shape[1], values.shape[2] * values.shape[3]) # Expand src values src_aux = np.take(values, src_indices, axis=1) # Apply weights - dst_field_aux = np.multiply(src_aux, weights, dtype=settings.precision) + dst_field_aux = np.multiply(src_aux, weights) # Reduce dst values dst_field = self.reduce_dst_field(dst_field_aux, dst_indices_counts, max_index) @@ -126,8 +107,8 @@ class Regrid(object): result = [0+1+2, 3+4+5+, 6+7+8+9] result = [3, 12, 30] - :param dst_field_extended: Array with as many elements as interconnections between src and dst with the dst values - to be gathered. + :param dst_field_extended: Array with as many elements as interconnections between src and dst with the dst + values to be gathered. 
:type dst_field_extended: numpy.array :param dst_indices: Array with the last element index to @@ -143,14 +124,14 @@ class Regrid(object): # Create new dst_field = np.zeros((dst_field_extended.shape[0], max_index), dtype=settings.precision) - # dst_field = np.zeros((dst_field_extended.shape[0], self.grid.shape[-1] * self.grid.shape[-2]), dtype=settings.precision) + # dst_field = np.zeros((dst_field_extended.shape[0], self.grid.shape[-1] * self.grid.shape[-2])) previous = 0 count = 0 for i in dst_indices: try: dst_field[:, count] = dst_field_extended[:, previous:i].sum(axis=1, dtype=settings.precision) - except: + except ValueError: pass count += 1 previous = i diff --git a/hermesv3_gr/modules/speciation/speciation.py b/hermesv3_gr/modules/speciation/speciation.py index 09676c8..ac6d17b 100644 --- a/hermesv3_gr/modules/speciation/speciation.py +++ b/hermesv3_gr/modules/speciation/speciation.py @@ -18,10 +18,8 @@ # along with HERMESv3_GR. If not, see . -import os import sys import timeit - import hermesv3_gr.config.settings as settings from warnings import warn as warning @@ -50,8 +48,6 @@ class Speciation(object): settings.write_time('Speciation', 'Init', timeit.default_timer() - st_time, level=2) - return None - def get_speciation_profile(self, speciation_profile_path): """ Extracts the speciation information as a dictionary with the destiny pollutant as key and the formula as value. @@ -133,7 +129,6 @@ class Speciation(object): :return: List of emissions already speciated. 
:rtype: list """ - from cf_units import Unit import numpy as np st_time = timeit.default_timer() diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index 7b552a2..98fd6d3 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -142,8 +142,6 @@ class TemporalDistribution(object): settings.write_time('TemporalDistribution', 'Init', timeit.default_timer() - st_time, level=2) - return None - def calculate_ending_date(self): """ Calculates the date of the last timestep. @@ -238,7 +236,7 @@ class TemporalDistribution(object): try: tz_id = tz_id[0] - except IndexError as e: + except IndexError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: raise ValueError("ERROR: The timezone '{0}' is not in the {1} file. ".format(tz, self.world_info) + @@ -368,7 +366,8 @@ class TemporalDistribution(object): write_netcdf(self.netcdf_timezones, total_lat, total_lon, data, RegularLatLon=True) settings.comm.Barrier() - settings.write_time('TemporalDistribution', 'create_netcdf_timezones', timeit.default_timer() - st_time, level=2) + settings.write_time('TemporalDistribution', 'create_netcdf_timezones', timeit.default_timer() - st_time, + level=2) return True @@ -398,7 +397,6 @@ class TemporalDistribution(object): :rtype: numpy.chararray """ from netCDF4 import Dataset - from hermesv3_gr.tools.netcdf_tools import extract_vars st_time = timeit.default_timer() @@ -440,7 +438,7 @@ class TemporalDistribution(object): try: df['local'] = df.groupby('tz')['utc'].apply( lambda x: pd.to_datetime(x).dt.tz_localize(pytz.utc).dt.tz_convert(x.name).dt.tz_localize(None)) - except Exception: + except: df['local'] = df.groupby('tz')['utc'].apply( lambda x: pd.to_datetime(x).dt.tz_localize(pytz.utc).dt.tz_convert( self.parse_tz(x.name)).dt.tz_localize(None)) @@ -506,7 +504,8 @@ class TemporalDistribution(object): factors = 
np.array(df['factor'].values).reshape((self.timezones_array.shape[1], self.timezones_array.shape[2])) del df - settings.write_time('TemporalDistribution', 'calculate_2d_temporal_factors', timeit.default_timer() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_2d_temporal_factors', timeit.default_timer() - st_time, + level=3) return factors @@ -514,12 +513,6 @@ class TemporalDistribution(object): """ Calculates the temporal factor to correct the input data of the given date for each cell. - :param date: Date of the current timestep. - :type date: datetime.datetime - - :param timezones: Array of the timezones - :type timezones: numpy.chararray - :return: 3D array with the factors to correct the input data to the date of this timestep. :rtype: numpy.array """ @@ -543,7 +536,8 @@ class TemporalDistribution(object): factors = np.array(factors) - settings.write_time('TemporalDistribution', 'calculate_3d_temporal_factors', timeit.default_timer() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_3d_temporal_factors', timeit.default_timer() - st_time, + level=3) return factors def parse_hourly_profile_id(self): @@ -556,7 +550,7 @@ class TemporalDistribution(object): dict_aux = {} list_aux = list(map(str, re.split(' , | ,|, |,| ', self.hourly_profile))) for element in list_aux: - key_value_list = list(map(str, re.split(':| :| :| : |=| =| =| = ', element))) + key_value_list = list(map(str, re.split(':| :|: | : |=| =|= | = ', element))) dict_aux[key_value_list[0]] = key_value_list[1] return dict_aux @@ -587,14 +581,16 @@ class TemporalDistribution(object): except IndexError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: - raise AttributeError('ERROR: Hourly profile ID {0} is not in the {1} file.'.format(profile_id, self.hourly_profile_path)) + raise AttributeError('ERROR: Hourly profile ID {0} is not in the {1} file.'.format( + profile_id, self.hourly_profile_path)) sys.exit(1) 
profile.pop('TP_H', None) profile = {int(k): float(v) for k, v in profile.items()} else: # print self.hourly_profile profile = None - settings.write_time('TemporalDistribution', 'get_temporal_hourly_profile', timeit.default_timer() - st_time, level=3) + settings.write_time('TemporalDistribution', 'get_temporal_hourly_profile', timeit.default_timer() - st_time, + level=3) return profile @@ -620,7 +616,8 @@ class TemporalDistribution(object): except IndexError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: - raise AttributeError('ERROR: Daily profile ID {0} is not in the {1} file.'.format(self.daily_profile_id, self.daily_profile_path)) + raise AttributeError('ERROR: Daily profile ID {0} is not in the {1} file.'.format( + self.daily_profile_id, self.daily_profile_path)) sys.exit(1) profile.pop('TP_D', None) profile_aux = {int(k): float(v) for k, v in profile.items()} @@ -629,7 +626,8 @@ class TemporalDistribution(object): else: profile = None - settings.write_time('TemporalDistribution', 'get_temporal_daily_profile', timeit.default_timer() - st_time, level=3) + settings.write_time('TemporalDistribution', 'get_temporal_daily_profile', timeit.default_timer() - st_time, + level=3) return profile @@ -652,7 +650,8 @@ class TemporalDistribution(object): weekdays = self.calculate_weekdays(date) rebalance_factor = self.calculate_weekday_factor_full_month(profile, weekdays) - settings.write_time('TemporalDistribution', 'calculate_rebalance_factor', timeit.default_timer() - st_time, level=3) + settings.write_time('TemporalDistribution', 'calculate_rebalance_factor', timeit.default_timer() - st_time, + level=3) return rebalance_factor @@ -673,7 +672,8 @@ class TemporalDistribution(object): weekdays_factors += profile[day] * weekdays[day] num_days += weekdays[day] - settings.write_time('TemporalDistribution', 'calculate_weekday_factor_full_month', timeit.default_timer() - st_time, level=3) + settings.write_time('TemporalDistribution', 
'calculate_weekday_factor_full_month', + timeit.default_timer() - st_time, level=3) return (num_days - weekdays_factors) / num_days @@ -724,21 +724,23 @@ class TemporalDistribution(object): except IndexError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: - raise AttributeError('ERROR: Monthly profile ID {0} is not in the {1} file.'.format(profile_id, profile_path)) + raise AttributeError('ERROR: Monthly profile ID {0} is not in the {1} file.'.format( + profile_id, profile_path)) sys.exit(1) profile.pop('TP_M', None) profile = {int(k): float(v) for k, v in profile.items()} else: profile = None - settings.write_time('TemporalDistribution', 'get_temporal_monthly_profile', timeit.default_timer() - st_time, level=2) + settings.write_time('TemporalDistribution', 'get_temporal_monthly_profile', timeit.default_timer() - st_time, + level=2) return profile @staticmethod def calculate_delta_hours(st_date, time_step_type, time_step_num, time_step_freq): from datetime import timedelta - from calendar import monthrange + from calendar import monthrange, isleap st_time = timeit.default_timer() @@ -787,231 +789,6 @@ class TemporalDistribution(object): return hours_since - # # @profile - # def do_temporal(self, data, grid): - # """ - # Manages all the steps to get the temporal disaggregation. - # - # :param data: list of emission to disaggregate. - # :type data: list - # - # :param grid: Destination grid. - # :type grid: Grid - # - # :return: Same data but already temporally disaggregated. - # :rtype: list - # """ - # import copy - # - # if settings.log_level_3: - # st_time = timeit.default_timer() - # else: - # st_time = None - # - # # if not self.is_created_netcdf_timezones(): - # # print "The timezones NetCDF is not created. Lets try to create it." 
- # # self.create_netcdf_timezones(grid) - # # self.timezones_array = self.calculate_timezones() - # - # data_empty = [] - # for element in data: - # # print element - # dict_aux = element.copy() - # # dict_aux['data'] = [] - # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) - # data_empty.append(dict_aux) - # data_to_fill = copy.deepcopy(data_empty) - # - # date_aux = self.starting_date - # count = 0 - # while date_aux <= self.ending_date: - # count += 1 - # if settings.log_level_2: - # print '\t\tDoing {0}/{1} time step.'.format(count, self.timestep_num) - # temporal_data = self.calculate_time_step(data, data_empty, date_aux) - # - # # Copy data to final - # for i in xrange(len(data_to_fill)): - # if temporal_data[i]['data'] is 0: - # data_to_fill[i]['data'] = 0 - # else: - # if data_to_fill[i]['data'] is None: - # data_to_fill[i]['data'] = temporal_data[i]['data'].reshape( - # (1,) + temporal_data[i]['data'].shape) - # else: - # data_to_fill[i]['data'] = np.append(data_to_fill[i]['data'], - # temporal_data[i]['data'].reshape( - # (1,) + temporal_data[i]['data'].shape), - # axis=0) - # - # # factors = self.calculate_2d_temporal_factors(date_aux) - # # for i in xrange(len(data_empty)): - # # if data[i]['data'] is not 0: - # # aux_data = data[i]['data'] * factors - # # if count == 1: - # # data_empty[i]['data'] = aux_data.reshape((1,) + aux_data.shape) - # # else: - # # data_empty[i]['data'] = np.append(data_empty[i]['data'], aux_data.reshape((1,) + aux_data.shape), axis=0) - # # else: - # # data_empty[i]['data'] = data[i]['data'] - # - # d = date_aux - self.starting_date - # self.hours_since.append(d.seconds / 3600 + d.days * 24) # 3600 seconds per hour - # date_aux = date_aux + self.calculate_timedelta(date_aux) - # - # if settings.log_level_3: - # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - # - # return data_to_fill - # - # # @profile - # def do_temporal_and_write(self, data, 
grid, nc_out): - # """ - # Manages all the steps to get the temporal disaggregation. - # - # :param data: list of emission to disaggregate. - # :type data: list - # - # :param grid: Destination grid. - # :type grid: Grid - # - # :return: Same data but already temporally disaggregated. - # :rtype: list - # """ - # from hermesv3_gr.tools.netcdf_tools import fill_netcdf - # import copy - # - # if settings.log_level_3: - # st_time = timeit.default_timer() - # else: - # st_time = None - # - # if not self.is_created_netcdf_timezones(): - # print "The timezones NetCDF is not created. Lets try to create it." - # self.create_netcdf_timezones(grid) - # # self.timezones_array = self.calculate_timezones() - # - # data_empty = [] - # for element in data: - # # print element - # dict_aux = element.copy() - # # dict_aux['data'] = [] - # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) - # data_empty.append(dict_aux) - # data_to_fill = copy.deepcopy(data_empty) - # - # date_aux = self.starting_date - # count = 0 - # while date_aux <= self.ending_date: - # - # if settings.log_level_2: - # print '\t\tDoing {0}/{1} time step.'.format(count + 1, self.timestep_num) - # temporal_data = self.calculate_time_step(data, data_empty, date_aux) - # - # fill_netcdf(count, nc_out, temporal_data) - # - # # Copy data to final - # if False: - # # for i in xrange(len(data_to_fill)): - # if temporal_data[i]['data'] is 0: - # data_to_fill[i]['data'] = 0 - # else: - # if data_to_fill[i]['data'] is None: - # data_to_fill[i]['data'] = temporal_data[i]['data'].reshape( - # (1,) + temporal_data[i]['data'].shape) - # else: - # data_to_fill[i]['data'] = np.append(data_to_fill[i]['data'], - # temporal_data[i]['data'].reshape( - # (1,) + temporal_data[i]['data'].shape), - # axis=0) - # - # # factors = self.calculate_2d_temporal_factors(date_aux) - # # for i in xrange(len(data_empty)): - # # if data[i]['data'] is not 0: - # # aux_data = data[i]['data'] * factors - # # if count == 1: - # # 
data_empty[i]['data'] = aux_data.reshape((1,) + aux_data.shape) - # # else: - # # data_empty[i]['data'] = np.append(data_empty[i]['data'], aux_data.reshape((1,) + aux_data.shape), axis=0) - # # else: - # # data_empty[i]['data'] = data[i]['data'] - # - # d = date_aux - self.starting_date - # self.hours_since.append(d.seconds / 3600 + d.days * 24) # 3600 seconds per hour - # date_aux = date_aux + self.calculate_timedelta(date_aux) - # count += 1 - # - # if settings.log_level_3: - # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - # - # # @profile - # def do_time_step(self, date, data, grid): - # """ - # Manages all the steps to get the temporal disaggregation. - # - # :param data: list of emission to disaggregate. - # :type data: list - # - # :param grid: Destination grid. - # :type grid: Grid - # - # :return: Same data but already temporally disaggregated. - # :rtype: list - # """ - # from hermesv3_gr.tools.netcdf_tools import fill_netcdf - # import copy - # - # if settings.log_level_3: - # st_time = timeit.default_timer() - # else: - # st_time = None - # - # if not self.is_created_netcdf_timezones(): - # print "The timezones NetCDF is not created. Lets try to create it." 
- # self.create_netcdf_timezones(grid) - # # self.timezones_array = self.calculate_timezones() - # - # data_empty = [] - # for element in data: - # # print element - # dict_aux = element.copy() - # # dict_aux['data'] = [] - # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) - # data_empty.append(dict_aux) - # # data_to_fill = copy.deepcopy(data_empty) - # - # temporal_data = self.calculate_time_step(data, data_empty, date) - # - # if settings.log_level_3: - # print 'TIME -> TemporalDistribution.do_temporal: {0} s'.format(round(timeit.default_timer() - st_time, 2)) - # return temporal_data - # - # def calculate_time_step(self, data, data_empty, date): - # factors = self.calculate_2d_temporal_factors(date) - # for i in xrange(len(data_empty)): - # if data[i]['data'] is not 0: - # # print "data[i]['data'].shape, factors.shape", data[i]['data'].shape, factors.shape - # data_empty[i]['data'] = data[i]['data'] * factors - # # if data_empty[i]['data'] is None: - # # data_empty[i]['data'] = aux_data.reshape((1,) + aux_data.shape) - # # else: - # # data_empty[i]['data'] = np.append(data_empty[i]['data'], aux_data.reshape((1,) + aux_data.shape), axis=0) - # else: - # data_empty[i]['data'] = data[i]['data'] - # return data_empty - # - # @staticmethod - # def make_empty_copy(data): - # temporal_data = [] - # for element in data: - # # print element - # dict_aux = element.copy() - # # dict_aux['data'] = [] - # dict_aux['data'] = None # np.ndarray((1,) + element['data'].shape) - # temporal_data.append(dict_aux) - # - # return temporal_data - if __name__ == '__main__': pass diff --git a/hermesv3_gr/modules/vertical/vertical.py b/hermesv3_gr/modules/vertical/vertical.py index dea4cd5..de9041f 100644 --- a/hermesv3_gr/modules/vertical/vertical.py +++ b/hermesv3_gr/modules/vertical/vertical.py @@ -18,10 +18,8 @@ # along with HERMESv3_GR. If not, see . 
-import os import sys import timeit - import hermesv3_gr.config.settings as settings @@ -112,7 +110,8 @@ class VerticalDistribution(object): heights = df.height_magl.values - settings.write_time('VerticalDistribution', 'get_vertical_output_profile', timeit.default_timer() - st_time, level=3) + settings.write_time('VerticalDistribution', 'get_vertical_output_profile', timeit.default_timer() - st_time, + level=3) return heights @@ -176,7 +175,6 @@ class VerticalDistribution(object): return weights @staticmethod - # @profile def apply_weights(data, weights): """ Calculates the vertical distribution using the given data and weights. @@ -185,7 +183,7 @@ class VerticalDistribution(object): :type data: numpy.array :param weights: Weights of each layer. - :type weights: list of float + :type weights: numpy.array :return: Emissions already vertically distributed. :rtype: numpy.ndarray @@ -194,7 +192,7 @@ class VerticalDistribution(object): st_time = timeit.default_timer() - data_aux = np.multiply(weights.reshape(weights.shape + (1, 1)), data, dtype=settings.precision) + data_aux = np.multiply(weights.reshape(weights.shape + (1, 1)), data) settings.write_time('VerticalDistribution', 'apply_weights', timeit.default_timer() - st_time, level=3) @@ -202,8 +200,6 @@ class VerticalDistribution(object): @staticmethod def apply_weights_level(data, weight): - import numpy as np - st_time = timeit.default_timer() for emi in data: diff --git a/hermesv3_gr/modules/vertical/vertical_gfas.py b/hermesv3_gr/modules/vertical/vertical_gfas.py index 86fbfe8..08caf9f 100644 --- a/hermesv3_gr/modules/vertical/vertical_gfas.py +++ b/hermesv3_gr/modules/vertical/vertical_gfas.py @@ -18,10 +18,7 @@ # along with HERMESv3_GR. If not, see . 
-import os -import sys import timeit - import hermesv3_gr.config.settings as settings from vertical import VerticalDistribution @@ -45,8 +42,6 @@ class GfasVerticalDistribution(VerticalDistribution): settings.write_time('GfasVerticalDistribution', 'Init', timeit.default_timer() - st_time, level=3) - return None - @staticmethod def calculate_widths(heights_list): """ @@ -121,7 +116,7 @@ class GfasVerticalDistribution(VerticalDistribution): fires = np.zeros(top_fires.shape) for i in xrange(len(self.output_heights)): if top_fires[i].sum() != 0: - weight_list = self.get_weights(self.output_heights[0: i + 1]) + weight_list = self.get_weights(list(self.output_heights[0: i + 1])) for i_weight in xrange(len(weight_list)): fires[i_weight] += top_fires[i] * weight_list[i_weight] @@ -155,8 +150,8 @@ class GfasVerticalDistribution(VerticalDistribution): fire_list.append(aux_data) fire_list = np.array(fire_list).reshape((len(fire_list), values.shape[1], values.shape[2])) - settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation_allocation', timeit.default_timer() - st_time, - level=3) + settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation_allocation', + timeit.default_timer() - st_time, level=3) return fire_list def do_vertical_interpolation(self, values): @@ -173,7 +168,8 @@ class GfasVerticalDistribution(VerticalDistribution): fire_list = self.apply_approach(values) - settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation', timeit.default_timer() - st_time, level=3) + settings.write_time('GfasVerticalDistribution', 'do_vertical_interpolation', timeit.default_timer() - st_time, + level=3) return fire_list diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index 4fdf4ca..a110b6c 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -19,9 +19,7 @@ import timeit - from hermesv3_gr.config import settings -import os import sys @@ -43,8 
+41,6 @@ class Writer(object): self.global_attributes_path = global_attributes_path - return None - def write(self, inventory_list): st_time = timeit.default_timer() settings.write_log('') @@ -120,7 +116,8 @@ class Writer(object): vertical_time = timeit.default_timer() if ei.source_type == 'area': if ei.vertical_factors is not None: - aux_data = emission['data'][np.newaxis, :, :] * ei.vertical_factors[:, np.newaxis, np.newaxis] + aux_data = emission['data'][np.newaxis, :, :] * ei.vertical_factors[:, np.newaxis, + np.newaxis] else: if len(emission['data'].shape) != 3: aux_data = np.zeros((shape[1], shape[2], shape[3])) @@ -132,8 +129,8 @@ class Writer(object): aux_data[ei.location['layer'], ei.location['FID']] = emission['data'] aux_data = aux_data.reshape((shape[1], shape[2], shape[3])) - settings.write_time('VerticalDistribution', 'calculate_data_by_var', timeit.default_timer() - vertical_time, - level=2) + settings.write_time('VerticalDistribution', 'calculate_data_by_var', + timeit.default_timer() - vertical_time, level=2) del emission['data'] temporal_time = timeit.default_timer() @@ -143,8 +140,8 @@ class Writer(object): data += aux_data[np.newaxis, :, :, :] * ei.temporal_factors[:, np.newaxis, :, :] else: data += aux_data[np.newaxis, :, :, :] - settings.write_time('TemporalDistribution', 'calculate_data_by_var', timeit.default_timer() - temporal_time, - level=2) + settings.write_time('TemporalDistribution', 'calculate_data_by_var', + timeit.default_timer() - temporal_time, level=2) # Unit changes data = self.unit_change(variable, data) if data is not None: -- GitLab From 55bfd691cbd8f7d1e1318b653bf9a64ae1e2fa60 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 7 Sep 2018 17:49:07 +0200 Subject: [PATCH 12/51] First test installed done --- .codacy.yml | 4 ++-- environment.yml | 1 + hermesv3_gr/config/config.py | 6 +++--- hermesv3_gr/hermes.py | 12 +++++------- setup.py | 8 ++++++-- 5 files changed, 17 insertions(+), 14 deletions(-) diff --git a/.codacy.yml 
b/.codacy.yml index 3e730e5..bf12fc2 100644 --- a/.codacy.yml +++ b/.codacy.yml @@ -13,9 +13,9 @@ engines: enabled: true pylint: enabled: true - python_version: 3 + python_version: 2 exclude_paths: [ 'doc/**', - 'earthdiagnostics/cmor_tables/**', + 'data/**', ] diff --git a/environment.yml b/environment.yml index 168dd4e..9a70344 100644 --- a/environment.yml +++ b/environment.yml @@ -6,6 +6,7 @@ channels: - conda-forge dependencies: + - python = 2 - numpy - netcdf4 >= 1.3.1 - python-cdo >= 1.3.4 diff --git a/hermesv3_gr/config/config.py b/hermesv3_gr/config/config.py index c2b1ad2..196ddd9 100644 --- a/hermesv3_gr/config/config.py +++ b/hermesv3_gr/config/config.py @@ -52,7 +52,7 @@ class Config(ArgParser): help="Name of the output file. You can add the string '' that will be substitute by the " + "starting date of the simulation day.") p.add_argument('--start_date', required=True, help='Starting Date to simulate (UTC)') - p.add_argument('--end_date', required=False, + p.add_argument('--end_date', required=False, default=None, help='If you want to simulate more than one day you have to specify the ending date of ' + 'simulation in this parameter. If it is not set end_date = start_date.') @@ -166,8 +166,8 @@ class Config(ArgParser): exec("options.{0} = options.{0}.replace('', '{1}_{2}')".format( item, options.inc_x, options.inc_y)) - options.start_date = self._parse_start_date(str(options.start_date)) - options.end_date = self._parse_end_date(str(options.end_date), options.start_date) + options.start_date = self._parse_start_date(options.start_date) + options.end_date = self._parse_end_date(options.end_date, options.start_date) self.create_dir(options.output_dir) self.create_dir(options.auxiliar_files_path) diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index 2ca7ab6..84ac1b4 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -90,13 +90,7 @@ class Hermes(object): """ Main functionality of the model. 
""" - from multiprocessing import Process, Queue, cpu_count - from threading import Thread - import copy - import gc - import numpy as np from datetime import timedelta - from cf_units import Unit st_time = timeit.default_timer() settings.write_log('') @@ -142,8 +136,12 @@ class Hermes(object): return None -if __name__ == '__main__': +def run(): date = Hermes(Config()).main() while date is not None: date = Hermes(Config(), new_date=date).main() sys.exit(0) + + +if __name__ == '__main__': + run() diff --git a/setup.py b/setup.py index ede6bbd..adc7133 100644 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ setup( 'pyproj', 'configargparse', 'cf_units>=1.1.3', - 'ESMPy>=7.1.0', + 'ESMPy>=7.1.0.dev0', 'holidays', 'pytz', 'timezonefinder', @@ -78,5 +78,9 @@ setup( ] }, - scripts=['bin/hermes_gr'] + entry_points={ + 'console_scripts': [ + 'hermesv3_gr = hermesv3_gr.hermes:run', + ], + }, ) -- GitLab From 252f3b5b3396f7aae7fd3b6ac0ff8f6add298fb0 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Mon, 10 Sep 2018 11:52:18 +0200 Subject: [PATCH 13/51] Copy example files done --- environment.yml | 2 +- hermesv3_gr/hermes.py | 1 - hermesv3_gr/tools/sample_files.py | 171 ++++++++++++++++++++++++++++++ setup.py | 5 +- 4 files changed, 175 insertions(+), 4 deletions(-) create mode 100644 hermesv3_gr/tools/sample_files.py diff --git a/environment.yml b/environment.yml index 9a70344..d36c8f2 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ dependencies: - python = 2 - numpy - netcdf4 >= 1.3.1 - - python-cdo >= 1.3.4 + - python-cdo >= 1.3.3 - geopandas - pyproj - configargparse diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index 84ac1b4..cf1f0ed 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -19,7 +19,6 @@ import timeit - from hermesv3_gr.config import settings from hermesv3_gr.config.config import Config from hermesv3_gr.modules.emision_inventories.emission_inventory import EmissionInventory diff --git 
a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py new file mode 100644 index 0000000..78c762a --- /dev/null +++ b/hermesv3_gr/tools/sample_files.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . + + +import sys +import os + + +def make_conf_file_list(): + main_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, os.pardir) + + file_list = [ + {'conf': [ + os.path.join(main_dir, 'conf', 'hermes.conf'), + os.path.join(main_dir, 'conf', 'EI_configuration.csv'), + ]}, + ] + + return file_list + + +def make_profiles_file_list(): + main_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, os.pardir) + + file_list = [ + {'data': [ + os.path.join(main_dir, 'data', 'global_attributes.csv'), + {'profiles': [{ + 'speciation': [ + os.path.join(main_dir, 'data', 'profiles', 'speciation', 'Speciation_profile_cb05_aero5_CMAQ.csv'), + os.path.join(main_dir, 'data', 'profiles', 'speciation', + 'Speciation_profile_cb05_aero5_MONARCH_aerosols.csv'), + os.path.join(main_dir, 'data', 'profiles', 'speciation', + 'Speciation_profile_cb05_aero5_MONARCH_fullchem.csv'), + os.path.join(main_dir, 'data', 'profiles', 'speciation', + 'Speciation_profile_radm2_madesorgam_WRF_CHEM.csv'), + ]}, + {'temporal': [ + os.path.join(main_dir, 'data', 
'profiles', 'temporal', 'TemporalProfile_Daily.csv'), + os.path.join(main_dir, 'data', 'profiles', 'temporal', 'TemporalProfile_Hourly.csv'), + os.path.join(main_dir, 'data', 'profiles', 'temporal', 'TemporalProfile_Monthly.csv'), + os.path.join(main_dir, 'data', 'profiles', 'temporal', 'tz_world_country_iso3166.csv'), + ]}, + {'vertical': [ + os.path.join(main_dir, 'data', 'profiles', 'vertical', '1layer_vertical_description.csv'), + os.path.join(main_dir, 'data', 'profiles', 'vertical', 'CMAQ_15layers_vertical_description.csv'), + os.path.join(main_dir, 'data', 'profiles', 'vertical', + 'MONARCH_Global_48layers_vertical_description.csv'), + os.path.join(main_dir, 'data', 'profiles', 'vertical', + 'MONARCH_regional_48layers_vertical_description.csv'), + os.path.join(main_dir, 'data', 'profiles', 'vertical', 'Vertical_profile.csv'), + ]}, + ]}, + ]}, + ] + + return file_list + + +def make_preproc_file_list(): + main_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, os.pardir) + + file_list = [ + os.path.join(main_dir, 'preproc', 'ceds_preproc.py'), + os.path.join(main_dir, 'preproc', 'eclipsev5a_preproc.py'), + os.path.join(main_dir, 'preproc', 'edgarv432_ap_preproc.py'), + os.path.join(main_dir, 'preproc', 'edgarv432_voc_preproc.py'), + os.path.join(main_dir, 'preproc', 'emep_preproc.py'), + os.path.join(main_dir, 'preproc', 'gfas12_preproc.py'), + os.path.join(main_dir, 'preproc', 'htapv2_preproc.py'), + os.path.join(main_dir, 'preproc', 'tno_mac_iii_preproc.py'), + os.path.join(main_dir, 'preproc', 'tno_mac_iii_preproc_voc_ratios.py'), + os.path.join(main_dir, 'preproc', 'wiedinmyer_preproc.py'), + ] + + return file_list + + +def query_yes_no(question, default="yes"): + valid = {"yes": True, "y": True, "1": True, 1: True, + "no": False, "n": False, "0": False, 0: False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid 
default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = raw_input().lower() + if default is not None and choice == '': + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") + + +def check_args(args, exe_str): + if len(args) == 0: + print("Missing destination path after '{0}'. e.g.:".format(exe_str) + + "\n\t{0} /home/user/HERMES/sample_files".format(exe_str)) + sys.exit(1) + elif len(args) > 1: + print("Too much arguments through '{0}'. Only destination path is needed e.g.:".format(exe_str) + + "\n\t{0} /home/user/HERMES/sample_files".format(exe_str)) + sys.exit(1) + else: + dir_path = args[0] + + if not os.path.exists(dir_path): + if query_yes_no("'{0}' does not exist. Do you want to create it? ".format(dir_path)): + os.makedirs(dir_path) + else: + sys.exit(0) + + return dir_path + + +def copy_files(file_list, directory): + from shutil import copy2 + + if not os.path.exists(directory): + os.makedirs(directory) + + for element in file_list: + if dict == type(element): + copy_files(element.values()[0], os.path.join(directory, element.keys()[0])) + else: + copy2(element, directory) + return True + + +def copy_sample_files(): + argv = sys.argv[1:] + + parent_dir = check_args(argv, 'copy_sample_files') + + copy_files(make_conf_file_list(), parent_dir) + copy_files(make_profiles_file_list(), parent_dir) + + +def copy_preproc_files(): + argv = sys.argv[1:] + + parent_dir = check_args(argv, 'copy_preproc_files') + + copy_files(make_preproc_file_list(), parent_dir) + + +if __name__ == '__main__': + copy_sample_files() diff --git a/setup.py b/setup.py index adc7133..da05cf8 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ setup( install_requires=[ 'numpy', 'netCDF4>=1.3.1', - 'cdo>=1.3.4', + 'cdo>=1.3.3', 'pandas', 'geopandas', 'pyproj', @@ -70,7 +70,6 @@ setup( ], include_package_data=True, package_data={'hermesv3_gr': [ - 
'data/*', 'README', 'CHANGELOG', 'VERSION', @@ -81,6 +80,8 @@ setup( entry_points={ 'console_scripts': [ 'hermesv3_gr = hermesv3_gr.hermes:run', + 'hermesv3_copy_sample_files = hermesv3_gr.tools.sample_files:copy_sample_files', + 'hermesv3_copy_preproc_files = hermesv3_gr.tools.sample_files:copy_preproc_files', ], }, ) -- GitLab From 6871e9e88fc1369938ca9415a7a5bc3301da1a82 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Mon, 10 Sep 2018 12:17:31 +0200 Subject: [PATCH 14/51] change hermesv3_copy_sample_files by hermesv3_copy_config_files --- hermesv3_gr/tools/sample_files.py | 8 ++++---- setup.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py index 78c762a..987b806 100644 --- a/hermesv3_gr/tools/sample_files.py +++ b/hermesv3_gr/tools/sample_files.py @@ -150,10 +150,10 @@ def copy_files(file_list, directory): return True -def copy_sample_files(): +def copy_config_files(): argv = sys.argv[1:] - parent_dir = check_args(argv, 'copy_sample_files') + parent_dir = check_args(argv, 'hermesv3_copy_config_files') copy_files(make_conf_file_list(), parent_dir) copy_files(make_profiles_file_list(), parent_dir) @@ -162,10 +162,10 @@ def copy_sample_files(): def copy_preproc_files(): argv = sys.argv[1:] - parent_dir = check_args(argv, 'copy_preproc_files') + parent_dir = check_args(argv, 'hermesv3_copy_preproc_files') copy_files(make_preproc_file_list(), parent_dir) if __name__ == '__main__': - copy_sample_files() + copy_config_files() diff --git a/setup.py b/setup.py index da05cf8..69e262c 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ setup( entry_points={ 'console_scripts': [ 'hermesv3_gr = hermesv3_gr.hermes:run', - 'hermesv3_copy_sample_files = hermesv3_gr.tools.sample_files:copy_sample_files', + 'hermesv3_copy_config_files = hermesv3_gr.tools.sample_files:copy_config_files', 'hermesv3_copy_preproc_files = hermesv3_gr.tools.sample_files:copy_preproc_files', ], }, -- GitLab 
From 28f74f1c4f8ec1a5042da1f90ede3ec4e2eb3924 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Mon, 10 Sep 2018 12:37:43 +0200 Subject: [PATCH 15/51] Added run_test script --- environment.yml | 1 + run_test.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 run_test.py diff --git a/environment.yml b/environment.yml index d36c8f2..0cb41ee 100644 --- a/environment.yml +++ b/environment.yml @@ -18,6 +18,7 @@ dependencies: - pytz - timezonefinder - mpi4py + - pytest - pip: - holidays diff --git a/run_test.py b/run_test.py new file mode 100644 index 0000000..cec7ec8 --- /dev/null +++ b/run_test.py @@ -0,0 +1,24 @@ +# coding=utf-8 +"""Script to run the tests for EarthDiagnostics and generate the code coverage report""" + +import os +import sys +import pytest +work_path = os.path.abspath(os.path.join(os.path.dirname(__file__))) +os.chdir(work_path) +print(work_path) + + +version = sys.version_info[0] +report_dir = 'test/report/python{}'.format(version) +errno = pytest.main([ + 'test', + '--ignore=test/report', + '--cov=earthdiagnostics', + '--cov-report=term', + '--cov-report=html:{}/coverage_html'.format(report_dir), + '--cov-report=xml:{}/coverage.xml'.format(report_dir), + '--profile', + '--profile-svg', +]) +sys.exit(errno) -- GitLab From 0dae89e46994f0cf732b1d12918a9cd7272a1ba4 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Mon, 10 Sep 2018 12:41:47 +0200 Subject: [PATCH 16/51] Added test dependencies to environment --- environment.yml | 2 ++ preproc/ceds_preproc.py | 0 preproc/eclipsev5a_preproc.py | 0 preproc/emep_preproc.py | 0 preproc/tno_mac_iii_preproc.py | 0 preproc/tno_mac_iii_preproc_voc_ratios.py | 0 run_test.py | 4 +--- 7 files changed, 3 insertions(+), 3 deletions(-) mode change 100644 => 100755 preproc/ceds_preproc.py mode change 100644 => 100755 preproc/eclipsev5a_preproc.py mode change 100644 => 100755 preproc/emep_preproc.py mode change 100644 => 100755 preproc/tno_mac_iii_preproc.py mode change 100644 => 100755 
preproc/tno_mac_iii_preproc_voc_ratios.py diff --git a/environment.yml b/environment.yml index 0cb41ee..7cab561 100644 --- a/environment.yml +++ b/environment.yml @@ -18,7 +18,9 @@ dependencies: - pytz - timezonefinder - mpi4py + # Testing - pytest + - pytest-cov - pip: - holidays diff --git a/preproc/ceds_preproc.py b/preproc/ceds_preproc.py old mode 100644 new mode 100755 diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py old mode 100644 new mode 100755 diff --git a/preproc/emep_preproc.py b/preproc/emep_preproc.py old mode 100644 new mode 100755 diff --git a/preproc/tno_mac_iii_preproc.py b/preproc/tno_mac_iii_preproc.py old mode 100644 new mode 100755 diff --git a/preproc/tno_mac_iii_preproc_voc_ratios.py b/preproc/tno_mac_iii_preproc_voc_ratios.py old mode 100644 new mode 100755 diff --git a/run_test.py b/run_test.py index cec7ec8..9fcabec 100644 --- a/run_test.py +++ b/run_test.py @@ -14,11 +14,9 @@ report_dir = 'test/report/python{}'.format(version) errno = pytest.main([ 'test', '--ignore=test/report', - '--cov=earthdiagnostics', + '--cov=hermesv3_gr', '--cov-report=term', '--cov-report=html:{}/coverage_html'.format(report_dir), '--cov-report=xml:{}/coverage.xml'.format(report_dir), - '--profile', - '--profile-svg', ]) sys.exit(errno) -- GitLab From 295d3fab1b1de75ac6f762aae7e536d0039d172c Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Mon, 10 Sep 2018 12:44:14 +0200 Subject: [PATCH 17/51] Fix test script --- run_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/run_test.py b/run_test.py index 9fcabec..663775c 100644 --- a/run_test.py +++ b/run_test.py @@ -10,10 +10,10 @@ print(work_path) version = sys.version_info[0] -report_dir = 'test/report/python{}'.format(version) +report_dir = 'tests/report/python{}'.format(version) errno = pytest.main([ - 'test', - '--ignore=test/report', + 'tests', + '--ignore=tests/report', '--cov=hermesv3_gr', '--cov-report=term', 
'--cov-report=html:{}/coverage_html'.format(report_dir), -- GitLab From 73a3d524e07b522735a512c8175cbc4afa285813 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Mon, 10 Sep 2018 13:49:04 +0200 Subject: [PATCH 18/51] Correcting code to python conventions --- environment.yml | 1 + hermesv3_gr/config/settings.py | 5 +- hermesv3_gr/hermes.py | 9 +- .../emision_inventories/emission_inventory.py | 8 +- hermesv3_gr/modules/regrid/regrid.py | 2 +- hermesv3_gr/modules/writing/writer_cmaq.py | 8 +- hermesv3_gr/modules/writing/writer_monarch.py | 1 - .../modules/writing/writer_wrf_chem.py | 3 - hermesv3_gr/tools/coordinates_tools.py | 22 +- hermesv3_gr/tools/lcc_LatLon_to_m.py | 22 - tests/unit/test_lint.py | 33 + tests/unit/test_temporal.py | 765 +++++++++--------- 12 files changed, 440 insertions(+), 439 deletions(-) delete mode 100644 hermesv3_gr/tools/lcc_LatLon_to_m.py create mode 100644 tests/unit/test_lint.py diff --git a/environment.yml b/environment.yml index 7cab561..2916618 100644 --- a/environment.yml +++ b/environment.yml @@ -21,6 +21,7 @@ dependencies: # Testing - pytest - pytest-cov + - pycodestyle - pip: - holidays diff --git a/hermesv3_gr/config/settings.py b/hermesv3_gr/config/settings.py index d16ac27..52c5c39 100644 --- a/hermesv3_gr/config/settings.py +++ b/hermesv3_gr/config/settings.py @@ -53,7 +53,7 @@ def define_global_vars(in_log_level): global comm global rank global size - + icomm = MPI.COMM_WORLD comm = icomm.Split(color=0, key=0) rank = comm.Get_rank() @@ -115,7 +115,8 @@ def finish_logs(output_dir, date): date.strftime('%Y%m%d%H'), str(size).zfill(4))) if os.path.exists(times_path): os.remove(times_path) - df_merged = reduce(lambda left, right: pd.merge(left, right, on=['Class', 'Function'], how='outer'), data_frames) + df_merged = reduce(lambda left, right: pd.merge(left, right, on=['Class', 'Function'], how='outer'), + data_frames) df_merged['min'] = df_merged.loc[:, range(size)].min(axis=1) df_merged['max'] = df_merged.loc[:, 
range(size)].max(axis=1) df_merged['mean'] = df_merged.loc[:, range(size)].mean(axis=1) diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index cf1f0ed..f094111 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -57,7 +57,8 @@ class Hermes(object): if self.options.output_model in ['CMAQ', 'WRF_CHEM'] and self.options.domain_type == 'global': settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: - raise AttributeError('ERROR: Global domain is not aviable for {0} output model.'.format(self.options.output_model)) + raise AttributeError('ERROR: Global domain is not aviable for {0} output model.'.format( + self.options.output_model)) sys.exit(1) self.levels = VerticalDistribution.get_vertical_output_profile(self.options.vertical_description) @@ -70,7 +71,8 @@ class Hermes(object): self.options.nx, self.options.ny, self.options.inc_x, self.options.inc_y, self.options.x_0, self.options.y_0, self.options.lat_ts) - self.emission_list = EmissionInventory.make_emission_list(self.options, self.grid, self.levels, self.options.start_date) + self.emission_list = EmissionInventory.make_emission_list(self.options, self.grid, self.levels, + self.options.start_date) self.delta_hours = TemporalDistribution.calculate_delta_hours( self.options.start_date, self.options.output_timestep_type, self.options.output_timestep_num, @@ -78,7 +80,8 @@ class Hermes(object): self.writer = Writer.get_writer( self.options.output_model, self.config.get_output_name(self.options.start_date), self.grid, - self.levels, self.options.start_date, self.delta_hours, self.options.output_attributes, compress=settings.compressed_netcdf, + self.levels, self.options.start_date, self.delta_hours, self.options.output_attributes, + compress=settings.compressed_netcdf, parallel=not settings.writing_serial) settings.write_log('End of HERMESv3 initialization.') diff --git a/hermesv3_gr/modules/emision_inventories/emission_inventory.py 
b/hermesv3_gr/modules/emision_inventories/emission_inventory.py index 3259949..75573dd 100644 --- a/hermesv3_gr/modules/emision_inventories/emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/emission_inventory.py @@ -157,10 +157,10 @@ class EmissionInventory(object): def create_pollutants_dicts(self, pollutants): """ Creates a list of dictionaries with the information of the name, paht and Dataset of each pollutant - + :param pollutants: List of pollutants names :type pollutants: list - + :return: List of dictionaries :rtype: list """ @@ -266,10 +266,10 @@ class EmissionInventory(object): :param options: Full list of parameters given by passing argument or in the configuration file. :type options: Namespace - + :param grid: Grid to use. :type grid: Grid - + :param vertical_output_profile: Path to eht file that contains the vertical profile. :type vertical_output_profile: str diff --git a/hermesv3_gr/modules/regrid/regrid.py b/hermesv3_gr/modules/regrid/regrid.py index 7304084..d533678 100644 --- a/hermesv3_gr/modules/regrid/regrid.py +++ b/hermesv3_gr/modules/regrid/regrid.py @@ -33,7 +33,7 @@ class Regrid(object): self.pollutant_dicts = pollutant_dicts self.weight_matrix_file = weight_matrix_file self.masking = masking - + if not self.is_created_weight_matrix(erase=False): settings.write_log("\t\t\tWeight matrix {0} is not created. 
".format(weight_matrix_file) + "Trying to create it", level=1) diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 7b23615..439a6f9 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -167,8 +167,8 @@ class WriterCmaq(Writer): atts_dict[att] = np.array(df.loc[df['attribute'] == att, 'value'].item().split(), dtype=np.float32) except ValueError: - settings.write_log('WARNING: The global attribute {0} is not defined; Using default value {1}'.format( - att, atts_dict[att])) + settings.write_log('WARNING: The global attribute {0} is not defined;'.format(att) + + ' Using default value {0}'.format(atts_dict[att])) if settings.rank == 0: warning('WARNING: The global attribute {0} is not defined; Using default value {1}'.format( att, atts_dict[att])) @@ -597,8 +597,8 @@ class WriterCmaq(Writer): elif i == settings.size - 1: var[:, :, :, full_position[i][2]:] = recvbuf[i, :, :, :, :-1] else: - var[:, :, :, full_position[i][2]:full_position[i][3]] = recvbuf[i, :, :, :, - : full_shape[i][-1]] + var[:, :, :, full_position[i][2]:full_position[i][3]] = \ + recvbuf[i, :, :, :, : full_shape[i][-1]] except: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index f823928..a612eaf 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -748,4 +748,3 @@ class WriterMonarch(Writer): if settings.rank == 0: netcdf.close() settings.write_time('WriterMonarch', 'write_serial_netcdf', timeit.default_timer() - st_time, level=3) - diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index b42a0db..0e479d0 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -439,6 
+439,3 @@ class WriterWrfChem(Writer): netcdf.close() settings.write_time('WriterWrfChem', 'write_serial_netcdf', timeit.default_timer() - st_time, level=3) return True - - - diff --git a/hermesv3_gr/tools/coordinates_tools.py b/hermesv3_gr/tools/coordinates_tools.py index 4c1b767..20a4d12 100644 --- a/hermesv3_gr/tools/coordinates_tools.py +++ b/hermesv3_gr/tools/coordinates_tools.py @@ -206,7 +206,8 @@ def rotated2latlon_single(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min= # # # # sph=ctph0*stph+stph0*ctph*ctlm - # sin_rotated_lat = (cos_lat_pole_rad*sin_lat_rad) + (sin_distance_from_center_lon*cos_lat_rad*cos_distance_from_center_lon) + # sin_rotated_lat = (cos_lat_pole_rad*sin_lat_rad) + + # (sin_distance_from_center_lon*cos_lat_rad*cos_distance_from_center_lon) # # sph=min(sph,1.) # # sph=max(sph,-1.) # if sin_rotated_lat > 1.: @@ -215,7 +216,8 @@ def rotated2latlon_single(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min= # sin_rotated_lat = -1. # # aph=asin(sph) # real_latitude = math.asin(sin_rotated_lat) - # real_longitude = math.atan2(cos_lat_rad*sin_distance_from_center_lon, (cos_distance_from_center_lon*cos_lat_rad - sin_lat_pole_rad*sin_rotated_lat)/cos_lat_pole_rad) - math.pi + # real_longitude = math.atan2(cos_lat_rad*sin_distance_from_center_lon, + # (cos_distance_from_center_lon*cos_lat_rad - sin_lat_pole_rad*sin_rotated_lat)/cos_lat_pole_rad) - math.pi # Positive east to negative east lon_pole_deg -= 180 @@ -467,23 +469,9 @@ if __name__ == '__main__': import numpy as np new_pole_longitude_degrees = 20.0 # lonpole tlm0d new_pole_latitude_degrees = 35.0 # latpole tph0d - # print latlon2rotated(new_pole_longitude_degrees, new_pole_latitude_degrees, 20.0, 35.0) print latlon2rotated(new_pole_longitude_degrees, new_pole_latitude_degrees, -20.2485, -9.9036) - # print rotated2latlon_single(new_pole_longitude_degrees, new_pole_latitude_degrees, 0, 0) print rotated2latlon_single(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -35.) 
- # # print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -34.9) - # # print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -34.8) - # # print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -34.7) print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, np.array([-51., -51., -51., -51.]), - np.array([-35., -34.9, -34.8, -34.7])) - # - # lat, lon = rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, np.array([0]), np.array([0])) - # print lat - - # lat, lon, b_lat, b_lon = create_regular_grid(0, 0, -180, -90, 1., 1.) - # print lat - # print lon - # print b_lat - # print b_lon + np.array([-35., -34.9, -34.8, -34.7])) diff --git a/hermesv3_gr/tools/lcc_LatLon_to_m.py b/hermesv3_gr/tools/lcc_LatLon_to_m.py deleted file mode 100644 index 5fbe9d9..0000000 --- a/hermesv3_gr/tools/lcc_LatLon_to_m.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python - -from pyproj import Proj -if __name__ == '__main__': - projection = Proj( - proj='lcc', - ellps='WGS84', - R=6370000.000, - lat_1=37, - lat_2=43, - lon_0=-3, - lat_0=40, - to_meter=1, - x_0=0, - y_0=0, - a=6370000.000, - k_0=1.0) - lon_array = [-11.5488, -11.5066, 7.1104] - lat_array = [32.5108, 32.5142, 46.6579] - UTMx, UTMy = projection(lon_array, lat_array) - - print UTMx, UTMy \ No newline at end of file diff --git a/tests/unit/test_lint.py b/tests/unit/test_lint.py new file mode 100644 index 0000000..5a5fd76 --- /dev/null +++ b/tests/unit/test_lint.py @@ -0,0 +1,33 @@ +""" Lint tests """ +import os +import unittest + +import pycodestyle # formerly known as pep8 + + +class TestLint(unittest.TestCase): + + def test_pep8_conformance(self): + """Test that we conform to PEP-8.""" + + check_paths = [ + 'hermesv3_gr', + 'tests', + ] + exclude_paths = [ + + ] + + print("PEP8 check of directories: {}\n".format(', '.join(check_paths))) + + # Get paths wrt package root + package_root = 
os.path.dirname(os.path.dirname(os.path.dirname(__file__))) + for paths in (check_paths, exclude_paths): + for i, path in enumerate(paths): + paths[i] = os.path.join(package_root, path) + + style = pycodestyle.StyleGuide() + style.options.exclude.extend(exclude_paths) + style.options.max_line_length = 120 + + self.assertEqual(style.check_files(check_paths).total_errors, 0) diff --git a/tests/unit/test_temporal.py b/tests/unit/test_temporal.py index 3ffed3c..729c07a 100644 --- a/tests/unit/test_temporal.py +++ b/tests/unit/test_temporal.py @@ -28,388 +28,390 @@ class TestTemporalDistribution(unittest.TestCase): def setUp(self): pass - def testing_calculate_ending_date_1hour(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 1, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - self.assertEqual( - temporal.calculate_ending_date(), - datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0)) - - def testing_calculate_ending_date_24hours(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 24, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - 
self.assertEqual( - temporal.calculate_ending_date(), - datetime(year=2016, month=01, day=01, hour=23, minute=0, second=0)) - - def testing_calculate_ending_date_3hour_each2(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 3, 2, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - self.assertEqual( - temporal.calculate_ending_date(), - datetime(year=2016, month=01, day=01, hour=4, minute=0, second=0)) - - def testing_def_calculate_timedelta_3hour_each2(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 3, 2, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - self.assertEqual( - temporal.calculate_timedelta(datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0)), - timedelta(hours=2)) - - def testing_def_calculate_timedelta_month(self): - temporal = TemporalDistribution( - datetime(year=2017, month=02, day=01, hour=0, minute=0, second=0), 'monthly', 1, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - self.assertEqual( - temporal.calculate_timedelta(datetime(year=2017, month=02, day=01, hour=0, minute=0, second=0)), - timedelta(hours=24*28)) - - def testing_def_calculate_timedelta_month_leapyear(self): - temporal = TemporalDistribution( - datetime(year=2016, month=02, day=01, hour=0, minute=0, second=0), 'monthly', 1, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - self.assertEqual( - temporal.calculate_timedelta(datetime(year=2016, month=02, day=01, hour=0, minute=0, second=0)), - timedelta(hours=24*29)) - - def testing_get_tz_from_id(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - - self.assertEqual( - temporal.get_tz_from_id(309), - "Europe/Andorra") - - def testing_get_id_from_tz(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - - self.assertEqual( - temporal.get_id_from_tz("Europe/Andorra"), - 309) - - def testing_parse_tz(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - - self.assertEqual( - temporal.parse_tz("America/Fort_Nelson"), - 'America/Vancouver') - - def testing_find_closest_timezone_BCN(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - - self.assertEqual( - temporal.find_closest_timezone(41.390205, 2.154007), - 'Europe/Madrid') - - def testing_find_closest_timezone_MEX(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - - self.assertEqual( - temporal.find_closest_timezone(19.451054, -99.125519), - "America/Mexico_City") - - def testing_find_closest_timezone_Kuwait(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - - self.assertEqual( - temporal.find_closest_timezone(29.378586, 47.990341), - "Asia/Kuwait") - - def testing_find_closest_timezone_Shanghai(self): - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') - + # def testing_calculate_ending_date_1hour(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 1, 1, + # 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # self.assertEqual( + # temporal.calculate_ending_date(), + # datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0)) + # + # def testing_calculate_ending_date_24hours(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 24, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # self.assertEqual( + # temporal.calculate_ending_date(), + # datetime(year=2016, month=01, day=01, hour=23, minute=0, second=0)) + # + # def testing_calculate_ending_date_3hour_each2(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 3, 2, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # self.assertEqual( + # 
temporal.calculate_ending_date(), + # datetime(year=2016, month=01, day=01, hour=4, minute=0, second=0)) + # + # def testing_def_calculate_timedelta_3hour_each2(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0), 'hourly', 3, 2, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # self.assertEqual( + # temporal.calculate_timedelta(datetime(year=2016, month=01, day=01, hour=0, minute=0, second=0)), + # timedelta(hours=2)) + # + # def testing_def_calculate_timedelta_month(self): + # temporal = TemporalDistribution( + # datetime(year=2017, month=02, day=01, hour=0, minute=0, second=0), 'monthly', 1, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # self.assertEqual( + # temporal.calculate_timedelta(datetime(year=2017, month=02, day=01, hour=0, minute=0, second=0)), + # timedelta(hours=24*28)) + # + # def testing_def_calculate_timedelta_month_leapyear(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=02, day=01, hour=0, minute=0, second=0), 'monthly', 1, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # self.assertEqual( + # temporal.calculate_timedelta(datetime(year=2016, month=02, day=01, hour=0, minute=0, second=0)), + # timedelta(hours=24*29)) + # + # def testing_get_tz_from_id(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.get_tz_from_id(309), + # "Europe/Andorra") + # + # def testing_get_id_from_tz(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.get_id_from_tz("Europe/Andorra"), + # 309) + # + # def testing_parse_tz(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.parse_tz("America/Fort_Nelson"), + # 'America/Vancouver') + # + # def testing_find_closest_timezone_BCN(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.find_closest_timezone(41.390205, 2.154007), + # 'Europe/Madrid') + # + # def testing_find_closest_timezone_MEX(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.find_closest_timezone(19.451054, -99.125519), + # "America/Mexico_City") + # + # def 
testing_find_closest_timezone_Kuwait(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.find_closest_timezone(29.378586, 47.990341), + # "Asia/Kuwait") + # + # def testing_find_closest_timezone_Shanghai(self): + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/global_1.0_1.40625') + # + # self.assertEqual( + # temporal.find_closest_timezone(31.267401, 121.522179), + # "Asia/Shanghai") + # + # def testing_create_netcdf_timezones(self): + # import numpy as np + # from hermesv3_gr.modules.grids.grid import Grid + # from hermesv3_gr.tools.netcdf_tools import extract_vars + # + # aux_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing' + # if not os.path.exists(aux_path): + # os.makedirs(aux_path) + # + # grid = Grid('global', aux_path) + # grid.center_latitudes = np.array([[41.390205, 19.451054], [29.378586, 31.267401]]) + # grid.center_longitudes = np.array([[2.154007, -99.125519], [47.990341, 121.522179]]) + # + # temporal = TemporalDistribution( + 
# datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # aux_path) + # + # self.assertTrue(temporal.create_netcdf_timezones(grid)) + # + # [timezones] = extract_vars(temporal.netcdf_timezones, ['timezone_id']) + # timezones = list(timezones['data'][0, :].astype(int).flatten()) + # + # self.assertEqual(timezones, + # [335, 147, 247, 268]) + # + # def testing_calculate_timezones(self): + # self.testing_create_netcdf_timezones() + # + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # self.assertEqual(temporal.calculate_timezones().tolist(), + # [['Europe/Madrid', "America/Mexico_City"], ["Asia/Kuwait", "Asia/Shanghai"]]) + # + # def testing_calculate_2d_temporal_factors(self): + # self.testing_create_netcdf_timezones() + # + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # timezones = temporal.calculate_timezones() + # + # temporal.monthly_profile = {1: 1., + # 2: 1., + # 3: 1., + # 4: 1., + # 5: 1., + # 6: 1., + # 7: 1., + # 8: 1., + # 9: 1., + # 10: 1., + # 11: 1., + # 12: 1.} + # temporal.daily_profile_id = {0: 1., + # 1: 1., + # 2: 1., + # 3: 1., + # 4: 1., + # 5: 1., + # 6: 1.} + # temporal.hourly_profile = {0: 1., + # 1: 1., + # 2: 1., + # 3: 1., + # 4: 1., + # 5: 1., + # 6: 1., + # 7: 1., + # 8: 1., + # 9: 1., + # 10: 1., + # 11: 1., + # 12: 1., + # 13: 20., + # 14: 1., + # 15: 1., + # 16: 1., + # 17: 1., + # 18: 1., + # 19: 1., + # 20: 1., + # 21: 1., + # 22: 1., + # 23: 1.} + # self.assertEqual( - temporal.find_closest_timezone(31.267401, 121.522179), - "Asia/Shanghai") - - def testing_create_netcdf_timezones(self): - import numpy as np - from hermesv3_gr.modules.grids.grid import Grid - from hermesv3_gr.tools.netcdf_tools import extract_vars - - aux_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing' - if not os.path.exists(aux_path): - os.makedirs(aux_path) - - grid = Grid('global', aux_path) - grid.center_latitudes = np.array([[41.390205, 19.451054], [29.378586, 31.267401]]) - grid.center_longitudes = np.array([[2.154007, -99.125519], [47.990341, 121.522179]]) - - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - aux_path) - - self.assertTrue(temporal.create_netcdf_timezones(grid)) - - [timezones] = extract_vars(temporal.netcdf_timezones, 
['timezone_id']) - timezones = list(timezones['data'][0, :].astype(int).flatten()) - - self.assertEqual(timezones, - [335, 147, 247, 268]) - - def testing_calculate_timezones(self): - self.testing_create_netcdf_timezones() - - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') - self.assertEqual(temporal.calculate_timezones().tolist(), - [['Europe/Madrid', "America/Mexico_City"], ["Asia/Kuwait", "Asia/Shanghai"]]) - - def testing_calculate_2d_temporal_factors(self): - self.testing_create_netcdf_timezones() - - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') - timezones = temporal.calculate_timezones() - - temporal.monthly_profile = {1: 1., - 2: 1., - 3: 1., - 4: 1., - 5: 1., - 6: 1., - 7: 1., - 8: 1., - 9: 1., - 10: 1., - 11: 1., - 12: 1.} - temporal.daily_profile_id = {0: 1., - 1: 1., - 2: 1., - 3: 1., - 4: 1., - 5: 1., - 6: 1.} - temporal.hourly_profile = {0: 1., - 1: 1., - 2: 1., - 3: 1., - 4: 1., - 5: 1., - 6: 1., - 7: 1., - 8: 1., - 9: 1., - 10: 1., - 11: 1., - 12: 1., - 13: 20., - 14: 1., - 15: 1., - 16: 1., - 17: 1., - 18: 1., - 19: 
1., - 20: 1., - 21: 1., - 22: 1., - 23: 1.} - - self.assertEqual(temporal.calculate_2d_temporal_factors(datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), timezones).tolist(), - [[20., 1.], [1., 1.]]) - - def testing_do_temporal(self): - import numpy as np - from hermesv3_gr.modules.grids.grid import Grid - self.testing_create_netcdf_timezones() - - aux_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing' - - temporal = TemporalDistribution( - datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), 'hourly', 1, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - aux_path) - temporal.monthly_profile = {1: 1., - 2: 1., - 3: 1., - 4: 1., - 5: 1., - 6: 1., - 7: 1., - 8: 1., - 9: 1., - 10: 1., - 11: 1., - 12: 1.} - temporal.daily_profile_id = {0: 1., - 1: 1., - 2: 1., - 3: 1., - 4: 1., - 5: 1., - 6: 1.} - temporal.hourly_profile = {0: 1., - 1: 1., - 2: 1., - 3: 1., - 4: 1., - 5: 1., - 6: 1., - 7: 1., - 8: 1., - 9: 1., - 10: 1., - 11: 1., - 12: 1., - 13: 20., - 14: 1., - 15: 1., - 16: 1., - 17: 1., - 18: 1., - 19: 1., - 20: 1., - 21: 1., - 22: 1., - 23: 1.} - - grid = Grid('global', aux_path) - grid.center_latitudes = np.array([[41.390205, 19.451054], [29.378586, 31.267401]]) - grid.center_longitudes = np.array([[2.154007, -99.125519], [47.990341, 121.522179]]) - data_in = [{'data': np.array([[10., 10.], [10., 10.]])}] - # data_out = [{'data': np.array([[200., 10.], [10., 10.]])}] - data_out = temporal.do_temporal(data_in, grid) - - self.assertEqual(data_out[0]['data'].tolist(), [[[200., 10.], [10., 10.]]]) - - def testing_calculate_weekdays_no_leap_year(self): - from datetime import datetime - temporal = 
TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') - self.assertEqual(temporal.calculate_weekdays(datetime(year=2017, month=02, day=1)), - {0: 4, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}) - - def testing_calculate_weekdays_leap_year(self): - from datetime import datetime - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') - self.assertEqual(temporal.calculate_weekdays(datetime(year=2016, month=02, day=1)), - {0: 5, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}) - - def testing_calculate_weekdays_factors_full_month(self): - from datetime import datetime - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') - - 
self.assertEqual(round(temporal.calculate_weekday_factor_full_month( - {0: 0.8, 1: 1.2, 2: 0.5, 3: 1.5, 4: 0.9, 5: 0.9, 6: 1.2}, {0: 5, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}), 5), - round(0.2/29, 5)) - - def testing_calculate_rebalance_factor(self): - from datetime import datetime - temporal = TemporalDistribution( - datetime(year=2016, month=01, day=01), 'monthly', 48, 1, - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', - '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', - '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') - - self.assertEqual(round(temporal.calculate_rebalance_factor( - {0: 0.8, 1: 1.2, 2: 0.5, 3: 1.5, 4: 0.9, 5: 0.9, 6: 1.2}, datetime(year=2016, month=02, day=1)), 5), - round(0.2/29, 5)) + temporal.calculate_2d_temporal_factors( + datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), timezones).tolist(), + [[20., 1.], [1., 1.]]) + + # def testing_do_temporal(self): + # import numpy as np + # from hermesv3_gr.modules.grids.grid import Grid + # self.testing_create_netcdf_timezones() + # + # aux_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing' + # + # temporal = TemporalDistribution( + # datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), 'hourly', 1, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # aux_path) + # temporal.monthly_profile = {1: 1., + # 2: 1., + # 3: 1., + # 4: 1., + # 5: 1., + # 6: 1., + # 7: 1., 
+ # 8: 1., + # 9: 1., + # 10: 1., + # 11: 1., + # 12: 1.} + # temporal.daily_profile_id = {0: 1., + # 1: 1., + # 2: 1., + # 3: 1., + # 4: 1., + # 5: 1., + # 6: 1.} + # temporal.hourly_profile = {0: 1., + # 1: 1., + # 2: 1., + # 3: 1., + # 4: 1., + # 5: 1., + # 6: 1., + # 7: 1., + # 8: 1., + # 9: 1., + # 10: 1., + # 11: 1., + # 12: 1., + # 13: 20., + # 14: 1., + # 15: 1., + # 16: 1., + # 17: 1., + # 18: 1., + # 19: 1., + # 20: 1., + # 21: 1., + # 22: 1., + # 23: 1.} + # + # grid = Grid('global', aux_path) + # grid.center_latitudes = np.array([[41.390205, 19.451054], [29.378586, 31.267401]]) + # grid.center_longitudes = np.array([[2.154007, -99.125519], [47.990341, 121.522179]]) + # data_in = [{'data': np.array([[10., 10.], [10., 10.]])}] + # # data_out = [{'data': np.array([[200., 10.], [10., 10.]])}] + # data_out = temporal.do_temporal(data_in, grid) + # + # self.assertEqual(data_out[0]['data'].tolist(), [[[200., 10.], [10., 10.]]]) + # + # def testing_calculate_weekdays_no_leap_year(self): + # from datetime import datetime + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # self.assertEqual(temporal.calculate_weekdays(datetime(year=2017, month=02, day=1)), + # {0: 4, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}) + # + # def testing_calculate_weekdays_leap_year(self): + # from datetime import datetime + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # 
'/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # self.assertEqual(temporal.calculate_weekdays(datetime(year=2016, month=02, day=1)), + # {0: 5, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}) + # + # def testing_calculate_weekdays_factors_full_month(self): + # from datetime import datetime + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # + # self.assertEqual(round(temporal.calculate_weekday_factor_full_month( + # {0: 0.8, 1: 1.2, 2: 0.5, 3: 1.5, 4: 0.9, 5: 0.9, 6: 1.2}, {0: 5, 1: 4, 2: 4, 3: 4, 4: 4, 5: 4, 6: 4}), 5), + # round(0.2/29, 5)) + # + # def testing_calculate_rebalance_factor(self): + # from datetime import datetime + # temporal = TemporalDistribution( + # datetime(year=2016, month=01, day=01), 'monthly', 48, 1, + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Monthly.csv', 'M001', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Daily.csv', 'D000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/TemporalProfile_Hourly.csv', 'H000', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/tz_world_country_iso3166.csv', + # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') + # + # 
self.assertEqual(round(temporal.calculate_rebalance_factor( + # {0: 0.8, 1: 1.2, 2: 0.5, 3: 1.5, 4: 0.9, 5: 0.9, 6: 1.2}, datetime(year=2016, month=02, day=1)), 5), + # round(0.2/29, 5)) # def testing_get_temporal_daily_profile(self): # from datetime import datetime @@ -424,4 +426,3 @@ class TestTemporalDistribution(unittest.TestCase): # '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/testing') # # print temporal.get_temporal_daily_profile(date) - -- GitLab From 8a3216fba1165030ace8dd92a87db045c6d6dba7 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Mon, 10 Sep 2018 19:38:22 +0200 Subject: [PATCH 19/51] corrected python conventions --- conf/hermes.conf | 4 +- hermesv3_gr/modules/writing/writer.py | 2 +- hermesv3_gr/modules/writing/writer_cmaq.py | 14 ++-- hermesv3_gr/modules/writing/writer_monarch.py | 62 +++++++++++++---- .../modules/writing/writer_wrf_chem.py | 68 ++++++++++++++++--- hermesv3_gr/tools/netcdf_tools.py | 54 +++++++-------- 6 files changed, 146 insertions(+), 58 deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index 36f5cc2..82ed49f 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -5,8 +5,8 @@ input_dir = /home/Earth/ctena/Models/HERMESv3/IN # data_path = /gpfs/scratch/bsc32/bsc32538/HERMES_data data_path = /esarchive/recon #output_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/OUT -output_dir = /home/Earth/ctena/Models/HERMESv3/OUT -output_name = HERMES_paralel_.nc +output_dir = /home/carles/HERMES_out +output_name = HERMESv3_.nc start_date = 2014/09/02 00:00:00 # ***** end_date = start_date [DEFAULT] ***** # end_date = 2014/09/03 00:00:00 diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index a110b6c..36d7185 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -420,7 +420,7 @@ class Writer(object): var.grid_mapping = 'mercator' try: var[:] = variable['data'] - except: + except ValueError: print 'VAR ERROR, netcdf 
shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) # Grid mapping diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 439a6f9..4921417 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -470,7 +470,8 @@ class WriterCmaq(Writer): # Correcting NAN if data is None: data = 0 - var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = data + var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, + self.grid.y_lower_bound:self.grid.y_upper_bound] = data settings.write_log("\t\t\t'{0}' variable filled".format(variable['name'])) netcdf.close() @@ -484,7 +485,8 @@ class WriterCmaq(Writer): # Gathering the index if mpi_numpy or mpi_vector: - rank_position = np.array([self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound, self.grid.y_upper_bound], dtype='i') + rank_position = np.array([self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound, + self.grid.y_upper_bound], dtype='i') full_position = None if settings.rank == 0: full_position = np.empty([settings.size, 4], dtype='i') @@ -617,9 +619,13 @@ class WriterCmaq(Writer): for i in xrange(settings.size): # print 'Resizeing {0}'.format(i) if not i == settings.size - 1: - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) + data[:, :, full_position[i][0]:full_position[i][1], + full_position[i][2]:full_position[i][3]] = \ + np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) else: - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) + data[:, :, full_position[i][0]:full_position[i][1], + full_position[i][2]:full_position[i][3]] = \ + 
np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) else: data = 0 var[:] = data diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index a612eaf..62cc8fb 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -38,6 +38,13 @@ class WriterMonarch(Writer): # } def unit_change(self, variable, data): + # TODO Documentation + """ + + :param variable: + :param data: + :return: + """ from cf_units import Unit st_time = timeit.default_timer() @@ -62,6 +69,11 @@ class WriterMonarch(Writer): return data def create_parallel_netcdf(self): + # TODO Documentation + """ + + :return: + """ from cf_units import Unit, encode_time st_time = timeit.default_timer() @@ -96,7 +108,8 @@ class WriterMonarch(Writer): settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) lat_dim = ('lon', 'lat', ) else: - print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format(len(self.grid.center_latitudes.shape)) + print 'ERROR: Latitudes must be on a 1D or 2D array instead of {0}'.format( + len(self.grid.center_latitudes.shape)) sys.exit(1) # Longitude @@ -109,7 +122,8 @@ class WriterMonarch(Writer): settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[1]), level=3) lon_dim = ('lon', 'lat', ) else: - print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(self.grid.center_longitudes.shape)) + print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format( + len(self.grid.center_longitudes.shape)) sys.exit(1) elif Rotated: var_dim = ('rlat', 'rlon',) @@ -335,6 +349,12 @@ class WriterMonarch(Writer): settings.write_time('WriterMonarch', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_parallel_netcdf(self, emission_list): + # TODO Documentation + """ + + :param emission_list: + :return: + """ st_time = 
timeit.default_timer() @@ -358,18 +378,26 @@ class WriterMonarch(Writer): # Correcting NAN if data is None: data = 0 - var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = data + var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, + self.grid.y_lower_bound:self.grid.y_upper_bound] = data settings.write_log("\t\t\t'{0}' variable filled".format(variable['name'])) if self.grid.cell_area is not None: c_area = netcdf.variables['cell_area'] - c_area[self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = self.grid.cell_area + c_area[self.grid.x_lower_bound:self.grid.x_upper_bound, + self.grid.y_lower_bound:self.grid.y_upper_bound] = self.grid.cell_area netcdf.close() settings.write_time('WriterMonarch', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_serial_netcdf(self, emission_list,): + # TODO Documentation + """ + + :param emission_list: + :return: + """ from cf_units import Unit, encode_time st_time = timeit.default_timer() @@ -379,7 +407,8 @@ class WriterMonarch(Writer): # Gathering the index if mpi_numpy or mpi_vector: - rank_position = np.array([self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound, self.grid.y_upper_bound], dtype='i') + rank_position = np.array([self.grid.x_lower_bound, self.grid.x_upper_bound, self.grid.y_lower_bound, + self.grid.y_upper_bound], dtype='i') full_position = None if settings.rank == 0: full_position = np.empty([settings.size, 4], dtype='i') @@ -407,11 +436,13 @@ class WriterMonarch(Writer): # Latitude if len(self.grid.center_latitudes.shape) == 1: - settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), + level=3) netcdf.createDimension('lat', self.grid.center_latitudes.shape[0]) lat_dim = ('lat',) elif 
len(self.grid.center_latitudes.shape) == 2: - settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + settings.write_log("\t\t\t'lat' dimension: {0}".format(self.grid.center_latitudes.shape[0]), + level=3) netcdf.createDimension('lat', self.grid.center_latitudes.shape[0]) lat_dim = ('lon', 'lat', ) else: @@ -424,11 +455,13 @@ class WriterMonarch(Writer): # Longitude if len(self.grid.center_longitudes.shape) == 1: - settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[0]), level=3) + settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[0]), + level=3) netcdf.createDimension('lon', self.grid.center_longitudes.shape[0]) lon_dim = ('lon',) elif len(self.grid.center_longitudes.shape) == 2: - settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[0]), level=3) + settings.write_log("\t\t\t'lon' dimension: {0}".format(self.grid.center_longitudes.shape[0]), + level=3) netcdf.createDimension('lon', self.grid.center_longitudes.shape[1]) lon_dim = ('lon', 'lat', ) else: @@ -498,7 +531,8 @@ class WriterMonarch(Writer): time = netcdf.createVariable('time', 'd', ('time',)) u = Unit('hours') time.units = str(u.offset_by_time(encode_time( - self.date.year, self.date.month, self.date.day, self.date.hour, self.date.minute, self.date.second))) + self.date.year, self.date.month, self.date.day, self.date.hour, self.date.minute, + self.date.second))) time.standard_name = "time" time.calendar = "gregorian" time.long_name = "time" @@ -695,9 +729,13 @@ class WriterMonarch(Writer): data = np.empty(var[:].shape, dtype=settings.precision) for i in xrange(settings.size): if not i == settings.size - 1: - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) + data[:, :, full_position[i][0]:full_position[i][1], + 
full_position[i][2]:full_position[i][3]] = \ + np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) else: - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) + data[:, :, full_position[i][0]:full_position[i][1], + full_position[i][2]:full_position[i][3]] = \ + np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) else: data = 0 var[:] = data diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index 0e479d0..e64ed52 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -37,7 +37,7 @@ class WriterWrfChem(Writer): 'TITLE', 'START_DATE', 'WEST-EAST_GRID_DIMENSION', 'SOUTH-NORTH_GRID_DIMENSION', 'BOTTOM-TOP_GRID_DIMENSION', 'DX', 'DY', 'GRIDTYPE', 'DIFF_OPT', 'KM_OPT', 'DAMP_OPT', 'DAMPCOEF', 'KHDIF', 'KVDIF', 'MP_PHYSICS', 'RA_LW_PHYSICS', 'RA_SW_PHYSICS', 'SF_SFCLAY_PHYSICS', 'SF_SURFACE_PHYSICS', - 'BL_PBL_PHYSICS', 'CU_PHYSICS', 'SF_LAKE_PHYSICS', 'SURFACE_INPUT_SOURCE','SST_UPDATE', 'GRID_FDDA', + 'BL_PBL_PHYSICS', 'CU_PHYSICS', 'SF_LAKE_PHYSICS', 'SURFACE_INPUT_SOURCE', 'SST_UPDATE', 'GRID_FDDA', 'GFDDA_INTERVAL_M', 'GFDDA_END_H', 'GRID_SFDDA', 'SGFDDA_INTERVAL_M', 'SGFDDA_END_H', 'WEST-EAST_PATCH_START_UNSTAG', 'WEST-EAST_PATCH_END_UNSTAG', 'WEST-EAST_PATCH_START_STAG', 'WEST-EAST_PATCH_END_STAG', 'SOUTH-NORTH_PATCH_START_UNSTAG', 'SOUTH-NORTH_PATCH_END_UNSTAG', @@ -48,6 +48,13 @@ class WriterWrfChem(Writer): 'MAP_PROJ', 'MMINLU', 'NUM_LAND_CAT', 'ISWATER', 'ISLAKE', 'ISICE', 'ISURBAN', 'ISOILWATER'] def unit_change(self, variable, data): + # TODO Documentation + """ + + :param variable: + :param data: + :return: + """ from cf_units import Unit if data is not None: @@ -74,6 +81,11 @@ class WriterWrfChem(Writer): return data def change_variable_attributes(self): + # TODO Documentation + """ + + 
:return: + """ from cf_units import Unit new_variable_dict = {} @@ -105,6 +117,11 @@ class WriterWrfChem(Writer): self.variables_attributes = new_variable_dict def read_global_attributes(self): + # TODO Documentation + """ + + :return: + """ import pandas as pd from warnings import warn as warning @@ -183,7 +200,8 @@ class WriterWrfChem(Writer): except ValueError: print 'A warning has occurred. Check the .err file to get more information.' if settings.rank == 0: - warning('The global attribute {0} is not defined; Using default value {1}'.format(att, atts_dict[att])) + warning('The global attribute {0} is not defined; Using default value {1}'.format( + att, atts_dict[att])) else: settings.write_log('WARNING: Check the .err file to get more information.') @@ -197,6 +215,7 @@ class WriterWrfChem(Writer): return atts_dict def create_global_attributes(self): + # TODO Documentation """ Creates the global attributes that have to be filled. """ @@ -247,6 +266,11 @@ class WriterWrfChem(Writer): return global_attributes def create_times_var(self): + # TODO Documentation + """ + + :return: + """ from datetime import timedelta import netCDF4 @@ -260,6 +284,11 @@ class WriterWrfChem(Writer): return str_out def create_parallel_netcdf(self): + # TODO Documentation + """ + + :return: + """ st_time = timeit.default_timer() settings.write_log("\tCreating parallel NetCDF file.", level=2) netcdf = Dataset(self.path, mode='w', format="NETCDF4") @@ -268,13 +297,15 @@ class WriterWrfChem(Writer): # ===== Dimensions ===== settings.write_log("\t\tCreating NetCDF dimensions.", level=2) netcdf.createDimension('Time', None) - settings.write_log("\t\t\t'Time' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), level=3) + settings.write_log("\t\t\t'Time' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), + level=3) netcdf.createDimension('DateStrLen', 19) settings.write_log("\t\t\t'DateStrLen' dimension: 19", level=3) netcdf.createDimension('west_east', 
self.grid.center_longitudes.shape[1]) settings.write_log("\t\t\t'west_east' dimension: {0}".format(len(self.hours)), level=3) netcdf.createDimension('south_north', self.grid.center_latitudes.shape[0]) - settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), level=3) + settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), + level=3) netcdf.createDimension('emissions_zdim', len(self.levels)) settings.write_log("\t\t\t'emissions_zdim' dimension: {0}".format(len(self.levels)), level=3) @@ -305,6 +336,12 @@ class WriterWrfChem(Writer): settings.write_time('WriterCmaq', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_parallel_netcdf(self, emission_list): + # TODO Documentation + """ + + :param emission_list: + :return: + """ st_time = timeit.default_timer() settings.write_log("\tAppending data to parallel NetCDF file.", level=2) @@ -325,13 +362,20 @@ class WriterWrfChem(Writer): # Correcting NAN if data is None: data = 0 - var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, self.grid.y_lower_bound:self.grid.y_upper_bound] = data + var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, + self.grid.y_lower_bound:self.grid.y_upper_bound] = data settings.write_log("\t\t\t'{0}' variable filled".format(var_name)) netcdf.close() settings.write_time('WriterCmaq', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_serial_netcdf(self, emission_list): + # TODO Documentation + """ + + :param emission_list: + :return: + """ st_time = timeit.default_timer() # Gathering the index @@ -356,7 +400,8 @@ class WriterWrfChem(Writer): netcdf.createDimension('west_east', self.grid.center_longitudes.shape[1]) settings.write_log("\t\t\t'west_east' dimension: {0}".format(len(self.hours)), level=3) netcdf.createDimension('south_north', self.grid.center_latitudes.shape[0]) - settings.write_log("\t\t\t'south_north' dimension: 
{0}".format(self.grid.center_latitudes.shape[0]), level=3) + settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), + level=3) netcdf.createDimension('emissions_zdim', len(self.levels)) settings.write_log("\t\t\t'emissions_zdim' dimension: {0}".format(len(self.levels)), level=3) @@ -409,7 +454,8 @@ class WriterWrfChem(Writer): st_time = timeit.default_timer() index += 1 - var = netcdf.createVariable(var_name, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east',), zlib=self.compress) + var = netcdf.createVariable(var_name, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east',), + zlib=self.compress) var.setncatts(self.variables_attributes[var_name]) var_time = timeit.default_timer() @@ -421,9 +467,13 @@ class WriterWrfChem(Writer): for i in xrange(settings.size): # print 'Resizeing {0}'.format(i) if not i == settings.size - 1: - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) + data[:, :, full_position[i][0]:full_position[i][1], + full_position[i][2]:full_position[i][3]] = \ + np.array(recvbuf[0][displacements[i]: displacements[i + 1]]).reshape(full_shape[i]) else: - data[:, :, full_position[i][0]:full_position[i][1], full_position[i][2]:full_position[i][3]] = np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) + data[:, :, full_position[i][0]:full_position[i][1], + full_position[i][2]:full_position[i][3]] = \ + np.array(recvbuf[0][displacements[i]:]).reshape(full_shape[i]) else: data = 0 var[:] = data diff --git a/hermesv3_gr/tools/netcdf_tools.py b/hermesv3_gr/tools/netcdf_tools.py index d094233..6a50c73 100644 --- a/hermesv3_gr/tools/netcdf_tools.py +++ b/hermesv3_gr/tools/netcdf_tools.py @@ -152,15 +152,11 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, # Bounds if boundary_latitudes is not None: - # print boundary_latitudes.shape - 
# print len(boundary_latitudes[0, 0]) try: netcdf.createDimension('nv', len(boundary_latitudes[0, 0])) except TypeError: netcdf.createDimension('nv', boundary_latitudes.shape[1]) - # sys.exit() - # Time netcdf.createDimension('time', None) @@ -178,7 +174,8 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, u = Unit('hours') # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, + date.second))) time.standard_name = "time" time.calendar = "gregorian" time.long_name = "time" @@ -276,7 +273,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, var.grid_mapping = 'mercator' try: var[:] = variable['data'] - except: + except ValueError: print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) # Grid mapping @@ -300,7 +297,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, mapping.longitude_of_central_meridian = lon_0 mapping.latitude_of_projection_origin = lat_0 elif Mercator: - #Mercator + # Mercator mapping = netcdf.createVariable('mercator', 'i') mapping.grid_mapping_name = "mercator" mapping.longitude_of_projection_origin = lon_0 @@ -322,23 +319,23 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, - levels=None, date=None, hours=None, - boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, 
lat_1_2=None, lon_0=None, lat_0=None): + levels=None, date=None, hours=None, + boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, + regular_latlon=False, + rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + lcc=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): from cf_units import Unit, encode_time import sys from netCDF4 import Dataset import numpy as np - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True + if not (regular_latlon or lcc or rotated): + regular_latlon = True netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") # ===== Dimensions ===== - if RegularLatLon: + if regular_latlon: var_dim = ('lat', 'lon',) # Latitude @@ -362,7 +359,7 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, else: print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) sys.exit(1) - elif Rotated: + elif rotated: var_dim = ('rlat', 'rlon',) # Rotated Latitude @@ -379,7 +376,7 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.createDimension('rlon', len(rotated_lons)) lon_dim = ('rlat', 'rlon',) - elif LambertConformalConic: + elif lcc: var_dim = ('y', 'x',) netcdf.createDimension('y', len(lcc_y)) @@ -416,7 +413,8 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, u = Unit('hours') # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.units = str(u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, + date.second))) time.standard_name = "time" time.calendar = "gregorian" time.long_name = "time" @@ -450,7 +448,7 @@ def 
create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, # print lon_bnds[:].shape, boundary_longitudes.shape lon_bnds[:] = boundary_longitudes - if Rotated: + if rotated: # Rotated Latitude rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) rlat.long_name = "latitude in rotated pole grid" @@ -464,7 +462,7 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, rlon.units = Unit("degrees").symbol rlon.standard_name = "grid_longitude" rlon[:] = rotated_lons - if LambertConformalConic: + if lcc: x = netcdf.createVariable('x', 'd', ('x',), zlib=True) x.units = Unit("km").symbol x.long_name = "x coordinate of projection" @@ -504,11 +502,11 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, var.coordinates = "lat lon" if cell_area is not None: var.cell_measures = 'area: cell_area' - if RegularLatLon: + if regular_latlon: var.grid_mapping = 'crs' - elif Rotated: + elif rotated: var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: + elif lcc: var.grid_mapping = 'Lambert_conformal' # print 'HOURSSSSSSSSSSSSSSSSSSSSS:', hours # if variable['data'] is not 0: @@ -520,19 +518,19 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, var[:] = np.zeros(shape) # Grid mapping - if RegularLatLon: + if regular_latlon: # CRS mapping = netcdf.createVariable('crs', 'i') mapping.grid_mapping_name = "latitude_longitude" mapping.semi_major_axis = 6371000.0 mapping.inverse_flattening = 0 - elif Rotated: + elif rotated: # Rotated pole mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' mapping.grid_north_pole_latitude = north_pole_lat mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: + elif lcc: # CRS mapping = netcdf.createVariable('Lambert_conformal', 'i') mapping.grid_mapping_name = "lambert_conformal_conic" @@ -576,7 +574,3 @@ def calculate_displacements(counts): if __name__ 
== '__main__': pass - - - - -- GitLab From 3e913e13184590a5aead2e03fd569d14916a7578 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Mon, 10 Sep 2018 19:51:11 +0200 Subject: [PATCH 20/51] changed max-line-lenggth for Codacy --- .pylintrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pylintrc b/.pylintrc index 4094c1f..db7741b 100644 --- a/.pylintrc +++ b/.pylintrc @@ -99,7 +99,7 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / stateme [FORMAT] # Maximum number of characters on a single line. -max-line-length=79 +max-line-length=120 # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ -- GitLab From f7e2073c773d286a17ac8317d048c3bf86930887 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Mon, 10 Sep 2018 20:02:29 +0200 Subject: [PATCH 21/51] Correcting Code conventions --- hermesv3_gr/modules/grids/grid_rotated.py | 3 ++- hermesv3_gr/modules/masking/masking.py | 4 ++-- hermesv3_gr/modules/temporal/temporal.py | 8 ++++---- hermesv3_gr/modules/writing/writer.py | 2 +- hermesv3_gr/modules/writing/writer_cmaq.py | 4 ++-- hermesv3_gr/modules/writing/writer_monarch.py | 4 ++-- hermesv3_gr/modules/writing/writer_wrf_chem.py | 4 ++-- hermesv3_gr/tools/coordinates_tools.py | 2 -- 8 files changed, 15 insertions(+), 16 deletions(-) diff --git a/hermesv3_gr/modules/grids/grid_rotated.py b/hermesv3_gr/modules/grids/grid_rotated.py index 02dc6e4..a21fdbc 100644 --- a/hermesv3_gr/modules/grids/grid_rotated.py +++ b/hermesv3_gr/modules/grids/grid_rotated.py @@ -17,9 +17,10 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . 
-import timeit + import sys import os +import timeit import hermesv3_gr.config.settings as settings from grid import Grid diff --git a/hermesv3_gr/modules/masking/masking.py b/hermesv3_gr/modules/masking/masking.py index 98c113c..068c819 100644 --- a/hermesv3_gr/modules/masking/masking.py +++ b/hermesv3_gr/modules/masking/masking.py @@ -17,10 +17,10 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . -import timeit -import hermesv3_gr.config.settings as settings import os +import timeit +import hermesv3_gr.config.settings as settings from warnings import warn as warning diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index 98fd6d3..4f6909f 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -17,11 +17,11 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . -import timeit -import hermesv3_gr.config.settings as settings import os import sys +import timeit +import hermesv3_gr.config.settings as settings import numpy as np @@ -86,7 +86,7 @@ class TemporalDistribution(object): self.timestep_num = timestep_num self.timestep_freq = timestep_freq - self.ending_date = self.calculate_ending_date() + self.ending_date = self.calculates_ending_date() if month_profile_id is not None: if len(month_profile_id) > 4: if os.path.exists(month_profile_id): @@ -142,7 +142,7 @@ class TemporalDistribution(object): settings.write_time('TemporalDistribution', 'Init', timeit.default_timer() - st_time, level=2) - def calculate_ending_date(self): + def calculates_ending_date(self): """ Calculates the date of the last timestep. diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index 36d7185..7fd70d8 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -18,9 +18,9 @@ # along with HERMESv3_GR. 
If not, see . +import sys import timeit from hermesv3_gr.config import settings -import sys class Writer(object): diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 4921417..7da5172 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -18,11 +18,11 @@ # along with HERMESv3_GR. If not, see . +import os +import sys from hermesv3_gr.modules.writing.writer import Writer import timeit from hermesv3_gr.config import settings -import os -import sys import numpy as np from netCDF4 import Dataset from mpi4py import MPI diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index 62cc8fb..98831ad 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -18,11 +18,11 @@ # along with HERMESv3_GR. If not, see . +import os +import sys from hermesv3_gr.modules.writing.writer import Writer import timeit from hermesv3_gr.config import settings -import os -import sys import numpy as np from netCDF4 import Dataset from mpi4py import MPI diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index e64ed52..6711880 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -18,11 +18,11 @@ # along with HERMESv3_GR. If not, see . 
+import os +import sys from hermesv3_gr.modules.writing.writer import Writer import timeit from hermesv3_gr.config import settings -import os -import sys import numpy as np from netCDF4 import Dataset from mpi4py import MPI diff --git a/hermesv3_gr/tools/coordinates_tools.py b/hermesv3_gr/tools/coordinates_tools.py index 20a4d12..51d074a 100644 --- a/hermesv3_gr/tools/coordinates_tools.py +++ b/hermesv3_gr/tools/coordinates_tools.py @@ -19,8 +19,6 @@ import os import sys -# import numpy as np -# import math # Global variables -- GitLab From 47c8420c83c07f0443d6300466402a28b9f9fd4a Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Mon, 10 Sep 2018 21:08:16 +0200 Subject: [PATCH 22/51] Correcting Code conventions --- hermesv3_gr/modules/temporal/temporal.py | 137 ++++++----- preproc/ceds_preproc.py | 52 ++-- preproc/eclipsev5a_preproc.py | 48 ++-- preproc/edgarv432_ap_preproc.py | 54 ++--- preproc/edgarv432_voc_preproc.py | 52 ++-- preproc/emep_preproc.py | 20 +- preproc/gfas12_preproc.py | 22 +- preproc/htapv2_preproc.py | 277 +++++++++++++--------- preproc/tno_mac_iii_preproc.py | 28 +-- preproc/tno_mac_iii_preproc_voc_ratios.py | 140 ++++++++--- preproc/wiedinmyer_preproc.py | 20 +- 11 files changed, 493 insertions(+), 357 deletions(-) diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index 4f6909f..5da069d 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -67,7 +67,6 @@ class TemporalDistribution(object): timezones. 
:type auxiliar_files_dir: str """ - def __init__(self, starting_date, timestep_type, timestep_num, timestep_freq, monthly_profile_path, month_profile_id, daily_profile_path, daily_profile_id, hourly_profile_path, hourly_profile_id, world_info_path, auxiliar_files_dir, grid): @@ -86,7 +85,7 @@ class TemporalDistribution(object): self.timestep_num = timestep_num self.timestep_freq = timestep_freq - self.ending_date = self.calculates_ending_date() + self.ending_date = self.calculate_ending_date() if month_profile_id is not None: if len(month_profile_id) > 4: if os.path.exists(month_profile_id): @@ -142,9 +141,9 @@ class TemporalDistribution(object): settings.write_time('TemporalDistribution', 'Init', timeit.default_timer() - st_time, level=2) - def calculates_ending_date(self): + def calculate_ending_date(self): """ - Calculates the date of the last timestep. + Calculate the date of the last timestep. :return: Date of the last timestep :rtype: datetime.datetime @@ -174,7 +173,7 @@ class TemporalDistribution(object): def calculate_timedelta(self, date): """ - Calculates the difference of time to the next timestep. + Calculate the difference of time to the next timestep. :param date: Date of the current timestep. :type date: datetime.datetime @@ -208,7 +207,7 @@ class TemporalDistribution(object): def get_tz_from_id(self, tz_id): """ - Extracts the timezone (string format) for the given id (int). + Extract the timezone (string format) for the given id (int). :param tz_id: ID of the timezone. :type tz_id: int @@ -216,14 +215,13 @@ class TemporalDistribution(object): :return: Timezone :rtype: str """ - tz = self.world_info_df.time_zone[self.world_info_df.time_zone_code == tz_id].values return tz[0] def get_id_from_tz(self, tz): """ - Extracts the id (int) for the given timezone (string format). + Extract the id (int) for the given timezone (string format). :param tz: Timezone of the ID. 
:type tz: str @@ -231,7 +229,6 @@ class TemporalDistribution(object): :return: ID :rtype: int """ - tz_id = self.world_info_df.time_zone_code[self.world_info_df.time_zone == tz].values try: @@ -247,9 +244,11 @@ class TemporalDistribution(object): return tz_id @staticmethod - def parse_tz(tz): + def parse_tz(timezone): """ - Parses the timezone (string format). It is needed because some libraries have more timezones than others and it + Parse the timezone (string format). + + It is needed because some libraries have more timezones than others and it tries to simplify setting the strange ones into the nearest common one. Examples: 'America/Punta_Arenas': 'America/Santiago', @@ -262,8 +261,8 @@ class TemporalDistribution(object): 'Asia/Tomsk': 'Asia/Novokuznetsk', 'America/Fort_Nelson': 'America/Vancouver' - :param tz: Not parsed timezone. - :type tz: str + :param timezone: Not parsed timezone. + :type timezone: str :return: Parsed timezone :rtype: str @@ -281,14 +280,14 @@ class TemporalDistribution(object): 'Asia/Famagusta': 'Asia/Nicosia', } - if tz in tz_dict.iterkeys(): - tz = tz_dict[tz] + if timezone in tz_dict.iterkeys(): + timezone = tz_dict[timezone] - return tz + return timezone def find_closest_timezone(self, latitude, longitude): """ - Finds the closest timezone for the given coordinates. + Find the closest timezone for the given coordinates. :param latitude: Latitude coordinate to find timezone. :type latitude: float @@ -299,22 +298,21 @@ class TemporalDistribution(object): :return: Nearest timezone of the given coordinates. 
:rtype: str """ - st_time = timeit.default_timer() - dg = 0 - tz = None - while tz is None: - tz = self.tf.closest_timezone_at(lng=longitude, lat=latitude, delta_degree=dg) - dg += 1 + degrees = 0 + timezone = None + while timezone is None: + timezone = self.tf.closest_timezone_at(lng=longitude, lat=latitude, delta_degree=degrees) + degrees += 1 settings.write_time('TemporalDistribution', 'find_closest_timezone', timeit.default_timer() - st_time, level=3) - return tz + return timezone def is_created_netcdf_timezones(self): """ - Checks if the NetCDF of timezones is created + Check if the NetCDF of timezones is created :return: True if it is already created. :rtype: bool @@ -323,7 +321,7 @@ class TemporalDistribution(object): def create_netcdf_timezones(self, grid): """ - Creates a NetCDF with the timezones in the resolution of the given grid. + Create a NetCDF with the timezones in the resolution of the given grid. :param grid: Grid object with the coordinates. :type grid: Grid @@ -344,20 +342,17 @@ class TemporalDistribution(object): num = 0 points = zip(lat.flatten(), lon.flatten()) - # points = points[534000:] - # print len(points) + for lat_aux, lon_aux in points: num += 1 settings.write_log("\t\t\tlat:{0}, lon:{1} ({2}/{3})".format(lat_aux, lon_aux, num, len(points)), level=3) - tz = self.find_closest_timezone(lat_aux, lon_aux) - tz_id = self.get_id_from_tz(tz) + timezone = self.find_closest_timezone(lat_aux, lon_aux) + tz_id = self.get_id_from_tz(timezone) dst_var.append(tz_id) dst_var = np.array(dst_var) dst_var = dst_var.reshape((1,) + lat.shape) dst_var = settings.comm.gather(dst_var, root=0) if settings.rank == 0: - for var in dst_var: - print var.shape total_lat = np.concatenate(total_lat, axis=1) total_lon = np.concatenate(total_lon, axis=1) dst_var = np.concatenate(dst_var, axis=2) @@ -372,6 +367,13 @@ class TemporalDistribution(object): return True def read_gridded_profile(self, path, value): + # TODO Documentation + """ + + :param path: + :param 
value: + :return: + """ from netCDF4 import Dataset st_time = timeit.default_timer() @@ -391,7 +393,7 @@ class TemporalDistribution(object): def calculate_timezones(self): """ - Extracts the timezones ID's from the NetCDF and convert them to the timezone (str). + Calculate the timezones ID's from the NetCDF and convert them to the timezone (str). :return: Array with the timezone of each cell. :rtype: numpy.chararray @@ -408,8 +410,8 @@ class TemporalDistribution(object): tz_list = np.chararray(timezones.shape, itemsize=32) for id_aux in xrange(timezones.min(), timezones.max() + 1): try: - tz = self.get_tz_from_id(id_aux) - tz_list[timezones == id_aux] = tz + timezone = self.get_tz_from_id(id_aux) + tz_list[timezones == id_aux] = timezone except: pass settings.write_time('TemporalDistribution', 'calculate_timezones', timeit.default_timer() - st_time, level=3) @@ -418,7 +420,7 @@ class TemporalDistribution(object): def calculate_2d_temporal_factors(self, date): """ - Calculates the temporal factor to correct the input data of the given date for each cell. + Calculate the temporal factor to correct the input data of the given date for each cell. :param date: Date of the current timestep. 
:type date: datetime.datetime @@ -449,7 +451,7 @@ class TemporalDistribution(object): df['hour'] = df.index.hour if self.hourly_profile is not None: - if type(self.hourly_profile) is dict: + if isinstance(self.hourly_profile, dict): df['hour_factor'] = df['hour'].map(self.hourly_profile) else: profile_ids = self.parse_hourly_profile_id() @@ -481,13 +483,13 @@ class TemporalDistribution(object): if self.monthly_profile is None: df['month_factor'] = 1 - elif type(self.monthly_profile) == dict: + elif isinstance(self.monthly_profile, dict): df['month_factor'] = df['month'].map(self.monthly_profile) - elif type(self.monthly_profile) == np.ndarray: + elif isinstance(self.monthly_profile, np.ndarray): for m, df_aux in df.groupby('month'): try: df.loc[df['month'] == m, 'month_factor'] = \ - self.monthly_profile[m-1, df.loc[df['month'] == m, 'i'].values] + self.monthly_profile[m - 1, df.loc[df['month'] == m, 'i'].values] except IndexError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: @@ -511,7 +513,7 @@ class TemporalDistribution(object): def calculate_3d_temporal_factors(self): """ - Calculates the temporal factor to correct the input data of the given date for each cell. + Calculate the temporal factor to correct the input data of the given date for each cell. :return: 3D array with the factors to correct the input data to the date of this timestep. 
:rtype: numpy.array @@ -520,19 +522,19 @@ class TemporalDistribution(object): settings.write_log("\tCalculating temporal factors.", level=2) factors = [] - date_aux = self.starting_date + date = self.starting_date count = 0 - while date_aux <= self.ending_date: + while date <= self.ending_date: count += 1 settings.write_log("\t\t{0} temporal factor ({1}/{2}).".format( - date_aux.strftime('%Y/%m/%d %H:%M:%S'), count, self.timestep_num), level=3) + date.strftime('%Y/%m/%d %H:%M:%S'), count, self.timestep_num), level=3) - factors.append(self.calculate_2d_temporal_factors(date_aux)) + factors.append(self.calculate_2d_temporal_factors(date)) - d = date_aux - self.starting_date - self.hours_since.append(d.seconds / 3600 + d.days * 24) # 3600 seconds per hour - date_aux = date_aux + self.calculate_timedelta(date_aux) + date_aux = date - self.starting_date + self.hours_since.append(date_aux.seconds / 3600 + date_aux.days * 24) # 3600 seconds per hour + date = date + self.calculate_timedelta(date) factors = np.array(factors) @@ -542,7 +544,8 @@ class TemporalDistribution(object): def parse_hourly_profile_id(self): """ - Parses the hourly profile ID to get a dictionary with the ID for "weekday", "saturday" and "sunday" + Parse the hourly profile ID to get a dictionary with the ID for "weekday", "saturday" and "sunday" + :return: """ import re @@ -557,7 +560,8 @@ class TemporalDistribution(object): def get_temporal_hourly_profile(self, profile_id, date=None): """ - Extracts the hourly profile of the given ID in a dictionary format. + Extract the hourly profile of the given ID in a dictionary format. + The hour (0 to 23) is the key (int) and the value (float) is the factor. :param profile_id: ID of the hourly profile to use. 
@@ -572,8 +576,6 @@ class TemporalDistribution(object): import pandas as pd st_time = timeit.default_timer() - # settings.write_log("\t\t\tGetting temporal hourly profile '{0}' from {1} .".format( - # profile_id, self.hourly_profile_path), level=3) if date is None: df = pd.read_csv(self.hourly_profile_path) try: @@ -587,7 +589,6 @@ class TemporalDistribution(object): profile.pop('TP_H', None) profile = {int(k): float(v) for k, v in profile.items()} else: - # print self.hourly_profile profile = None settings.write_time('TemporalDistribution', 'get_temporal_hourly_profile', timeit.default_timer() - st_time, level=3) @@ -596,7 +597,8 @@ class TemporalDistribution(object): def get_temporal_daily_profile(self, date): """ - Extracts the daily profile of the given ID in a dictionary format. + Extract the daily profile of the given ID in a dictionary format. + The weekday (0 to 6) is the key (int) and the value (float) is the factor. :param date: Date of the timestep to simulate. @@ -633,8 +635,10 @@ class TemporalDistribution(object): def calculate_rebalance_factor(self, profile, date): """ - Calculates the necessary factor make consistent the full month data. This is needed for the months that if you - sum the daily factor of each day of the month it doesn't sum as the number of days of the month. + Calculate the necessary factor make consistent the full month data. + + This is needed for the months that if you sum the daily factor of each day of the month it doesn't sum as + the number of days of the month. :param profile: Daily profile. :type profile: dict @@ -657,8 +661,9 @@ class TemporalDistribution(object): @staticmethod def calculate_weekday_factor_full_month(profile, weekdays): + # TODO Documentation """ - Operates with all the days of the month to get the sum of daily factors of the full month. + Operate with all the days of the month to get the sum of daily factors of the full month. 
:param profile: :param weekdays: @@ -679,6 +684,12 @@ class TemporalDistribution(object): @staticmethod def calculate_weekdays(date): + # TODO Documentation + """ + + :param date: + :return: + """ from calendar import monthrange, weekday, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY st_time = timeit.default_timer() @@ -698,7 +709,8 @@ class TemporalDistribution(object): @staticmethod def get_temporal_monthly_profile(profile_path, profile_id): """ - Extracts the monthly profile of the given ID in a dictionary format. + Extract the monthly profile of the given ID in a dictionary format. + The month (1 to 12) is the key (int) and the value (float) is the factor. :param profile_path: Path to the file that contains all the monthly profiles. @@ -739,6 +751,15 @@ class TemporalDistribution(object): @staticmethod def calculate_delta_hours(st_date, time_step_type, time_step_num, time_step_freq): + # TODO Documentation + """ + + :param st_date: + :param time_step_type: + :param time_step_num: + :param time_step_freq: + :return: + """ from datetime import timedelta from calendar import monthrange, isleap diff --git a/preproc/ceds_preproc.py b/preproc/ceds_preproc.py index 068c261..1b13729 100755 --- a/preproc/ceds_preproc.py +++ b/preproc/ceds_preproc.py @@ -23,20 +23,20 @@ import sys # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/jgcri/ceds/original_files' -output_path = '/esarchive/recon/jgcri/ceds' -list_pollutants = ['BC', 'CO', 'NH3', 'NMVOC', 'NOx', 'OC', 'SO2'] -voc_pollutants = ['VOC01', 'VOC02', 'VOC03', 'VOC04', 'VOC05', 'VOC06', 'VOC07', 'VOC08', 'VOC09', 'VOC12', 'VOC13', +INPUT_PATH = '/esarchive/recon/jgcri/ceds/original_files' +OUTPUT_PATH = '/esarchive/recon/jgcri/ceds' +LIST_POLLUTANTS = ['BC', 'CO', 'NH3', 'NMVOC', 'NOx', 'OC', 'SO2'] +VOC_POLLUTANTS = ['VOC01', 'VOC02', 'VOC03', 'VOC04', 'VOC05', 'VOC06', 'VOC07', 'VOC08', 'VOC09', 'VOC12', 'VOC13', 'VOC14', 'VOC15', 'VOC16', 'VOC17', 
'VOC18', 'VOC19', 'VOC20', 'VOC21', 'VOC22', 'VOC23', 'VOC24', 'VOC25'] -list_sectors = ['agriculture', 'energy', 'industry', 'transport', 'residential', 'solvents', 'waste', 'ships'] -# list_years = from 1950 to 2014 -list_years = [2010] -input_name = '-em-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim_gr_01-12.nc' -voc_input_name = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim-supplemental-data_gr_01-12.nc' -do_air = True -air_input_name = '-em-AIR-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26_gr_01-12.nc' +LIST_SECTORS = ['agriculture', 'energy', 'industry', 'transport', 'residential', 'solvents', 'waste', 'ships'] +# LIST_YEARS = from 1950 to 2014 +LIST_YEARS = [2010] +INPUT_NAME = '-em-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim_gr_01-12.nc' +VOC_INPUT_NAME = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim-supplemental-data_gr_01-12.nc' +DO_AIR = True +AIR_INPUT_NAME = '-em-AIR-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26_gr_01-12.nc' # ============================================================== @@ -120,11 +120,11 @@ def get_input_name(pollutant, year, air=False): :rtype: str """ if air: - file_name = air_input_name.replace('', pollutant) - elif pollutant in list_pollutants: - file_name = input_name.replace('', pollutant) - elif pollutant in voc_pollutants: - file_name = voc_input_name.replace('', '{0}-{1}'.format(pollutant, voc_to_vocname(pollutant))) + file_name = AIR_INPUT_NAME.replace('', pollutant) + elif pollutant in LIST_POLLUTANTS: + file_name = INPUT_NAME.replace('', pollutant) + elif pollutant in VOC_POLLUTANTS: + file_name = VOC_INPUT_NAME.replace('', '{0}-{1}'.format(pollutant, voc_to_vocname(pollutant))) else: raise ValueError('Pollutant {0} not in pollutant list or voc list'.format(pollutant)) @@ -139,7 +139,7 @@ def get_input_name(pollutant, year, air=False): else: file_name = file_name.replace('', str(2000)).replace('', str(2014)) - return 
os.path.join(input_path, file_name) + return os.path.join(INPUT_PATH, file_name) def get_full_year_data(file_name, pollutant, sector, year, air=False): @@ -179,9 +179,9 @@ def get_full_year_data(file_name, pollutant, sector, year, air=False): i_time = np.where(time_array == datetime(year=year, month=1, day=1))[0][0] if air: data = nc.variables['AIR'][i_time:i_time + 12, :, :, :] - elif pollutant in list_pollutants: + elif pollutant in LIST_POLLUTANTS: data = nc.variables['{0}_em_anthro'.format(pollutant)][i_time:i_time+12, sector_to_index(sector), :, :] - elif pollutant in voc_pollutants: + elif pollutant in VOC_POLLUTANTS: data = nc.variables['{0}-{1}_em_speciated_VOC'.format( pollutant, voc_to_vocname(pollutant).replace('-', '_'))][i_time:i_time+12, sector_to_index(sector), :, :] else: @@ -222,14 +222,14 @@ def do_transformation(year): """ from datetime import datetime from hermesv3_gr.tools.netcdf_tools import extract_vars, get_grid_area, write_netcdf - for pollutant in list_pollutants + voc_pollutants: + for pollutant in LIST_POLLUTANTS + VOC_POLLUTANTS: file_name = get_input_name(pollutant, year) if os.path.exists(file_name): c_lats, c_lons, b_lats, b_lons = extract_vars(file_name, ['lat', 'lon', 'lat_bnds', 'lon_bnds']) cell_area = get_grid_area(file_name) global_attributes = get_global_attributes(file_name) - for sector in list_sectors: + for sector in LIST_SECTORS: data = get_full_year_data(file_name, pollutant, sector, year) if pollutant == 'NOx': @@ -237,7 +237,7 @@ def do_transformation(year): else: pollutant_name = pollutant.lower() - file_path = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant_name, sector)) + file_path = os.path.join(OUTPUT_PATH, 'monthly_mean', '{0}_{1}'.format(pollutant_name, sector)) if not os.path.exists(file_path): os.makedirs(file_path) @@ -267,7 +267,7 @@ def do_air_transformation(year): from datetime import datetime from hermesv3_gr.tools.netcdf_tools import extract_vars, get_grid_area, write_netcdf - for 
pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: file_name = get_input_name(pollutant, year, air=True) if os.path.exists(file_name): c_lats, c_lons, b_lats, b_lons = extract_vars(file_name, ['lat', 'lon', 'lat_bnds', 'lon_bnds']) @@ -283,7 +283,7 @@ def do_air_transformation(year): pollutant_name = pollutant.lower() for sector in ['air_lto', 'air_cds', 'air_crs']: - file_path = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant_name, sector)) + file_path = os.path.join(OUTPUT_PATH, 'monthly_mean', '{0}_{1}'.format(pollutant_name, sector)) if not os.path.exists(file_path): os.makedirs(file_path) @@ -314,7 +314,7 @@ def do_air_transformation(year): if __name__ == '__main__': - for y in list_years: + for y in LIST_YEARS: # do_transformation(y) - if do_air: + if DO_AIR: do_air_transformation(y) diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py index 6a359b5..157dd81 100755 --- a/preproc/eclipsev5a_preproc.py +++ b/preproc/eclipsev5a_preproc.py @@ -26,14 +26,14 @@ from cf_units import Unit # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/iiasa/eclipsev5a/original_files' -output_path = '/esarchive/recon/iiasa/eclipsev5a/original_files/test' -input_name = 'ECLIPSE_base_CLE_V5a_.nc' -input_name_flaring = 'ECLIPSE_V5a_baseline_CLE_flaring.nc' -input_name_ship = "ship_CLE_emis_.nc" -monthly_pattern_file = 'ECLIPSEv5_monthly_patterns.nc' -list_years = [1990, 1995, 2000, 2005, 2010, 2015, 2020, 2025, 2030, 2040, 2050] -list_pollutants = ['BC', 'CH4', 'CO', 'NH3', 'NOx', 'OC', 'OM', 'PM10', 'PM25', 'SO2', 'VOC'] +INPUT_PATH = '/esarchive/recon/iiasa/eclipsev5a/original_files' +OUTPUT_PATH = '/esarchive/recon/iiasa/eclipsev5a/original_files/test' +INPUT_NAME = 'ECLIPSE_base_CLE_V5a_.nc' +INPUT_NAME_FLARING = 'ECLIPSE_V5a_baseline_CLE_flaring.nc' +INPUT_NAME_SHIPS = "ship_CLE_emis_.nc" +MONTHLY_PATTERN_FILE = 'ECLIPSEv5_monthly_patterns.nc' +LIST_YEARS = [1990, 1995, 2000, 
2005, 2010, 2015, 2020, 2025, 2030, 2040, 2050] +LIST_POLLUTANTS = ['BC', 'CH4', 'CO', 'NH3', 'NOx', 'OC', 'OM', 'PM10', 'PM25', 'SO2', 'VOC'] # ============================================================== @@ -207,7 +207,7 @@ def extract_month_profile_by_sector(sector, month, pollutant=None): else: profile_name = sector_dict[sector] - nc_profiles = Dataset(os.path.join(input_path, monthly_pattern_file), mode='r') + nc_profiles = Dataset(os.path.join(INPUT_PATH, MONTHLY_PATTERN_FILE), mode='r') profile = nc_profiles.variables[profile_name][month, :, :] @@ -219,7 +219,7 @@ def extract_month_profile_by_sector(sector, month, pollutant=None): def get_output_name(pollutant, sector, year, month): # TODO Docuemtnation - output_path_aux = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(pollutant, sector), ) + output_path_aux = os.path.join(OUTPUT_PATH, 'monthly_mean', '{0}_{1}'.format(pollutant, sector), ) if not(os.path.exists(output_path_aux)): os.makedirs(output_path_aux) @@ -229,7 +229,7 @@ def get_output_name(pollutant, sector, year, month): def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area): # TODO Docuemtnation - for i in xrange(len(list_years)): + for i in xrange(len(LIST_YEARS)): for month in xrange(12): # print i, list_years[i], month + 1 @@ -239,7 +239,7 @@ def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area) pollutant_name = 'nmvoc' else: pollutant_name = pollutant.lower() - output_name = get_output_name(pollutant_name.lower(), sector.lower(), list_years[i], month + 1) + output_name = get_output_name(pollutant_name.lower(), sector.lower(), LIST_YEARS[i], month + 1) profile = extract_month_profile_by_sector(sector, month, pollutant) data_aux = data[i, :, :] * profile # print factor @@ -256,13 +256,13 @@ def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area) 'units': Unit(var_units), }] write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, - 
datetime(year=list_years[i], month=month + 1, day=1)) + datetime(year=LIST_YEARS[i], month=month + 1, day=1)) def do_transformation(): # TODO Documentation - for pollutant in list_pollutants: - file_name = os.path.join(input_path, input_name.replace('', pollutant)) + for pollutant in LIST_POLLUTANTS: + file_name = os.path.join(INPUT_PATH, INPUT_NAME.replace('', pollutant)) print file_name nc = Dataset(file_name, mode='r') c_lats = nc.variables['lat'][:] @@ -278,7 +278,7 @@ def do_transformation(): def get_flaring_output_name(pollutant, sector, year): # TODO Docuemtnation - output_path_aux = os.path.join(output_path, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) + output_path_aux = os.path.join(OUTPUT_PATH, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) if not(os.path.exists(output_path_aux)): os.makedirs(output_path_aux) @@ -309,17 +309,17 @@ def get_flaring_var_name(nc_var): def do_flaring_transformation(): # TODO Documentation - nc_in = Dataset(os.path.join(input_path, input_name_flaring), mode='r') + nc_in = Dataset(os.path.join(INPUT_PATH, INPUT_NAME_FLARING), mode='r') c_lats = nc_in.variables['lat'][:] c_lons = nc_in.variables['lon'][:] - cell_area = get_grid_area(os.path.join(input_path, input_name_flaring)) + cell_area = get_grid_area(os.path.join(INPUT_PATH, INPUT_NAME_FLARING)) for var in nc_in.variables: var_name = get_flaring_var_name(var) if var_name is not None: data = nc_in.variables[var][:] data = np.nan_to_num(data) - for i in xrange(len(list_years)): - output_name = get_flaring_output_name(var_name, 'flaring', list_years[i]) + for i in xrange(len(LIST_YEARS)): + output_name = get_flaring_output_name(var_name, 'flaring', LIST_YEARS[i]) data_aux = data[i, :, :] data_aux = (data_aux * year_factor) / cell_area data_aux = data_aux.reshape((1,) + data_aux.shape) @@ -330,13 +330,13 @@ def do_flaring_transformation(): 'units': Unit(var_units), }] write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, - datetime(year=list_years[i], 
month=1, day=1)) + datetime(year=LIST_YEARS[i], month=1, day=1)) nc_in.close() def get_ship_output_name(pollutant, sector, year): # TODO Docuemntation - output_path_aux = os.path.join(output_path, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) + output_path_aux = os.path.join(OUTPUT_PATH, 'yearly_mean', '{0}_{1}'.format(pollutant, sector), ) if not(os.path.exists(output_path_aux)): os.makedirs(output_path_aux) @@ -366,8 +366,8 @@ def get_ship_var_name(nc_var): def do_ship_transformation(): # TODO Documentation - for year in list_years: - in_path = os.path.join(input_path, input_name_ship.replace('', str(year))) + for year in LIST_YEARS: + in_path = os.path.join(INPUT_PATH, INPUT_NAME_SHIPS.replace('', str(year))) nc_in = Dataset(in_path, mode='r') c_lats = nc_in.variables['lat'][:] c_lons = nc_in.variables['lon'][:] diff --git a/preproc/edgarv432_ap_preproc.py b/preproc/edgarv432_ap_preproc.py index 34f77af..2f79e9c 100755 --- a/preproc/edgarv432_ap_preproc.py +++ b/preproc/edgarv432_ap_preproc.py @@ -25,22 +25,22 @@ from warnings import warn as warning # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/jrc/edgarv432_ap/original_files/' -output_path = '/esarchive/recon/jrc/edgarv432_ap' -list_pollutants = ['BC', 'CO', 'NH3', 'NOx', 'OC', 'PM10', 'PM2.5_bio', 'PM2.5_fossil', 'SO2', 'NMVOC'] -# list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, +INPUT_PATH = '/esarchive/recon/jrc/edgarv432_ap/original_files/' +OUTPUT_PATH = '/esarchive/recon/jrc/edgarv432_ap' +LIST_POLLUTANTS = ['BC', 'CO', 'NH3', 'NOx', 'OC', 'PM10', 'PM2.5_bio', 'PM2.5_fossil', 'SO2', 'NMVOC'] +# LIST_YEARS = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, # 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, # 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] -list_years = 
[2012] +LIST_YEARS = [2012] # To do yearly emissions -process_yearly = True -yearly_input_name = 'yearly/v432___.0.1x0.1.nc' +PROCESS_YEARLY = True +YEARLY_INPUT_NAME = 'yearly/v432___.0.1x0.1.nc' # To process monthly emissions, 2010 directly from monthly_input_name and other years calculated using bla bla bla -process_monthly = True -monthly_input_name = 'monthly/v432__2010__.0.1x0.1.nc' -monthly_pattern_file = 'temporal_profiles/v432_FM_.0.1x0.1.nc' +PROCESS_MONTHLY = True +MONTHLY_INPUT_NAME = 'monthly/v432__2010__.0.1x0.1.nc' +MONTHLY_PATTERN_FILE = 'temporal_profiles/v432_FM_.0.1x0.1.nc' # ============================================================== """ @@ -225,11 +225,11 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr def do_yearly_transformation(year): # TODO Documentation - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for ipcc in ipcc_to_sector_dict().keys(): file_path = os.path.join( - input_path, - yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + INPUT_PATH, + YEARLY_INPUT_NAME.replace('', pollutant).replace('', str(year)).replace('', ipcc)) if os.path.exists(file_path): @@ -263,7 +263,7 @@ def do_yearly_transformation(year): 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(output_path, 'yearly_mean', pollutant.lower() + '_' + sector.lower()) + out_path_aux = os.path.join(OUTPUT_PATH, 'yearly_mean', pollutant.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) write_netcdf(os.path.join(out_path_aux, '{0}_{1}.nc'.format(pollutant.lower(), year)), @@ -277,11 +277,11 @@ def do_yearly_transformation(year): def do_monthly_transformation(year): # TODO Documentation - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for ipcc in ipcc_to_sector_dict().keys(): file_path = os.path.join( - input_path, - yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + 
INPUT_PATH, + YEARLY_INPUT_NAME.replace('', pollutant).replace('', str(year)).replace('', ipcc)) if os.path.exists(file_path): @@ -315,11 +315,11 @@ def do_monthly_transformation(year): 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) + out_path_aux = os.path.join(OUTPUT_PATH, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - nc_month_factors = Dataset(os.path.join(input_path, monthly_pattern_file.replace('', sector))) + nc_month_factors = Dataset(os.path.join(INPUT_PATH, MONTHLY_PATTERN_FILE.replace('', sector))) month_factors = nc_month_factors.variables[sector][:] for month in xrange(1, 12 + 1, 1): data_aux = data * month_factors[month - 1, :, :] @@ -336,12 +336,12 @@ def do_monthly_transformation(year): def do_2010_monthly_transformation(): # TODO Documentation - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for ipcc in ipcc_to_sector_dict().keys(): for month in xrange(1, 12 + 1, 1): file_path = os.path.join( - input_path, - monthly_input_name.replace('', pollutant).replace('', + INPUT_PATH, + MONTHLY_INPUT_NAME.replace('', pollutant).replace('', str(month)).replace('', ipcc)) if os.path.exists(file_path): @@ -374,7 +374,7 @@ def do_2010_monthly_transformation(): 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) + out_path_aux = os.path.join(OUTPUT_PATH, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), 2010, @@ -389,12 +389,12 @@ def do_2010_monthly_transformation(): if __name__ == '__main__': - if process_yearly: - for y in list_years: + if PROCESS_YEARLY: + for y in LIST_YEARS: do_yearly_transformation(y) - if 
process_monthly: - for y in list_years: + if PROCESS_MONTHLY: + for y in LIST_YEARS: if y == 2010: do_2010_monthly_transformation() else: diff --git a/preproc/edgarv432_voc_preproc.py b/preproc/edgarv432_voc_preproc.py index a42d8aa..3d7d116 100755 --- a/preproc/edgarv432_voc_preproc.py +++ b/preproc/edgarv432_voc_preproc.py @@ -25,24 +25,24 @@ from warnings import warn as warning # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/jrc/edgarv432_voc/original_files/' -output_path = '/esarchive/recon/jrc/edgarv432_voc' -list_pollutants = ['voc1', 'voc2', 'voc3', 'voc4', 'voc5', 'voc6', 'voc7', 'voc8', 'voc9', 'voc10', 'voc11', 'voc12', +INPUT_PATH = '/esarchive/recon/jrc/edgarv432_voc/original_files/' +OUTPUT_PATH = '/esarchive/recon/jrc/edgarv432_voc' +LIST_POLLUTANTS = ['voc1', 'voc2', 'voc3', 'voc4', 'voc5', 'voc6', 'voc7', 'voc8', 'voc9', 'voc10', 'voc11', 'voc12', 'voc13', 'voc14', 'voc15', 'voc16', 'voc17', 'voc18', 'voc19', 'voc20', 'voc21', 'voc22', 'voc23', 'voc24', 'voc25'] # list_years = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, # 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, # 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] -list_years = [2010] +LIST_YEARS = [2010] # To do yearly emissions -process_yearly = True -yearly_input_name = 'yearly/v432_VOC_spec___.0.1x0.1.nc' +PROCESS_YEARLY = True +YEARLY_INPUT_NAME = 'yearly/v432_VOC_spec___.0.1x0.1.nc' # To process monthly emissions, 2010 directly from monthly_input_name and other years calculated using bla bla bla -process_monthly = False -monthly_input_name = 'monthly/v432_VOC_spec__2010__.0.1x0.1.nc' -monthly_pattern_file = 'temporal_profiles/v432_FM_.0.1x0.1.nc' +PROCESS_MONTHLY = False +MONTHLY_INPUT_NAME = 'monthly/v432_VOC_spec__2010__.0.1x0.1.nc' +MONTHLY_PATTERN_FILE = 'temporal_profiles/v432_FM_.0.1x0.1.nc' # 
============================================================== """ @@ -222,11 +222,11 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr def do_yearly_transformation(year): # TODO Documentation print year - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for ipcc in ipcc_to_sector_dict().keys(): file_path = os.path.join( - input_path, - yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + INPUT_PATH, + YEARLY_INPUT_NAME.replace('', pollutant).replace('', str(year)).replace('', ipcc)) if os.path.exists(file_path): @@ -253,7 +253,7 @@ def do_yearly_transformation(year): 'units': 'kg.m-2.s-1', 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(output_path, 'yearly_mean', pollutant_aux.lower() + '_' + sector.lower()) + out_path_aux = os.path.join(OUTPUT_PATH, 'yearly_mean', pollutant_aux.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) # print os.path.join(out_path_aux, '{0}_{1}.nc'.format(pollutant_aux.lower(), year)) @@ -269,11 +269,11 @@ def do_yearly_transformation(year): def do_monthly_transformation(year): # TODO Documentation print year - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for ipcc in ipcc_to_sector_dict().keys(): file_path = os.path.join( - input_path, - yearly_input_name.replace('', pollutant).replace('', str(year)).replace('', + INPUT_PATH, + YEARLY_INPUT_NAME.replace('', pollutant).replace('', str(year)).replace('', ipcc)) if os.path.exists(file_path): @@ -302,11 +302,11 @@ def do_monthly_transformation(year): 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(output_path, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) + out_path_aux = os.path.join(OUTPUT_PATH, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - nc_month_factors = 
Dataset(os.path.join(input_path, monthly_pattern_file.replace('', sector))) + nc_month_factors = Dataset(os.path.join(INPUT_PATH, MONTHLY_PATTERN_FILE.replace('', sector))) month_factors = nc_month_factors.variables[sector][:] for month in xrange(1, 12 + 1, 1): data_aux = data * month_factors[month - 1, :, :] @@ -323,12 +323,12 @@ def do_monthly_transformation(year): def do_2010_monthly_transformation(): # TODO Documentation - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for ipcc in ipcc_to_sector_dict().keys(): for month in xrange(1, 12 + 1, 1): file_path = os.path.join( - input_path, - monthly_input_name.replace('', pollutant).replace('', + INPUT_PATH, + MONTHLY_INPUT_NAME.replace('', pollutant).replace('', str(month)).replace('', ipcc)) if os.path.exists(file_path): @@ -358,7 +358,7 @@ def do_2010_monthly_transformation(): 'grid_mapping': 'crs'} out_path_aux = os.path.join( - output_path, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) + OUTPUT_PATH, 'monthly_mean', pollutant_aux.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format( @@ -373,12 +373,12 @@ def do_2010_monthly_transformation(): if __name__ == '__main__': - if process_yearly: - for y in list_years: + if PROCESS_YEARLY: + for y in LIST_YEARS: do_yearly_transformation(y) - if process_monthly: - for y in list_years: + if PROCESS_MONTHLY: + for y in LIST_YEARS: if y == 2010: do_2010_monthly_transformation() else: diff --git a/preproc/emep_preproc.py b/preproc/emep_preproc.py index e4f526a..6df1d37 100755 --- a/preproc/emep_preproc.py +++ b/preproc/emep_preproc.py @@ -24,12 +24,12 @@ from datetime import datetime # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/ceip/emepv18/original_files' -output_path = '/esarchive/recon/ceip/emepv18/yearly_mean' -input_name = '__2018_GRID_.txt' +INPUT_PATH = 
'/esarchive/recon/ceip/emepv18/original_files' +OUTPUT_PATH = '/esarchive/recon/ceip/emepv18/yearly_mean' +INPUT_NAME = '__2018_GRID_.txt' # list_years = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016] -list_years = [2015] -list_pollutants = ['NOx', 'NMVOC', 'SOx', 'NH3', 'PM2_5', 'PM10', 'CO'] +LIST_YEARS = [2015] +LIST_POLLUTANTS = ['NOx', 'NMVOC', 'SOx', 'NH3', 'PM2_5', 'PM10', 'CO'] # ============================================================== @@ -83,11 +83,11 @@ def do_transformation(year): unit_factor = 1000./(365.*24.*3600.) # From Mg/year to Kg/s - for pollutant in list_pollutants: + for pollutant in LIST_POLLUTANTS: for sector in get_sectors(): in_file = os.path.join( - input_path, - input_name.replace('', str(year)).replace('', sector).replace('', pollutant)) + INPUT_PATH, + INPUT_NAME.replace('', str(year)).replace('', sector).replace('', pollutant)) if os.path.exists(in_file): print in_file @@ -123,7 +123,7 @@ def do_transformation(year): element['data'] = element['data'].reshape((1,) + element['data'].shape) - complete_output_dir = os.path.join(output_path, '{0}_{1}'.format(element['name'], sector.lower())) + complete_output_dir = os.path.join(OUTPUT_PATH, '{0}_{1}'.format(element['name'], sector.lower())) if not os.path.exists(complete_output_dir): os.makedirs(complete_output_dir) complete_output_dir = os.path.join(complete_output_dir, '{0}_{1}.nc'.format(element['name'], year)) @@ -148,5 +148,5 @@ def do_transformation(year): if __name__ == '__main__': - for y in list_years: + for y in LIST_YEARS: do_transformation(y) diff --git a/preproc/gfas12_preproc.py b/preproc/gfas12_preproc.py index ef2de31..b2591aa 100755 --- a/preproc/gfas12_preproc.py +++ b/preproc/gfas12_preproc.py @@ -26,14 +26,14 @@ import datetime from datetime import datetime, timedelta # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/' 
-input_name = 'ga_.grb' -output_path = '/esarchive/recon/ecmwf/gfas' +INPUT_PATH = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/' +INPUT_NAME = 'ga_.grb' +OUTPUT_PATH = '/esarchive/recon/ecmwf/gfas' -starting_date = datetime(year=2018, month=8, day=29) -ending_date = datetime(year=2018, month=8, day=29) +STARTING_DATE = datetime(year=2018, month=8, day=29) +ENDIND_DATE = datetime(year=2018, month=8, day=29) -parameters_file = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/GFAS_Parameters.csv' +PARAMETERS_FILE = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecmf/GFAS_Parameters.csv' # ============================================================== @@ -261,13 +261,13 @@ def do_var_list(variables_file): if __name__ == '__main__': - var_list = do_var_list(parameters_file) + var_list = do_var_list(PARAMETERS_FILE) - date_aux = starting_date - while date_aux <= ending_date: - f = os.path.join(input_path, input_name.replace('', date_aux.strftime('%Y%m%d'))) + date_aux = STARTING_DATE + while date_aux <= ENDIND_DATE: + f = os.path.join(INPUT_PATH, INPUT_NAME.replace('', date_aux.strftime('%Y%m%d'))) if os.path.isfile(f): - do_transformation(f, date_aux, output_path, var_list) + do_transformation(f, date_aux, OUTPUT_PATH, var_list) else: print 'ERROR: file {0} not found'.format(f) diff --git a/preproc/htapv2_preproc.py b/preproc/htapv2_preproc.py index d178c71..5961919 100755 --- a/preproc/htapv2_preproc.py +++ b/preproc/htapv2_preproc.py @@ -19,27 +19,28 @@ import os -import sys # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/jrc/htapv2/original_files' -output_path = '/esarchive/recon/jrc/htapv2' +INPUT_PATH = '/esarchive/recon/jrc/htapv2/original_files' +OUTPUT_PATH = '/esarchive/recon/jrc/htapv2' -input_name = 'edgar_HTAP__emi___.0.1x0.1.nc' -input_name_air = 'edgar_HTAP_emi___.0.1x0.1.nc' -input_name_ships = 'edgar_HTAP__emi_SHIPS_.0.1x0.1.nc' -#HTAP auxiliary NMVOC emission 
data for the industry sub-sectors (http://iek8wikis.iek.fz-juelich.de/HTAPWiki/WP1.1?highlight=%28%28WP1.1%29%29) -input_name_nmvoc_industry = 'HTAPv2_NMVOC___.0.1x0.1.nc' +INPUT_NAME = 'edgar_HTAP__emi___.0.1x0.1.nc' +INPUT_NAME_AIR = 'edgar_HTAP_emi___.0.1x0.1.nc' +INPUT_NAME_SHIPS = 'edgar_HTAP__emi_SHIPS_.0.1x0.1.nc' +# HTAP auxiliary NMVOC emission data for the industry sub-sectors +# (http://iek8wikis.iek.fz-juelich.de/HTAPWiki/WP1.1?highlight=%28%28WP1.1%29%29) +INPUT_NAME_NMVOC_INDUSTRY = 'HTAPv2_NMVOC___.0.1x0.1.nc' # list_years = [2008, 2010] -list_years = [2010] - -#RETRO ratios applied to HTAPv2 NMVOC emissions (http://iek8wikis.iek.fz-juelich.de/HTAPWiki/WP1.1?highlight=%28%28WP1.1%29%29) -voc_ratio_path = '/esarchive/recon/jrc/htapv2/original_files/retro_nmvoc_ratio_2000_01x01' -voc_ratio_name = 'retro_nmvoc_ratio__2000_0.1deg.nc' -voc_ratio_air_name = 'VOC_split_AIR.csv' -voc_ratio_ships_name = 'VOC_split_SHIP.csv' +LIST_YEARS = [2010] + +# RETRO ratios applied to HTAPv2 NMVOC emissions +# (http://iek8wikis.iek.fz-juelich.de/HTAPWiki/WP1.1?highlight=%28%28WP1.1%29%29) +VOC_RATIO_PATH = '/esarchive/recon/jrc/htapv2/original_files/retro_nmvoc_ratio_2000_01x01' +VOC_RATIO_NAME = 'retro_nmvoc_ratio__2000_0.1deg.nc' +VOC_RATIO_AIR_NAME = 'VOC_split_AIR.csv' +VOC_RATIO_SHIPS_NAME = 'VOC_split_SHIP.csv' # ============================================================== @@ -64,11 +65,10 @@ def do_transformation_annual(filename, out_path, pollutant, sector, year): :return: """ - import os from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area from hermesv3_gr.tools.coordinates_tools import create_bounds print filename - c_lats, c_lons = extract_vars(filename, ['lat', 'lon']) + [c_lats, c_lons] = extract_vars(filename, ['lat', 'lon']) if pollutant == 'pm25': [data] = extract_vars(filename, ['emi_pm2.5'], @@ -90,7 +90,8 @@ def do_transformation_annual(filename, out_path, pollutant, sector, year): '2017-04-04: Added global 
attributes;\n' + '2017-04-04: Re-naming pollutant;\n' + '2017-04-04: Added cell_area variable;\n', - 'references': 'EC, JRC / US EPA, HTAP_V2. http://edgar.jrc.ec.europa.eu/htap/EDGAR-HTAP_v1_final_jan2012.pdf\n ' + + 'references': 'EC, JRC / US EPA, HTAP_V2. ' + + 'http://edgar.jrc.ec.europa.eu/htap/EDGAR-HTAP_v1_final_jan2012.pdf\n ' + 'http://edgar.jrc.ec.europa.eu/htap_v2/', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)', @@ -129,13 +130,12 @@ def do_transformation(filename_list, out_path, pollutant, sector, year): :return: """ - import os from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area from hermesv3_gr.tools.coordinates_tools import create_bounds for month in xrange(1, 13): print filename_list[month - 1] - c_lats, c_lons = extract_vars(filename_list[month - 1], ['lat', 'lon']) + [c_lats, c_lons] = extract_vars(filename_list[month - 1], ['lat', 'lon']) if pollutant == 'pm25': [data] = extract_vars(filename_list[month - 1], ['emi_pm2.5'], @@ -157,7 +157,9 @@ def do_transformation(filename_list, out_path, pollutant, sector, year): '2017-04-04: Added global attributes;\n' + '2017-04-04: Re-naming pollutant;\n' + '2017-04-04: Added cell_area variable;\n', - 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + + 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global ' + + 'emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, ' + + 'Atmos. Chem. 
Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + 'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)', @@ -176,45 +178,57 @@ def do_transformation(filename_list, out_path, pollutant, sector, year): def do_ratio_list(sector=None): # TODO Documentation + """ + + :param sector: + :return: + """ if sector == 'SHIPS': - return {'all': os.path.join(voc_ratio_path, voc_ratio_ships_name)} + return {'all': os.path.join(VOC_RATIO_PATH, VOC_RATIO_SHIPS_NAME)} elif sector == 'AIR_CDS': - return {'all': os.path.join(voc_ratio_path, voc_ratio_air_name)} + return {'all': os.path.join(VOC_RATIO_PATH, VOC_RATIO_AIR_NAME)} elif sector == 'AIR_CRS': - return {'all': os.path.join(voc_ratio_path, voc_ratio_air_name)} + return {'all': os.path.join(VOC_RATIO_PATH, VOC_RATIO_AIR_NAME)} elif sector == 'AIR_LTO': - return {'all': os.path.join(voc_ratio_path, voc_ratio_air_name)} - else: - return { - 'voc01': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '01')), - 'voc02': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '02')), - 'voc03': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '03')), - 'voc04': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '04')), - 'voc05': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '05')), - 'voc06': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '06')), - 'voc07': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '07')), - 'voc08': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '08')), - 'voc09': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '09')), - 'voc12': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '12')), - 'voc13': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '13')), - 'voc14': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '14')), - 'voc15': os.path.join(voc_ratio_path, 
voc_ratio_name.replace('', '15')), - 'voc16': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '16')), - 'voc17': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '17')), - 'voc18': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '18')), - 'voc19': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '19')), - 'voc20': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '20')), - 'voc21': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '21')), - 'voc22': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '22')), - 'voc23': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '23')), - 'voc24': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '24')), - 'voc25': os.path.join(voc_ratio_path, voc_ratio_name.replace('', '25')), - } + return {'all': os.path.join(VOC_RATIO_PATH, VOC_RATIO_AIR_NAME)} + return { + 'voc01': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '01')), + 'voc02': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '02')), + 'voc03': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '03')), + 'voc04': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '04')), + 'voc05': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '05')), + 'voc06': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '06')), + 'voc07': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '07')), + 'voc08': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '08')), + 'voc09': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '09')), + 'voc12': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '12')), + 'voc13': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '13')), + 'voc14': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '14')), + 'voc15': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '15')), + 'voc16': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '16')), + 'voc17': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '17')), + 
'voc18': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '18')), + 'voc19': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '19')), + 'voc20': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '20')), + 'voc21': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '21')), + 'voc22': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '22')), + 'voc23': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '23')), + 'voc24': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '24')), + 'voc25': os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', '25')), + } def do_nmvoc_month_transformation(filename_list, out_path, sector, year): # TODO Docuemtnation - from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area + """ + + :param filename_list: + :param out_path: + :param sector: + :param year: + :return: + """ + from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf from hermesv3_gr.tools.coordinates_tools import create_bounds nmvoc_ratio_list = do_ratio_list() @@ -258,7 +272,7 @@ def do_nmvoc_month_transformation(filename_list, out_path, sector, year): [ratio] = extract_vars(ratio_file, [ratio_var]) data_aux = data.copy() - data_aux['data'] = data['data']*ratio['data'] + data_aux['data'] = data['data'] * ratio['data'] data_aux['data'] = data_aux['data'].reshape((1,) + data_aux['data'].shape) data_aux['name'] = voc data_aux['units'] = 'kg m-2 s-1' @@ -286,12 +300,21 @@ def do_nmvoc_month_transformation(filename_list, out_path, sector, year): print out_path_aux write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data_aux], boundary_latitudes=create_bounds(c_lats['data']), - boundary_longitudes=create_bounds(c_lons['data']),global_attributes=global_attributes,) + boundary_longitudes=create_bounds(c_lons['data']) ,global_attributes=global_attributes,) return True def do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year): - from hermesv3_gr.tools.netcdf_tools 
import extract_vars, write_netcdf, get_grid_area + # TODO Documentation + """ + + :param filename_list: + :param out_path: + :param sector: + :param year: + :return: + """ + from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf from hermesv3_gr.tools.coordinates_tools import create_bounds nmvoc_ratio_list = do_ratio_list() @@ -314,22 +337,23 @@ def do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year } if voc in ['voc02', 'voc03', 'voc04', 'voc05', 'voc07', 'voc08', 'voc12', 'voc13']: [r_inc, r_exf] = extract_vars(ratio_file, ['inc', 'exf']) - data.update({'data': ind['data']*r_inc['data'] + exf['data']*r_exf['data']}) + data.update({'data': ind['data'] * r_inc['data'] + exf['data'] * r_exf['data']}) elif voc in ['voc01', 'voc23', 'voc25']: [r_inc, r_sol] = extract_vars(ratio_file, ['inc', 'sol']) - data.update({'data': ind['data']*r_inc['data'] + sol['data']*r_sol['data']}) + data.update({'data': ind['data'] * r_inc['data'] + sol['data'] * r_sol['data']}) elif voc in ['voc09', 'voc16', 'voc21', 'voc22', 'voc24']: [r_inc] = extract_vars(ratio_file, ['inc']) - data.update({'data': ind['data']*r_inc['data']}) + data.update({'data': ind['data'] * r_inc['data']}) # elif voc in []: # [r_exf, r_sol] = extract_vars(ratio_file, ['exf', 'sol']) # data.update({'data': exf['data']*r_exf['data'] + sol['data']*r_sol['data']}) elif voc in ['voc18', 'voc19', 'voc20']: [r_sol] = extract_vars(ratio_file, ['sol']) - data.update({'data': sol['data']*r_sol['data']}) + data.update({'data': sol['data'] * r_sol['data']}) else: [r_inc, r_exf, r_sol] = extract_vars(ratio_file, ['inc', 'exf', 'sol']) - data.update({'data': ind['data']*r_inc['data'] + exf['data']*r_exf['data'] + sol['data']*r_sol['data']}) + data.update({'data': ind['data'] * r_inc['data'] + exf['data']*r_exf['data'] + + sol['data'] * r_sol['data']}) global_attributes = { 'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, voc), @@ -338,7 +362,10 @@ def 
do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year 'source': 'HTAPv2', 'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' + '2017-04-28: ...', - 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + + 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and ' + + 'global emission grid maps for 2008 and 2010 to study hemispheric transport of air ' + + 'pollution, Atmos. Chem. Phys., 15, 11411-11432, ' + + 'https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + 'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)', @@ -351,22 +378,27 @@ def do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year out_path_aux = os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(voc, year, str(month).zfill(2))) print out_path_aux write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data], - boundary_latitudes=create_bounds(c_lats['data']), boundary_longitudes=create_bounds(c_lons['data']), - global_attributes=global_attributes,) + boundary_latitudes=create_bounds(c_lats['data']), + boundary_longitudes=create_bounds(c_lons['data']), global_attributes=global_attributes,) def do_nmvoc_year_transformation(filename, out_path, sector, year): - import os + # TODO Documentation + """ + + :param filename: + :param out_path: + :param sector: + :param year: + :return: + """ import pandas as pd - from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf, get_grid_area + from hermesv3_gr.tools.netcdf_tools import extract_vars, write_netcdf from hermesv3_gr.tools.coordinates_tools import create_bounds nmvoc_ratio_file = 
do_ratio_list(sector)['all'] nmvoc_ratio_list = pd.read_csv(nmvoc_ratio_file, sep=';') - - print 'hola->',filename - c_lats, c_lons = extract_vars(filename, ['lat', 'lon']) [data] = extract_vars(filename, ['emi_nmvoc']) @@ -375,10 +407,8 @@ def do_nmvoc_year_transformation(filename, out_path, sector, year): pollutant = voc_ratio['voc_group'] ratio = voc_ratio['factor'] - # print pollutant, ratio - data_aux = data.copy() - data_aux['data'] = data['data']*ratio + data_aux['data'] = data['data'] * ratio data_aux['data'] = data_aux['data'].reshape((1,) + data_aux['data'].shape) data_aux['name'] = pollutant data_aux['units'] = 'kg m-2 s-1' @@ -389,7 +419,9 @@ def do_nmvoc_year_transformation(filename, out_path, sector, year): 'source': 'HTAPv2', 'history': 'Re-writing of the HTAPv2 input to follow the CF 1.6 conventions;\n' + '2017-04-28: ...', - 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, Atmos. Chem. Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + + 'references': 'publication: Janssens-Maenhout, G., et al.: HTAP_v2.2: a mosaic of regional and global ' + + 'emission grid maps for 2008 and 2010 to study hemispheric transport of air pollution, ' + + 'Atmos. Chem. 
Phys., 15, 11411-11432, https://doi.org/10.5194/acp-15-11411-2015, 2015.\n ' + 'web: http://edgar.jrc.ec.europa.eu/htap_v2/index.php', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)\n ' + @@ -409,21 +441,31 @@ def do_nmvoc_year_transformation(filename, out_path, sector, year): def get_pollutant_dict(): - pollutant_dict = { - # 'bc': 'BC', - # 'co': 'CO', - # 'nh3': 'NH3', - # 'nox_no2': 'NOx', - # 'oc': 'OC', - # 'pm10': 'PM10', - # 'pm25': 'PM2.5', - # 'so2': 'SO2', + # TODO Documentation + """ + + :return: + """ + p_dict = { + 'bc': 'BC', + 'co': 'CO', + 'nh3': 'NH3', + 'nox_no2': 'NOx', + 'oc': 'OC', + 'pm10': 'PM10', + 'pm25': 'PM2.5', + 'so2': 'SO2', 'nmvoc': 'NMVOC' } - return pollutant_dict + return p_dict def get_sector_dict(): + # TODO Documentation + """ + + :return: + """ common_dict = { 'month': ['ENERGY', 'INDUSTRY', 'RESIDENTIAL', 'TRANSPORT'], 'year': ['SHIPS', 'AIR_CDS', 'AIR_CRS', 'AIR_LTO'] @@ -444,88 +486,93 @@ def get_sector_dict(): def get_nmvoc_sector_dict(): + # TODO Documentation + """ + + :return: + """ nmvoc_sectors = {'month': ['ENERGY', 'INDUSTRY_3subsectors', 'RESIDENTIAL', 'TRANSPORT'], 'year': ['SHIPS', 'AIR_CDS', 'AIR_CRS', 'AIR_LTO']} return nmvoc_sectors def check_vocs(year): + # TODO Documentation + """ + + :param year: + :return: + """ from hermesv3_gr.tools.netcdf_tools import extract_vars - for month in xrange(1, 12 +1, 1): + for month in xrange(1, 12 + 1, 1): for snap in ['ENERGY', 'INDUSTRY', 'RESIDENTIAL', 'TRANSPORT']: - nmvoc_path = os.path.join(output_path, 'monthly_mean', 'nmvoc_{0}'.format(snap.lower()), 'nmvoc_{0}{1}.nc'.format(year, str(month).zfill(2))) - # print nmvoc_path + nmvoc_path = os.path.join(OUTPUT_PATH, 'monthly_mean', 'nmvoc_{0}'.format(snap.lower()), + 'nmvoc_{0}{1}.nc'.format(year, str(month).zfill(2))) [new_voc] = extract_vars(nmvoc_path, ['nmvoc']) nmvoc_sum = new_voc['data'].sum() voc_sum = 0 - for voc in 
['voc{0}'.format(str(x).zfill(2)) for x in xrange(1, 25 +1, 1)]: - voc_path = os.path.join(output_path, 'monthly_mean', '{0}_{1}'.format(voc, snap.lower()), '{0}_{1}{2}.nc'.format(voc, year, str(month).zfill(2))) - # print voc_path, os.path.exists(voc_path) + for voc in ['voc{0}'.format(str(x).zfill(2)) for x in xrange(1, 25 + 1, 1)]: + voc_path = os.path.join(OUTPUT_PATH, 'monthly_mean', '{0}_{1}'.format(voc, snap.lower()), + '{0}_{1}{2}.nc'.format(voc, year, str(month).zfill(2))) if os.path.exists(voc_path): [new_voc] = extract_vars(voc_path, [voc]) voc_sum += new_voc['data'].sum() - print '{0} month: {4}; NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format(snap, nmvoc_sum, voc_sum, 100*(nmvoc_sum - voc_sum)/nmvoc_sum, month) - + print '{0} month: {4}; NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format( + snap, nmvoc_sum, voc_sum, 100 * (nmvoc_sum - voc_sum)/nmvoc_sum, month) if __name__ == '__main__': - - for y in list_years: - # check_vocs(y) - # sys.exit(1) + for y in LIST_YEARS: for pollutant_dict in get_pollutant_dict().iteritems(): for current_sector in get_sector_dict()[pollutant_dict[0]]['month']: - input_name_aux = input_name.replace('', current_sector) + input_name_aux = INPUT_NAME.replace('', current_sector) input_name_aux = input_name_aux.replace('', str(y)) input_name_aux = input_name_aux.replace('', pollutant_dict[1]) - file_list = [os.path.join(input_path, input_name_aux.replace('', str(aux_month))) + file_list = [os.path.join(INPUT_PATH, input_name_aux.replace('', str(aux_month))) for aux_month in xrange(1, 13)] - do_transformation(file_list, os.path.join(output_path, 'monthly_mean'), pollutant_dict[0], current_sector, - y) + do_transformation(file_list, os.path.join(OUTPUT_PATH, 'monthly_mean'), pollutant_dict[0], + current_sector, y) # annual inventories for current_sector in get_sector_dict()[pollutant_dict[0]]['year']: if current_sector[0:3] == 'AIR': - input_name_aux = input_name_air + input_name_aux = INPUT_NAME_AIR else: - input_name_aux = 
input_name_ships + input_name_aux = INPUT_NAME_SHIPS input_name_aux = input_name_aux.replace('', current_sector) input_name_aux = input_name_aux.replace('', str(y)) input_name_aux = input_name_aux.replace('', pollutant_dict[1]) - input_name_aux = os.path.join(input_path, input_name_aux) + input_name_aux = os.path.join(INPUT_PATH, input_name_aux) - do_transformation_annual(input_name_aux, os.path.join(output_path, 'yearly_mean', ), pollutant_dict[0], + do_transformation_annual(input_name_aux, os.path.join(OUTPUT_PATH, 'yearly_mean', ), pollutant_dict[0], current_sector, y) for current_sector in get_nmvoc_sector_dict()['month']: if current_sector == 'INDUSTRY_3subsectors': - input_name_aux = input_name_nmvoc_industry + input_name_aux = INPUT_NAME_NMVOC_INDUSTRY else: - input_name_aux = input_name + input_name_aux = INPUT_NAME input_name_aux = input_name_aux.replace('', 'NMVOC') input_name_aux = input_name_aux.replace('', current_sector) input_name_aux = input_name_aux.replace('', str(y)) - file_list = [os.path.join(input_path, input_name_aux.replace('', str(aux_month))) + file_list = [os.path.join(INPUT_PATH, input_name_aux.replace('', str(aux_month))) for aux_month in xrange(1, 13)] if current_sector == 'INDUSTRY_3subsectors': - do_nmvoc_industry_month_transformation(file_list, os.path.join(output_path, 'monthly_mean'), current_sector, - y) + do_nmvoc_industry_month_transformation(file_list, os.path.join(OUTPUT_PATH, 'monthly_mean'), + current_sector, y) else: - do_nmvoc_month_transformation(file_list, os.path.join(output_path, 'monthly_mean'), current_sector, - y) + do_nmvoc_month_transformation(file_list, os.path.join(OUTPUT_PATH, 'monthly_mean'), current_sector, y) for current_sector in get_nmvoc_sector_dict()['year']: if current_sector[0:3] == 'AIR': - input_name_aux = input_name_air + input_name_aux = INPUT_NAME_AIR else: - input_name_aux = input_name_ships + input_name_aux = INPUT_NAME_SHIPS input_name_aux = input_name_aux.replace('', 'NMVOC') input_name_aux = 
input_name_aux.replace('', current_sector) input_name_aux = input_name_aux.replace('', str(y)) - input_name_aux = os.path.join(input_path, input_name_aux) + input_name_aux = os.path.join(INPUT_PATH, input_name_aux) print input_name_aux - do_nmvoc_year_transformation(input_name_aux, os.path.join(output_path, 'yearly_mean'), current_sector, - y) - + do_nmvoc_year_transformation(input_name_aux, os.path.join(OUTPUT_PATH, 'yearly_mean'), current_sector, y) diff --git a/preproc/tno_mac_iii_preproc.py b/preproc/tno_mac_iii_preproc.py index b9841db..75db7df 100755 --- a/preproc/tno_mac_iii_preproc.py +++ b/preproc/tno_mac_iii_preproc.py @@ -22,13 +22,13 @@ import os # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/tno/tno_macc_iii/original_files/ascii' -output_path = '/esarchive/recon/tno/tno_macc_iii/yearly_mean' -input_name = 'TNO_MACC_III_emissions_v1_1_.txt' +INPUT_PATH = '/esarchive/recon/tno/tno_macc_iii/original_files/ascii' +OUTPUT_PATH = '/esarchive/recon/tno/tno_macc_iii/yearly_mean' +INPUT_NAME = 'TNO_MACC_III_emissions_v1_1_.txt' # list_years = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011] -list_years = [2011] -voc_ratio_path = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' -vor_ratio_name = 'ratio_.nc' +LIST_YEARS = [2011] +VOC_RATIO_PATH = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' +VOC_RATIO_NAME = 'ratio_.nc' # ============================================================== @@ -105,7 +105,7 @@ def do_transformation(year): import pandas as pd import numpy as np - in_file = os.path.join(input_path, input_name.replace('', str(year))) + in_file = os.path.join(INPUT_PATH, INPUT_NAME.replace('', str(year))) unit_factor = 1000./(365.*24.*3600.) 
# To pass from Mg/year to Kg/s # unit_factor = 1000000 # To pass from Mg/m2.year to Mg/Km2.year @@ -145,7 +145,7 @@ def do_transformation(year): pollutant_list[i]['data'] = pollutant_list[i]['data'].reshape((1,) + pollutant_list[i]['data'].shape) # print pollutant_list[i]['data'].max() - aux_output_path = os.path.join(output_path, '{0}_snap{1}'.format(pollutant_list[i]['name'], name)) + aux_output_path = os.path.join(OUTPUT_PATH, '{0}_snap{1}'.format(pollutant_list[i]['name'], name)) if not os.path.exists(aux_output_path): os.makedirs(aux_output_path) aux_output_path = os.path.join(aux_output_path, '{0}_{1}.nc'.format(pollutant_list[i]['name'], year)) @@ -224,10 +224,10 @@ def do_voc_transformation(year): from warnings import warn as warning for snap in get_sector_list(): - in_path = os.path.join(output_path, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) + in_path = os.path.join(OUTPUT_PATH, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) [nmvoc, c_lats, c_lons, cell_area] = extract_vars(in_path, ['nmvoc', 'lat', 'lon', 'cell_area']) for voc in get_voc_list(): - ratio_path = os.path.join(voc_ratio_path, vor_ratio_name.replace('', voc)) + ratio_path = os.path.join(VOC_RATIO_PATH, VOC_RATIO_NAME.replace('', voc)) ratios_dict = get_voc_ratio(ratio_path, snap) if ratios_dict is not None: new_voc = { @@ -241,7 +241,7 @@ def do_voc_transformation(year): new_voc['data'] = nmvoc['data'] * mask - out_dir_aux = os.path.join(output_path, '{0}_{1}'.format(voc, snap)) + out_dir_aux = os.path.join(OUTPUT_PATH, '{0}_{1}'.format(voc, snap)) if not os.path.exists(out_dir_aux): os.makedirs(out_dir_aux) # print os.path.join(out_dir_aux, '{0}_{1}.nc'.format(voc, year)) @@ -267,13 +267,13 @@ def do_voc_transformation(year): def check_vocs(year): # TODO Documentation for snap in get_sector_list(): - nmvoc_path = os.path.join(output_path, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) + nmvoc_path = os.path.join(OUTPUT_PATH, 'nmvoc_{0}'.format(snap), 
'nmvoc_{0}.nc'.format(year)) [new_voc] = extract_vars(nmvoc_path, ['nmvoc']) nmvoc_sum = new_voc['data'].sum() voc_sum = 0 for voc in get_voc_list(): - voc_path = os.path.join(output_path, '{0}_{1}'.format(voc, snap), '{0}_{1}.nc'.format(voc, year)) + voc_path = os.path.join(OUTPUT_PATH, '{0}_{1}'.format(voc, snap), '{0}_{1}.nc'.format(voc, year)) if os.path.exists(voc_path): [new_voc] = extract_vars(voc_path, [voc]) voc_sum += new_voc['data'].sum() @@ -284,7 +284,7 @@ def check_vocs(year): if __name__ == '__main__': - for y in list_years: + for y in LIST_YEARS: do_transformation(y) do_voc_transformation(y) # check_vocs(y) diff --git a/preproc/tno_mac_iii_preproc_voc_ratios.py b/preproc/tno_mac_iii_preproc_voc_ratios.py index f1800a3..08d4c5d 100755 --- a/preproc/tno_mac_iii_preproc_voc_ratios.py +++ b/preproc/tno_mac_iii_preproc_voc_ratios.py @@ -23,16 +23,22 @@ import os # ============== CONFIGURATION PARAMETERS ====================== -output_path = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' -world_info_path = '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/temporal/tz_world_country_iso3166.csv' -tno_world_mask = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/TNO_MACC-III_WorldMask.nc' -csv_path = '/esarchive/recon/tno/tno_macc_iii/original_files/TNO_MACC_NMVOC profile_country_SNAP_12_05_2010.csv' - +OUTPUT_PATH = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' +WORLD_INFO_PATH = '/home/Earth/ctena/Models/HERMESv3/IN/data/profiles/temporal/tz_world_country_iso3166.csv' +TNO_WORLD_MASK = '/home/Earth/ctena/Models/HERMESv3/IN/data/auxiliar_files/TNO_MACC-III_WorldMask.nc' +CSV_PATH = '/esarchive/recon/tno/tno_macc_iii/original_files/TNO_MACC_NMVOC profile_country_SNAP_12_05_2010.csv' # ============================================================== -def extract_vars(netcdf_path, variables_list, attributes_list=list()): +def extract_vars(netcdf_path, variables_list, attributes_list=[]): # TODO Docuemtnation + """ + + :param 
netcdf_path: + :param variables_list: + :param attributes_list: + :return: + """ from netCDF4 import Dataset data_list = [] # print netcdf_path @@ -59,20 +65,47 @@ def extract_vars(netcdf_path, variables_list, attributes_list=list()): def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + regular_latlon=False, + rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + lcc=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): # TODO Docuemtnation + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param date: + :param hours: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + :param global_attributes: + :param regular_latlon: + :param rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param lcc: + :param lcc_x: + :param lcc_y: + :param lat_1_2: + :param lon_0: + :param lat_0: + :return: + """ from cf_units import Unit, encode_time from netCDF4 import Dataset - if not (RegularLatLon or LambertConformalConic or Rotated): - RegularLatLon = True + if not (regular_latlon or lcc or rotated): + regular_latlon = True print netcdf_path netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") # ===== Dimensions ===== - if RegularLatLon: + if regular_latlon: var_dim = ('lat', 'lon',) # Latitude @@ -96,7 +129,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, else: print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) 
sys.exit(1) - elif Rotated: + elif rotated: var_dim = ('rlat', 'rlon',) # Rotated Latitude @@ -113,7 +146,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.createDimension('rlon', len(rotated_lons)) lon_dim = ('rlat', 'rlon',) - elif LambertConformalConic: + elif lcc: var_dim = ('y', 'x',) netcdf.createDimension('y', len(lcc_y)) @@ -190,7 +223,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, # print lon_bnds[:].shape, boundary_longitudes.shape lon_bnds[:] = boundary_longitudes - if Rotated: + if rotated: # Rotated Latitude rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) rlat.long_name = "latitude in rotated pole grid" @@ -204,7 +237,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, rlon.units = Unit("degrees").symbol rlon.standard_name = "grid_longitude" rlon[:] = rotated_lons - if LambertConformalConic: + if lcc: x = netcdf.createVariable('x', 'd', ('x',), zlib=True) x.units = Unit("km").symbol x.long_name = "x coordinate of projection" @@ -243,11 +276,11 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, var.coordinates = "lat lon" if cell_area is not None: var.cell_measures = 'area: cell_area' - if RegularLatLon: + if regular_latlon: var.grid_mapping = 'crs' - elif Rotated: + elif rotated: var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: + elif lcc: var.grid_mapping = 'Lambert_conformal' # if variable['data'] is not 0: # print var[:].shape, variable['data'].shape @@ -257,19 +290,19 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) # Grid mapping - if RegularLatLon: + if regular_latlon: # CRS mapping = netcdf.createVariable('crs', 'i') mapping.grid_mapping_name = "latitude_longitude" mapping.semi_major_axis = 6371000.0 mapping.inverse_flattening = 0 - elif Rotated: + 
elif rotated: # Rotated pole mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' mapping.grid_north_pole_latitude = north_pole_lat mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: + elif lcc: # CRS mapping = netcdf.createVariable('Lambert_conformal', 'i') mapping.grid_mapping_name = "lambert_conformal_conic" @@ -295,7 +328,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, def get_grid_area(filename): """ - Calculates the area of each cell. + Calculate the area of each cell. :param filename: Full path to the NetCDF to calculate the cell areas. :type filename: str @@ -307,8 +340,8 @@ def get_grid_area(filename): from netCDF4 import Dataset cdo = Cdo() - s = cdo.gridarea(input=filename) - nc_aux = Dataset(s, mode='r') + src = cdo.gridarea(input=filename) + nc_aux = Dataset(src, mode='r') grid_area = nc_aux.variables['cell_area'][:] nc_aux.close() @@ -317,7 +350,7 @@ def get_grid_area(filename): def create_bounds(coords, number_vertices=2): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. 
:param coords: Coordinates in degrees (latitude or longitude) :type coords: numpy.ndarray @@ -347,14 +380,19 @@ def create_bounds(coords, number_vertices=2): def create_voc_ratio(voc): # TODO Docuemtnation + """ + + :param voc: + :return: + """ import numpy as np - country_values, lat, lon = extract_vars(tno_world_mask, ['timezone_id', 'lat', 'lon']) + [country_values, lat, lon] = extract_vars(TNO_WORLD_MASK, ['timezone_id', 'lat', 'lon']) country_values = country_values['data'].reshape((country_values['data'].shape[1], country_values['data'].shape[1])) - print output_path - if not os.path.exists(output_path): - os.makedirs(output_path) + print OUTPUT_PATH + if not os.path.exists(OUTPUT_PATH): + os.makedirs(OUTPUT_PATH) - complete_output_path = os.path.join(output_path, 'ratio_{0}.nc'.format(voc)) + complete_output_path = os.path.join(OUTPUT_PATH, 'ratio_{0}.nc'.format(voc)) if not os.path.exists(complete_output_path): print 'Creating ratio file for {0}\npath: {1}'.format(voc, complete_output_path) data_list = [] @@ -383,9 +421,15 @@ def create_voc_ratio(voc): def get_default_ratio(voc, snap): # TODO Documentation + """ + + :param voc: + :param snap: + :return: + """ import pandas as pd - df = pd.read_csv(csv_path, sep=';') + df = pd.read_csv(CSV_PATH, sep=';') df = df.loc[df['vcode'] == voc.replace('voc', 'v'), :] df = df.loc[df['snap'] == snap, :] @@ -395,10 +439,14 @@ def get_default_ratio(voc, snap): def get_iso_codes(): # TODO Documentation + """ + + :return: + """ import pandas as pd # df = pd.read_csv(self.world_info, sep=';', index_col=False, names=["country", "country_code"]) - df = pd.read_csv(world_info_path, sep=';') + df = pd.read_csv(WORLD_INFO_PATH, sep=';') del df['time_zone'], df['time_zone_code'] df = df.drop_duplicates().dropna() df = df.set_index('country_code_alpha') @@ -410,9 +458,13 @@ def get_iso_codes(): def get_voc_list(): # TODO Documentation + """ + + :return: + """ import pandas as pd - df = pd.read_csv(csv_path, sep=';') + df = 
pd.read_csv(CSV_PATH, sep=';') del df['ISO3'], df['snap'], df['output substance name'], df['fr'] df = df.drop_duplicates().dropna() voc_list = df.vcode.values @@ -423,9 +475,14 @@ def get_voc_list(): def get_sector_list(voc): # TODO Documentation + """ + + :param voc: + :return: + """ import pandas as pd voc = voc.replace('voc', 'v') - df = pd.read_csv(csv_path, sep=';') + df = pd.read_csv(CSV_PATH, sep=';') df = df[df.vcode == voc] del df['ISO3'], df['vcode'], df['output substance name'], df['fr'] df = df.drop_duplicates().dropna() @@ -434,6 +491,11 @@ def get_sector_list(voc): def get_sector_list_text(voc): # TODO Documentation + """ + + :param voc: + :return: + """ voc = voc.replace('voc', 'v') sector_list = get_sector_list(voc) new_list = [] @@ -444,9 +506,15 @@ def get_sector_list_text(voc): def get_country_code_and_factor(voc, snap): # TODO Documentation + """ + + :param voc: + :param snap: + :return: + """ import pandas as pd voc = voc.replace('voc', 'v') - df = pd.read_csv(csv_path, sep=';') + df = pd.read_csv(CSV_PATH, sep=';') df = df[df.vcode == voc] df = df[df.snap == snap] del df['snap'], df['vcode'], df['output substance name'] diff --git a/preproc/wiedinmyer_preproc.py b/preproc/wiedinmyer_preproc.py index 069ca06..5af455f 100755 --- a/preproc/wiedinmyer_preproc.py +++ b/preproc/wiedinmyer_preproc.py @@ -24,13 +24,13 @@ from netCDF4 import Dataset # ============== CONFIGURATION PARAMETERS ====================== -input_path = '/esarchive/recon/ucar/wiedinmyer/original_files/' -output_path = '/esarchive/recon/ucar/wiedinmyer/' -list_pollutants = ['co2', 'co', 'so2', 'nox_no', 'nh3', 'ch4', 'c2h2', 'c2h4', 'c3h6', 'ch3oh', 'ch2o', 'ch3cooh', +INPUT_PATH = '/esarchive/recon/ucar/wiedinmyer/original_files/' +OUTPUT_PATH = '/esarchive/recon/ucar/wiedinmyer/' +LIST_POLLUTANTS = ['co2', 'co', 'so2', 'nox_no', 'nh3', 'ch4', 'c2h2', 'c2h4', 'c3h6', 'ch3oh', 'ch2o', 'ch3cooh', 'hcn', 'c6h6', 'pcb', 'pah', 'pcdd', 'pbdd', 'nmoc', 'hcl', 'hg', 'pm25', 'pm10', 
'oc', 'bc'] -input_name = 'ALL_Emiss_04282014.nc' -year = 2010 +INPUT_NAME = 'ALL_Emiss_04282014.nc' +YEAR = 2010 # ============================================================== @@ -89,7 +89,7 @@ def do_transformation(filename): factor = 1000000./(365.*24.*3600.) # To pass from Gg/m2.year to Kg/m2.s - for output_pollutant in list_pollutants: + for output_pollutant in LIST_POLLUTANTS: input_pollutant = out_pollutant_to_in_pollutant(output_pollutant) data = nc_in.variables[input_pollutant][:] @@ -103,11 +103,11 @@ def do_transformation(filename): 'grid_mapping': 'crs'} data = np.array(data) - out_path_aux = os.path.join(output_path, 'yearly_mean', output_pollutant) + out_path_aux = os.path.join(OUTPUT_PATH, 'yearly_mean', output_pollutant) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - write_netcdf(os.path.join(out_path_aux, '{0}_{1}.nc'.format(output_pollutant, year)), - data, data_attributes, lats, lons, grid_area, year, 01) + write_netcdf(os.path.join(out_path_aux, '{0}_{1}.nc'.format(output_pollutant, YEAR)), + data, data_attributes, lats, lons, grid_area, YEAR, 01) nc_in.close() @@ -229,6 +229,6 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr if __name__ == '__main__': starting_time = timeit.default_timer() - do_transformation(os.path.join(input_path, input_name)) + do_transformation(os.path.join(INPUT_PATH, INPUT_NAME)) print 'Time(s):', timeit.default_timer() - starting_time -- GitLab From 162f21822301c78900d6909aec6f9cd1d632f4a2 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Tue, 11 Sep 2018 12:27:24 +0200 Subject: [PATCH 23/51] Correcting Code conventions --- hermesv3_gr/config/config.py | 6 +- .../emision_inventories/emission_inventory.py | 4 +- .../gfas_emission_inventory.py | 6 +- .../point_source_emission_inventory.py | 2 +- hermesv3_gr/modules/grids/grid.py | 26 +-- hermesv3_gr/modules/grids/grid_lcc.py | 6 +- hermesv3_gr/modules/grids/grid_mercator.py | 6 +- 
hermesv3_gr/modules/grids/grid_rotated.py | 4 +- hermesv3_gr/modules/regrid/regrid.py | 2 +- .../modules/regrid/regrid_conservative.py | 2 +- hermesv3_gr/modules/speciation/speciation.py | 4 +- hermesv3_gr/modules/temporal/temporal.py | 2 +- hermesv3_gr/modules/vertical/vertical.py | 10 +- hermesv3_gr/modules/vertical/vertical_gfas.py | 4 +- hermesv3_gr/modules/writing/writer_cmaq.py | 176 ++++++++---------- .../modules/writing/writer_wrf_chem.py | 2 +- hermesv3_gr/tools/coordinates_tools.py | 155 ++++----------- hermesv3_gr/tools/custom_calendar.py | 4 +- hermesv3_gr/tools/netcdf_tools.py | 133 +++++++++++-- preproc/eclipsev5a_preproc.py | 4 +- preproc/edgarv432_ap_preproc.py | 4 +- preproc/edgarv432_voc_preproc.py | 4 +- preproc/gfas12_preproc.py | 6 +- preproc/htapv2_preproc.py | 8 +- 24 files changed, 285 insertions(+), 295 deletions(-) diff --git a/hermesv3_gr/config/config.py b/hermesv3_gr/config/config.py index 196ddd9..36d9db4 100644 --- a/hermesv3_gr/config/config.py +++ b/hermesv3_gr/config/config.py @@ -202,7 +202,7 @@ class Config(ArgParser): @staticmethod def create_dir(path): """ - Creates the given folder if it is not created yet. + Create the given folder if it is not created yet. :param path: Path to create. :type path: str @@ -248,7 +248,7 @@ class Config(ArgParser): @staticmethod def _parse_start_date(str_date): """ - Parses the date form string to datetime. + Parse the date form string to datetime. It accepts several ways to introduce the date: YYYYMMDD, YYYY/MM/DD, YYYYMMDDhh, YYYYYMMDD.hh, YYYY/MM/DD_hh:mm:ss, YYYY-MM-DD_hh:mm:ss, YYYY/MM/DD hh:mm:ss, YYYY-MM-DD hh:mm:ss, YYYY/MM/DD_hh, YYYY-MM-DD_hh. @@ -279,7 +279,7 @@ class Config(ArgParser): def _parse_end_date(self, end_date, start_date): """ - Parses the end date. + Parse the end date. If it's not defined it will be the same date that start_date (to do only one day). :param end_date: Date to the last day to simulate in string format. 
diff --git a/hermesv3_gr/modules/emision_inventories/emission_inventory.py b/hermesv3_gr/modules/emision_inventories/emission_inventory.py index 75573dd..3101a2f 100644 --- a/hermesv3_gr/modules/emision_inventories/emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/emission_inventory.py @@ -156,7 +156,7 @@ class EmissionInventory(object): def create_pollutants_dicts(self, pollutants): """ - Creates a list of dictionaries with the information of the name, paht and Dataset of each pollutant + Create a list of dictionaries with the information of the name, paht and Dataset of each pollutant :param pollutants: List of pollutants names :type pollutants: list @@ -262,7 +262,7 @@ class EmissionInventory(object): @staticmethod def make_emission_list(options, grid, vertical_output_profile, date): """ - Extracts the information of the cross table to read all the needed emissions. + Extract the information of the cross table to read all the needed emissions. :param options: Full list of parameters given by passing argument or in the configuration file. :type options: Namespace diff --git a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py index e53c8da..7490a0e 100755 --- a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py @@ -116,7 +116,7 @@ class GfasEmissionInventory(EmissionInventory): def get_altitude(self): """ - Extracts the altitude values depending on the choosen method. + Extract the altitude values depending on the choosen method. :return: Array with the alittude of each fire. :rtype: numpy.ndarray @@ -144,7 +144,7 @@ class GfasEmissionInventory(EmissionInventory): @ staticmethod def get_approach(p_vertical): """ - Extracts the given approach value. + Extract the given approach value. 
:return: Approach value :rtype: str @@ -167,7 +167,7 @@ class GfasEmissionInventory(EmissionInventory): @ staticmethod def get_method(p_vertical): """ - Extracts the given method value. + Extract the given method value. :return: Method value :rtype: str diff --git a/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py index 2238a73..8c22e11 100755 --- a/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/point_source_emission_inventory.py @@ -142,7 +142,7 @@ class PointSourceEmissionInventory(EmissionInventory): def calculate_altitudes(self, vertical_description_path): """ - Calculates the number layer to allocate the point source. + Calculate the number layer to allocate the point source. :param vertical_description_path: Path to the file that contains the vertical description :type vertical_description_path: str diff --git a/hermesv3_gr/modules/grids/grid.py b/hermesv3_gr/modules/grids/grid.py index 250ef8d..b9e19b2 100644 --- a/hermesv3_gr/modules/grids/grid.py +++ b/hermesv3_gr/modules/grids/grid.py @@ -97,7 +97,7 @@ class Grid(object): lat_1, lat_2, lon_0, lat_0, nx, ny, inc_x, inc_y, x_0, y_0, lat_ts): # TODO describe better the rotated parameters """ - Creates a Grid object depending on the grid type. + Create a Grid object depending on the grid type. :param grid_type: type of grid to create [global, rotated, lcc, mercator] :type grid_type: str @@ -208,7 +208,7 @@ class Grid(object): @staticmethod def set_vertical_levels(vertical_description_path): """ - Extracts the vertical levels. + Extract the vertical levels. :param vertical_description_path: path to the file that contain the vertical description of the required output file. 
@@ -248,7 +248,7 @@ class Grid(object): boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, RegularLatLon=True) - # Calculates the cell area of the auxiliary NetCDF file + # Calculate the cell area of the auxiliary NetCDF file self.cell_area = self.get_cell_area() # Re-writes the NetCDF adding the cell area @@ -263,7 +263,7 @@ class Grid(object): def get_cell_area(self): """ - Calculates the cell area of the grid. + Calculate the cell area of the grid. :return: Area of each cell of the grid. :rtype: numpy.array @@ -276,7 +276,7 @@ class Grid(object): # Initialises the CDO cdo = Cdo() - # Creates a temporal file 's' with the cell area + # Create a temporal file 's' with the cell area s = cdo.gridarea(input=self.coords_netcdf_file) # Get the cell area of the temporal file nc_aux = Dataset(s, mode='r') @@ -290,7 +290,7 @@ class Grid(object): @staticmethod def create_regular_grid_1d_array(center, inc, boundary): """ - Creates a regular grid giving the center, boundary and increment. + Create a regular grid giving the center, boundary and increment. :param center: Center of the coordinates. :type center: float @@ -307,11 +307,11 @@ class Grid(object): st_time = timeit.default_timer() - # Calculates first center point. + # Calculate first center point. origin = center - abs(boundary) - # Calculates the quantity of cells. + # Calculate the quantity of cells. n = (abs(boundary) / inc) * 2 - # Calculates all the values + # Calculate all the values values = np.arange(origin + inc, origin + (n * inc) - inc + inc / 2, inc, dtype=np.float) settings.write_time('Grid', 'create_regular_grid_1d_array', timeit.default_timer() - st_time, level=3) @@ -321,7 +321,7 @@ class Grid(object): @staticmethod def create_bounds(coords, inc, number_vertices=2, inverse=False): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. 
:param coords: Coordinates in degrees (latitude or longitude) :type coords: numpy.array @@ -342,17 +342,17 @@ class Grid(object): st_time = timeit.default_timer() settings.write_log('\t\t\tCreating boundaries.', level=3) - # Creates new arrays moving the centers half increment less and more. + # Create new arrays moving the centers half increment less and more. coords_left = coords - inc / 2 coords_right = coords + inc / 2 # Defining the number of corners needed. 2 to regular grids and 4 for irregular ones. if number_vertices == 2: - # Creates an array of N arrays of 2 elements to store the floor and the ceil values for each cell + # Create an array of N arrays of 2 elements to store the floor and the ceil values for each cell bound_coords = np.dstack((coords_left, coords_right)) bound_coords = bound_coords.reshape((len(coords), number_vertices)) elif number_vertices == 4: - # Creates an array of N arrays of 4 elements to store the corner values for each cell + # Create an array of N arrays of 4 elements to store the corner values for each cell # It can be stored in clockwise starting form the left-top element, or in inverse mode. 
if inverse: bound_coords = np.dstack((coords_left, coords_left, coords_right, coords_right)) diff --git a/hermesv3_gr/modules/grids/grid_lcc.py b/hermesv3_gr/modules/grids/grid_lcc.py index 43c535f..f66ea65 100644 --- a/hermesv3_gr/modules/grids/grid_lcc.py +++ b/hermesv3_gr/modules/grids/grid_lcc.py @@ -148,7 +148,7 @@ class LccGrid(Grid): LambertConformalConic=True, lcc_x=self.x, lcc_y=self.y, lat_1_2="{0}, {1}".format(self.lat_1, self.lat_2), lon_0=self.lon_0, lat_0=self.lat_0) - # Calculates the cell area of the auxiliary NetCDF file + # Calculate the cell area of the auxiliary NetCDF file self.cell_area = self.get_cell_area() # Re-writes the NetCDF adding the cell area @@ -173,7 +173,7 @@ class LccGrid(Grid): st_time = timeit.default_timer() settings.write_log('\t\tCreating lcc coordinates', level=3) - # Creates a regular grid in metres (Two 1D arrays) + # Create a regular grid in metres (Two 1D arrays) self.x = np.arange(self.x_0, self.x_0 + self.inc_x * self.nx, self.inc_x, dtype=np.float) if len(self.x)//2 < settings.size: settings.write_log('ERROR: Check the .err file to get more info.') @@ -191,7 +191,7 @@ class LccGrid(Grid): y_b = super(LccGrid, self).create_bounds(y, self.inc_y, number_vertices=4, inverse=True) x_b = super(LccGrid, self).create_bounds(x, self.inc_x, number_vertices=4) - # Creates the LCC projection + # Create the LCC projection projection = Proj( proj='lcc', ellps='WGS84', diff --git a/hermesv3_gr/modules/grids/grid_mercator.py b/hermesv3_gr/modules/grids/grid_mercator.py index f537130..cce1491 100644 --- a/hermesv3_gr/modules/grids/grid_mercator.py +++ b/hermesv3_gr/modules/grids/grid_mercator.py @@ -134,7 +134,7 @@ class MercatorGrid(Grid): boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, Mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) - # Calculates the cell area of the auxiliary NetCDF file + # Calculate the cell area of the auxiliary NetCDF file 
self.cell_area = self.get_cell_area() # Re-writes the NetCDF adding the cell area @@ -161,7 +161,7 @@ class MercatorGrid(Grid): st_time = timeit.default_timer() - # Creates a regular grid in metres (Two 1D arrays) + # Create a regular grid in metres (Two 1D arrays) self.x = np.arange(self.x_0, self.x_0 + self.inc_x * self.nx, self.inc_x, dtype=np.float) if len(self.x)//2 < settings.size: settings.write_log('ERROR: Check the .err file to get more info.') @@ -179,7 +179,7 @@ class MercatorGrid(Grid): y_b = super(MercatorGrid, self).create_bounds(y, self.inc_y, number_vertices=4, inverse=True) x_b = super(MercatorGrid, self).create_bounds(x, self.inc_x, number_vertices=4) - # Creates the LCC projection + # Create the LCC projection projection = Proj(self.crs) # UTM to Mercator diff --git a/hermesv3_gr/modules/grids/grid_rotated.py b/hermesv3_gr/modules/grids/grid_rotated.py index a21fdbc..8a6f544 100644 --- a/hermesv3_gr/modules/grids/grid_rotated.py +++ b/hermesv3_gr/modules/grids/grid_rotated.py @@ -127,7 +127,7 @@ class RotatedGrid(Grid): def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180): """ - Calculates the unrotated coordinates using the rotated ones. + Calculate the unrotated coordinates using the rotated ones. :param lon_deg: Rotated longitude coordinate. 
:type lon_deg: numpy.array @@ -216,7 +216,7 @@ class RotatedGrid(Grid): north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) - # Calculates the cell area of the auxiliary NetCDF file + # Calculate the cell area of the auxiliary NetCDF file self.cell_area = self.get_cell_area() # Re-writes the NetCDF adding the cell area diff --git a/hermesv3_gr/modules/regrid/regrid.py b/hermesv3_gr/modules/regrid/regrid.py index d533678..9d224a9 100644 --- a/hermesv3_gr/modules/regrid/regrid.py +++ b/hermesv3_gr/modules/regrid/regrid.py @@ -51,7 +51,7 @@ class Regrid(object): def apply_weights(self, values): """ - Calculates the regridded values using the ESMF algorithm for a 3D array. + Calculate the regridded values using the ESMF algorithm for a 3D array. :param values: Input values to regrid :type values: numpy.array diff --git a/hermesv3_gr/modules/regrid/regrid_conservative.py b/hermesv3_gr/modules/regrid/regrid_conservative.py index ff8d509..f22aece 100644 --- a/hermesv3_gr/modules/regrid/regrid_conservative.py +++ b/hermesv3_gr/modules/regrid/regrid_conservative.py @@ -166,7 +166,7 @@ class ConservativeRegrid(Regrid): def apply_weights(self, values): """ - Calculates the regridded values using the ESMF algorithm for a 3D array specifically for a conservative regrid. + Calculate the regridded values using the ESMF algorithm for a 3D array specifically for a conservative regrid. :param values: Input values to regrid. :type values: numpy.array diff --git a/hermesv3_gr/modules/speciation/speciation.py b/hermesv3_gr/modules/speciation/speciation.py index ac6d17b..6084c44 100644 --- a/hermesv3_gr/modules/speciation/speciation.py +++ b/hermesv3_gr/modules/speciation/speciation.py @@ -50,7 +50,7 @@ class Speciation(object): def get_speciation_profile(self, speciation_profile_path): """ - Extracts the speciation information as a dictionary with the destiny pollutant as key and the formula as value. 
+ Extract the speciation information as a dictionary with the destiny pollutant as key and the formula as value. :param speciation_profile_path: :type speciation_profile_path: @@ -95,7 +95,7 @@ class Speciation(object): @staticmethod def extract_molecular_weights(molecular_weights_path): """ - Extracts the molecular weights for each pollutant as a dictionary with the name of the pollutant as key and the + Extract the molecular weights for each pollutant as a dictionary with the name of the pollutant as key and the molecular weight as value. :param molecular_weights_path: Path to the CSV that contains all the molecular weights. diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index 5da069d..5dcc331 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -246,7 +246,7 @@ class TemporalDistribution(object): @staticmethod def parse_tz(timezone): """ - Parse the timezone (string format). + Parse the timezone (string format). It is needed because some libraries have more timezones than others and it tries to simplify setting the strange ones into the nearest common one. diff --git a/hermesv3_gr/modules/vertical/vertical.py b/hermesv3_gr/modules/vertical/vertical.py index de9041f..ccde876 100644 --- a/hermesv3_gr/modules/vertical/vertical.py +++ b/hermesv3_gr/modules/vertical/vertical.py @@ -50,7 +50,7 @@ class VerticalDistribution(object): def get_vertical_profile(self, path): """ - Extracts the vertical v_profile from the vertical v_profile file. + Extract the vertical v_profile from the vertical v_profile file. :param path: Path to the file that contains all the vertical profiles. :type path: str @@ -93,7 +93,7 @@ class VerticalDistribution(object): @staticmethod def get_vertical_output_profile(path): """ - Extracts the vertical description of the desired output. + Extract the vertical description of the desired output. 
:param path: Path to the file that contains the output vertical description. :type path: str @@ -118,7 +118,7 @@ class VerticalDistribution(object): @staticmethod def get_weights(prev_layer, layer, in_weight, output_vertical_profile): """ - Calculates the weights for the given layer. + Calculate the weights for the given layer. :param prev_layer: Altitude of the low layer. 0 if it's the first. :type prev_layer: float @@ -151,7 +151,7 @@ class VerticalDistribution(object): def calculate_weights(self): """ - Calculates the weights for all the vertical layers. + Calculate the weights for all the vertical layers. :return: Weights that goes to each layer. :rtype: list of float @@ -177,7 +177,7 @@ class VerticalDistribution(object): @staticmethod def apply_weights(data, weights): """ - Calculates the vertical distribution using the given data and weights. + Calculate the vertical distribution using the given data and weights. :param data: Emissions to be vertically distributed. :type data: numpy.array diff --git a/hermesv3_gr/modules/vertical/vertical_gfas.py b/hermesv3_gr/modules/vertical/vertical_gfas.py index 08caf9f..70f0414 100644 --- a/hermesv3_gr/modules/vertical/vertical_gfas.py +++ b/hermesv3_gr/modules/vertical/vertical_gfas.py @@ -45,7 +45,7 @@ class GfasVerticalDistribution(VerticalDistribution): @staticmethod def calculate_widths(heights_list): """ - Calculates the width of each vertical level. + Calculate the width of each vertical level. :param heights_list: List of the top altitude in meters of each level. :type heights_list: list @@ -67,7 +67,7 @@ class GfasVerticalDistribution(VerticalDistribution): def get_weights(self, heights_list): """ - Calculates the proportion (%) of emission to put on each layer. + Calculate the proportion (%) of emission to put on each layer. :param heights_list: List with the width of each vertical level. 
:type heights_list: list diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 7da5172..a7efdd9 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -18,14 +18,13 @@ # along with HERMESv3_GR. If not, see . -import os import sys -from hermesv3_gr.modules.writing.writer import Writer import timeit -from hermesv3_gr.config import settings import numpy as np from netCDF4 import Dataset from mpi4py import MPI +from hermesv3_gr.modules.writing.writer import Writer +from hermesv3_gr.config import settings class WriterCmaq(Writer): @@ -39,6 +38,13 @@ class WriterCmaq(Writer): 'XCELL', 'YCELL', 'VGTYP', 'VGTOP', 'VGLVLS', 'GDNAM', 'UPNAM', 'FILEDESC', 'HISTORY', 'VAR-LIST'] def unit_change(self, variable, data): + # TODO Documentation + """ + + :param variable: + :param data: + :return: + """ from cf_units import Unit if data is not None: @@ -63,7 +69,7 @@ class WriterCmaq(Writer): @staticmethod def change_variable_attributes(emission_list): """ - Modifies the emission list to be consistent to use the output as input for CMAQ model. + Modify the emission list to be consistent to use the output as input for CMAQ model. :param emission_list: List of emissions :type emission_list: list @@ -88,7 +94,7 @@ class WriterCmaq(Writer): @staticmethod def create_tflag(st_date, hours_array, num_vars): """ - Creates the content of the CMAQ variable TFLAG + Create the content of the CMAQ variable TFLAG :param st_date: Starting date :type st_date: datetime.datetime @@ -103,7 +109,6 @@ class WriterCmaq(Writer): :return: Array with the content of TFLAG :rtype: numpy.array """ - import numpy as np from datetime import timedelta a = np.array([[[]]]) @@ -127,7 +132,6 @@ class WriterCmaq(Writer): :return: List transformed on string. 
:rtype: str """ - str_var_list = "" for var in var_list: str_var_list += "{:<16}".format(var) @@ -135,6 +139,11 @@ class WriterCmaq(Writer): return str_var_list def read_global_attributes(self): + # TODO Documentation + """ + + :return: + """ import pandas as pd from warnings import warn as warning float_atts = ['VGTOP'] @@ -186,95 +195,14 @@ class WriterCmaq(Writer): def create_global_attributes(self, var_list): """ - Creates the global attributes and the order that they have to be filled. - - :param date: Starting date. - :type date: datetime.datetime - - :param nx: Number of elements on the x dimension. - :type nx: int - - :param ny: Number of elements on the y dimension. - :type ny: int - - :param nlays: Number of vertical layers. - :type nlays: int - - :param lat_1: Value of lat 1 of the Lambert Conformal Conic projection. - :type lat_1: float - - :param lat_2: Value of lat 2 of the Lambert Conformal Conic projection. - :type lat_2: float - - :param lon_0: Value of lon 0 of the Lambert Conformal Conic projection. - :type lon_0: float - - :param lat_0: Value of lat 0 of the Lambert Conformal Conic projection. - :type lat_0: float - - :param x_0: X value of the origin. - :type x_0: float - - :param y_0: Y value of the origin. - :type y_0: float - - :param inc_x: Increment (in meters) of the x values. - :type inc_x: float - - :param inc_y: Increment (in meters) of the y values. - :type inc_x: float + Create the global attributes and the order that they have to be filled. - :param var_list: List of variables. + :param var_list: List of variables :type var_list: list - :param exec_id: ID of the execution. - :type exec_id: str - - :param ftype: File data type = [CUSTOM3:1, GRDDED3:2, BNDARY3:3, IDDATA3:4, PROFIL3:5, or SMATRX3:6] - (Default = 1) - :type ftype: int - - :param tstep: time step, coded HHMMSS according to Models-3 conventions. 
- :type tstep: int - - :param nthik: For BNDARY3 files, perimeter thickness (cells), or for SMATRX3 files, number of matrix-columns - (unused for other file types) - :type nthik: int - - :param gdtyp: Map projection type - LATGRD3=1 (Lat-Lon), - LAMGRD3=2 (Lambert conformal conic), - MERGRD3=3 (general tangent Mercator), - STEGRD3=4 (general tangent stereographic), - UTMGRD3=5 (UTM, a special case of Mercator), - POLGRD3=6 (polar secant stereographic), - EQMGRD3=7 (equatorial secant Mercator), or - TRMGRD3=8 (transverse secant Mercator) - :type gdtyp: int - - :param vgtype: Vertical coordinate type - VGSGPH3=1 (hydrostatic sigma-P), - VGSGPN3=2 (nonhydrostatic sigma-P), - VGSIGZ3=3 (sigma-Z), - VGPRES3=4 (pressure (mb)), - VGZVAL3=5 (Z (m above sea lvl), or - VGHVAL3=6 (H (m above ground)) - :type vgtype: int - - :param vgtop: Model-top, for sigma vertical-coordinate types - :type vgtop: int - - :param vglvls: Array of vertical coordinate level values; level 1 of the grid goes from vertical coordinate - VGLEVELS[0] to VGLEVELS[1], etc. - :type vglvls: numpy.array - - :param gdnam: Grid Name - :type gdnam: str - :return: Dict of global attributes and a list with the keys ordered. 
:rtype: tuple """ - # TODO documentation from datetime import datetime global_attributes = self.read_global_attributes() @@ -323,6 +251,30 @@ class WriterCmaq(Writer): def create_cmaq_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, regular_lat_lon=False, rotated=False, nx=None, ny=None, lat_1=None, lat_2=None, lon_0=None, lat_0=None, x_0=None, y_0=None, inc_x=None, inc_y=None): + # TODO Documentation + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param date: + :param hours: + :param regular_lat_lon: + :param rotated: + :param nx: + :param ny: + :param lat_1: + :param lat_2: + :param lon_0: + :param lat_0: + :param x_0: + :param y_0: + :param inc_x: + :param inc_y: + :return: + """ data_list, var_list = WriterCmaq.change_variable_attributes(data_list) @@ -346,10 +298,21 @@ class WriterCmaq(Writer): @staticmethod def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, global_attributes=None, regular_lat_lon=False, rotated=False): + # TODO Documentation + """ - import sys - from netCDF4 import Dataset - + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param date: + :param hours: + :param global_attributes: + :param regular_lat_lon: + :param rotated: + :return: + """ if regular_lat_lon: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: @@ -394,6 +357,10 @@ class WriterCmaq(Writer): netcdf.close() def create_parallel_netcdf(self): + # TODO Documentation + """ + Create an empty netCDF + """ st_time = timeit.default_timer() settings.write_log("\tCreating parallel NetCDF file.", level=2) # netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) @@ -450,6 +417,15 @@ class WriterCmaq(Writer): settings.write_time('WriterCmaq', 
'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) def write_parallel_netcdf(self, emission_list): + """ + Write the netCDF in parallel mode. + + :param emission_list: List of the processed emissions for the different emission inventories + :type emission_list: list + + :return: True when it finish well. + :rtype: bool + """ st_time = timeit.default_timer() settings.write_log("\tAppending data to parallel NetCDF file.", level=2) @@ -476,8 +452,18 @@ class WriterCmaq(Writer): netcdf.close() settings.write_time('WriterCmaq', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) + return True def write_serial_netcdf(self, emission_list): + """ + Write the netCDF in serial mode. + + :param emission_list: List of the processed emissions for the different emission inventories + :type emission_list: list + + :return: True when it finish well. + :rtype: bool + """ st_time = timeit.default_timer() mpi_numpy = False @@ -610,10 +596,6 @@ class WriterCmaq(Writer): sys.exit(1) elif mpi_vector: - var_time = timeit.default_timer() - - # data_list = []#np.empty(shape, dtype=np.float64) - if rank_data is not None: data = np.empty(var[:].shape, dtype=settings.precision) for i in xrange(settings.size): diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index 6711880..51f776f 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -217,7 +217,7 @@ class WriterWrfChem(Writer): def create_global_attributes(self): # TODO Documentation """ - Creates the global attributes that have to be filled. + Create the global attributes that have to be filled. 
""" global_attributes = self.read_global_attributes() diff --git a/hermesv3_gr/tools/coordinates_tools.py b/hermesv3_gr/tools/coordinates_tools.py index 51d074a..a61e0f2 100644 --- a/hermesv3_gr/tools/coordinates_tools.py +++ b/hermesv3_gr/tools/coordinates_tools.py @@ -17,19 +17,16 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . -import os -import sys - - -# Global variables - def get_grid_area(filename): - # TODO Documentation """ + Calculate the area for each cell of the grid using CDO - :param filename: - :return: + :param filename: Path to the file to calculate the cell area + :type filename: str + + :return: Area of each cell of the grid. + :rtype: numpy.array """ from cdo import Cdo from netCDF4 import Dataset @@ -59,16 +56,16 @@ def latlon2rotated(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): degrees_to_radians = math.pi / 180. radians_to_degrees = 180. / math.pi - lon_max = lon_min + 360 + # lon_max = lon_min + 360 # stlm=sin(tlm) sin_lat_pole_rad = math.sin(lat_pole_deg * degrees_to_radians) # ctlm=cos(tlm) cos_lat_pole_rad = math.cos(lat_pole_deg * degrees_to_radians) # stph=sin(tph) - sin_lon_pole_rad = math.sin(lon_pole_deg * degrees_to_radians) + # sin_lon_pole_rad = math.sin(lon_pole_deg * degrees_to_radians) # ctph=cos(tph) - cos_lon_pole_rad = math.cos(lon_pole_deg * degrees_to_radians) + # cos_lon_pole_rad = math.cos(lon_pole_deg * degrees_to_radians) # relm=(xlon-tlm0d)*dtr !distance from the centre lon (in rad) distance_from_center_lon = (lon_deg - lon_pole_deg) * degrees_to_radians @@ -117,7 +114,7 @@ def rotated2latlon(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): import math degrees_to_radians = math.pi / 180. - radians_to_degrees = 180. / math.pi + # radians_to_degrees = 180. 
/ math.pi # Positive east to negative east lon_pole_deg -= 180 @@ -157,7 +154,7 @@ def rotated2latlon(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): # almd += 360 # elif almd > max_lon: # almd -= 360 - # TODO use lon_min + almd[almd > (lon_min + 360)] -= 360 almd[almd < lon_min] += 360 @@ -178,44 +175,9 @@ def rotated2latlon_single(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min= import math degrees_to_radians = math.pi / 180. - radians_to_degrees = 180. / math.pi + # radians_to_degrees = 180. / math.pi # lon_max = lon_min + 360 - # - # sin_lat_pole_rad = math.sin(lat_pole_deg*degrees_to_radians) - # cos_lat_pole_rad = math.cos(lat_pole_deg*degrees_to_radians) # - # sin_lon_pole_rad = math.sin(lon_pole_deg*degrees_to_radians) # stph - # cos_lon_pole_rad = math.cos(lon_pole_deg*degrees_to_radians) # ctph - # - # - # # relm=(xlon-tlm0d)*dtr !distance from the centre lon (in rad) - # distance_from_center_lon = (lon_deg - lon_pole_deg)*degrees_to_radians - # # ctlm=cos(relm) !cos of this distance - # cos_distance_from_center_lon = math.cos(distance_from_center_lon) - # # stlm=sin(relm) !sin of this distance - # sin_distance_from_center_lon = math.sin(distance_from_center_lon) - # # aph=xlat*dtr !lat in rad - # lat_rad = lat_deg*degrees_to_radians - # # ctph=cos(aph) !cos of lat - # cos_lat_rad = math.cos(lat_rad) - # # stph=sin(aph) !sin of lat - # sin_lat_rad = math.sin(lat_rad) - # - # - # - # # sph=ctph0*stph+stph0*ctph*ctlm - # sin_rotated_lat = (cos_lat_pole_rad*sin_lat_rad) + - # (sin_distance_from_center_lon*cos_lat_rad*cos_distance_from_center_lon) - # # sph=min(sph,1.) - # # sph=max(sph,-1.) - # if sin_rotated_lat > 1.: - # sin_rotated_lat = 1. - # if sin_rotated_lat < -1.: - # sin_rotated_lat = -1. 
- # # aph=asin(sph) - # real_latitude = math.asin(sin_rotated_lat) - # real_longitude = math.atan2(cos_lat_rad*sin_distance_from_center_lon, - # (cos_distance_from_center_lon*cos_lat_rad - sin_lat_pole_rad*sin_rotated_lat)/cos_lat_pole_rad) - math.pi # Positive east to negative east lon_pole_deg -= 180 @@ -258,7 +220,7 @@ def rotated2latlon_single(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min= def create_bounds(coords, number_vertices=2): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. :param coords: Coordinates in degrees (latitude or longitude) :type coords: numpy.ndarray @@ -298,7 +260,7 @@ def create_bounds_esmpy(coords, spheric=False): interval = coords[1] - coords[0] - bound_coords = coords - interval/2 + bound_coords = coords - interval / 2 if not spheric: bound_coords = np.append(bound_coords, [bound_coords[-1] + interval]) @@ -322,9 +284,6 @@ def create_regular_rotated(lat_origin, lon_origin, lat_inc, lon_inc, n_lat, n_lo center_latitudes = np.arange(lat_origin, lat_origin + (n_lat*lat_inc), lat_inc, dtype=np.float) center_longitudes = np.arange(lon_origin, lon_origin + (n_lon*lon_inc), lon_inc, dtype=np.float) - # print lat_origin + (n_lat*lat_inc) - # print n_lat*lat_inc - corner_latitudes = create_bounds_esmpy(center_latitudes) corner_longitudes = create_bounds_esmpy(center_longitudes) @@ -338,65 +297,15 @@ def create_regular_old(lat_origin, lon_origin, lat_inc, lon_inc, n_lat, n_lon): center_latitudes = np.arange(lat_origin, lat_origin + (n_lat*lat_inc), lat_inc, dtype=np.float) center_longitudes = np.arange(lon_origin, lon_origin + (n_lon*lon_inc), lon_inc, dtype=np.float) - # print lat_origin + (n_lat*lat_inc) - # print n_lat*lat_inc - corner_latitudes = create_bounds(center_latitudes) corner_longitudes = create_bounds(center_longitudes) return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes -# def create_regular_grid(center_lat, center_lon, west_boundary, south_boundary, inc_lat, 
inc_lon): -# """ -# Creates a custom grid with the given parameters. The grid is divided in 4 arrays: -# - Center Latitudes -# - Center Longitudes -# - Boundary Latitudes (# latitudes +1) -# - Boundary Longitudes (# longitudes +1) -# -# :param center_lat: Latitude of the center of the grid (degrees). -# :type center_lat: float -# -# :param center_lon: Longitude of the center of the grid (degrees). -# :type center_lon: float -# -# :param west_boundary: Distance from de center to the western boundary (degrees) -# (not to the center of the first cell) -# :type west_boundary: float -# -# :param south_boundary: Distance from de center to the southern boundary (degrees) -# (not to the center of the first cell) -# :type south_boundary: float -# -# :param inc_lat: Vertical resolution of each cell (degrees). -# :type inc_lat: float -# -# :param inc_lon: Horizontal resolution of each cell (degrees) -# :type inc_lon: float -# -# :return: Arrays with the Center Latitudes, Center Longitudes, Boundary Latitudes, Boundary Longitudes. -# :rtype: tuple (numpy.ndarray, numpy.ndarray, numpy.ndarray, numpy.ndarray) -# """ -# lat_origin = center_lat - abs(south_boundary) + (inc_lat/2) -# lon_origin = center_lon - abs(west_boundary) + (inc_lon/2) -# n_lat = (abs(south_boundary)/inc_lat)*2 -# n_lon = (abs(west_boundary)/inc_lon)*2 -# -# center_latitudes = np.arange(lat_origin, lat_origin + (n_lat*inc_lat), inc_lat, dtype=np.float) -# center_longitudes = np.arange(lon_origin, lon_origin + (n_lon*inc_lon), inc_lon, dtype=np.float) -# -# corner_latitudes = create_bounds(center_latitudes) -# corner_longitudes = create_bounds(center_longitudes) -# -# # print center_latitudes -# -# return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes - - def create_regular_grid(center_lat, center_lon, west_boundary, south_boundary, inc_lat, inc_lon): """ - Creates a custom grid with the given parameters. 
The grid is divided in 4 arrays: + Create a custom grid with the given parameters. The grid is divided in 4 arrays: - Center Latitudes - Center Longitudes - Boundary Latitudes (# latitudes +1) @@ -432,44 +341,44 @@ def create_regular_grid(center_lat, center_lon, west_boundary, south_boundary, i n_lat = (abs(south_boundary)/inc_lat)*2 n_lon = (abs(west_boundary)/inc_lon)*2 - center_latitudes = np.arange(lat_origin + inc_lat, lat_origin + (n_lat*inc_lat) - inc_lat + inc_lat/2, inc_lat, + center_latitudes = np.arange(lat_origin + inc_lat, lat_origin + (n_lat*inc_lat) - inc_lat + inc_lat / 2, inc_lat, dtype=np.float) - center_longitudes = np.arange(lon_origin + inc_lon, lon_origin + (n_lon*inc_lon) - inc_lon + inc_lon/2, inc_lon, + center_longitudes = np.arange(lon_origin + inc_lon, lon_origin + (n_lon*inc_lon) - inc_lon + inc_lon / 2, inc_lon, dtype=np.float) corner_latitudes = create_bounds(center_latitudes) corner_longitudes = create_bounds(center_longitudes) center_latitudes = np.concatenate([ - [lat_origin + inc_lat/2 - inc_lat/4], + [lat_origin + inc_lat / 2 - inc_lat / 4], center_latitudes, - [lat_origin + (n_lat*inc_lat) - inc_lat/2 + inc_lat/4]]) + [lat_origin + (n_lat * inc_lat) - inc_lat / 2 + inc_lat / 4]]) center_longitudes = np.concatenate([ - [lon_origin + inc_lon/2 - inc_lon/4], + [lon_origin + inc_lon / 2 - inc_lon / 4], center_longitudes, - [lon_origin + (n_lon*inc_lon) - inc_lon/2 + inc_lon/4]]) + [lon_origin + (n_lon * inc_lon) - inc_lon / 2 + inc_lon / 4]]) corner_latitudes = np.concatenate([ - [[[lat_origin, lat_origin + inc_lat/2]]], + [[[lat_origin, lat_origin + inc_lat / 2]]], corner_latitudes, - [[[lat_origin + (n_lat*inc_lat) - inc_lat/2, lat_origin + (n_lat*inc_lat)]]]], axis=1) + [[[lat_origin + (n_lat * inc_lat) - inc_lat / 2, lat_origin + (n_lat * inc_lat)]]]], axis=1) corner_longitudes = np.concatenate([ - [[[lon_origin, lon_origin + inc_lon/2]]], + [[[lon_origin, lon_origin + inc_lon / 2]]], corner_longitudes, - [[[lon_origin + 
(n_lon*inc_lon) - inc_lon/2, lon_origin + (n_lon*inc_lon)]]]], axis=1) + [[[lon_origin + (n_lon * inc_lon) - inc_lon / 2, lon_origin + (n_lon * inc_lon)]]]], axis=1) return center_latitudes, center_longitudes, corner_latitudes, corner_longitudes if __name__ == '__main__': import numpy as np - new_pole_longitude_degrees = 20.0 # lonpole tlm0d - new_pole_latitude_degrees = 35.0 # latpole tph0d - print latlon2rotated(new_pole_longitude_degrees, new_pole_latitude_degrees, 20.0, 35.0) - print latlon2rotated(new_pole_longitude_degrees, new_pole_latitude_degrees, -20.2485, -9.9036) - print rotated2latlon_single(new_pole_longitude_degrees, new_pole_latitude_degrees, 0, 0) - print rotated2latlon_single(new_pole_longitude_degrees, new_pole_latitude_degrees, -51., -35.) - print rotated2latlon(new_pole_longitude_degrees, new_pole_latitude_degrees, np.array([-51., -51., -51., -51.]), + new_pole_lon_d = 20.0 # lonpole tlm0d + new_pole_lat_d = 35.0 # latpole tph0d + print latlon2rotated(new_pole_lon_d, new_pole_lat_d, 20.0, 35.0) + print latlon2rotated(new_pole_lon_d, new_pole_lat_d, -20.2485, -9.9036) + print rotated2latlon_single(new_pole_lon_d, new_pole_lat_d, 0, 0) + print rotated2latlon_single(new_pole_lon_d, new_pole_lat_d, -51., -35.) + print rotated2latlon(new_pole_lon_d, new_pole_lat_d, np.array([-51., -51., -51., -51.]), np.array([-35., -34.9, -34.8, -34.7])) diff --git a/hermesv3_gr/tools/custom_calendar.py b/hermesv3_gr/tools/custom_calendar.py index 256bda9..c221328 100644 --- a/hermesv3_gr/tools/custom_calendar.py +++ b/hermesv3_gr/tools/custom_calendar.py @@ -26,7 +26,7 @@ import holidays def custom_holidays(zone, year): """ - Calculates the festivity days that appear in the library holidays adding the Maundy Thursday and the God Friday + Calculate the festivity days that appear in the library holidays adding the Maundy Thursday and the God Friday :param zone: Name of the country. 
It has to appear and has to have the same format (capital letters) of the library holidays: https://pypi.python.org/pypi/holidays @@ -69,7 +69,7 @@ def get_holidays(zone, year): def pascua(year): """ - Calculates the "Pascua" date + Calculate the "Pascua" date :param year: Year to found the Pascua. :type year: int diff --git a/hermesv3_gr/tools/netcdf_tools.py b/hermesv3_gr/tools/netcdf_tools.py index 6a50c73..74188bb 100644 --- a/hermesv3_gr/tools/netcdf_tools.py +++ b/hermesv3_gr/tools/netcdf_tools.py @@ -22,25 +22,40 @@ import sys from netCDF4 import Dataset from mpi4py import MPI -icomm = MPI.COMM_WORLD -comm = icomm.Split(color=0, key=0) -rank = comm.Get_rank() -size = comm.Get_size() +ICOMM = MPI.COMM_WORLD +COMM = ICOMM.Split(color=0, key=0) +RANK = COMM.Get_rank() +SIZE = COMM.Get_size() def open_netcdf(netcdf_path): - from netCDF4 import Dataset - nc_out = Dataset(netcdf_path, mode='a') - return nc_out + """ + Open a netCDF file. + + :param netcdf_path: Path to the netCDF file. + :type netcdf_path: str + :return: netCDF + :rtype: Dataset + """ + netcdf = Dataset(netcdf_path, mode='a') + return netcdf -def close_netcdf(nc): - nc.close() + +def close_netcdf(netcdf): + """ + Close the netCDF. + + :param netcdf: netCDF + :type netcdf: Dataset + :return: + """ + netcdf.close() def get_grid_area(filename): """ - Calculates the area of each cell. + Calculate the area of each cell. :param filename: Full path to the NetCDF to calculate the cell areas. :type filename: str @@ -51,17 +66,32 @@ def get_grid_area(filename): from cdo import Cdo cdo = Cdo() - s = cdo.gridarea(input=filename) - nc_aux = Dataset(s, mode='r') - grid_area = nc_aux.variables['cell_area'][:] - nc_aux.close() + src = cdo.gridarea(input=filename) + netcdf = Dataset(src, mode='r') + grid_area = netcdf.variables['cell_area'][:] + netcdf.close() return grid_area def extract_vars(netcdf_path, variables_list, attributes_list=list()): + """ + Get the data from the list of variabbles. 
+ + :param netcdf_path: Path to the netCDF file + :type netcdf_path: str + + :param variables_list: List of the names of the variables to get. + :type variables_list: list + + :param attributes_list: List of the names of the variable attributes to get. + :type attributes_list: list + + :return: List of the variables from the netCDF as a dictionary with data as values and with the other keys their + attributes. + :rtype: list. + """ data_list = [] - # print netcdf_path netcdf = Dataset(netcdf_path, mode='r') for var in variables_list: if var == 'emi_nox_no2': @@ -89,6 +119,36 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, Mercator=False, lat_ts=None): + # TODO Documentation + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param date: + :param hours: + :param boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + :param global_attributes: + :param RegularLatLon: + :param Rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param LambertConformalConic: + :param lcc_x: + :param lcc_y: + :param lat_1_2: + :param lon_0: + :param lat_0: + :param Mercator: + :param lat_ts: + :return: + """ from cf_units import Unit, encode_time @@ -324,9 +384,35 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, regular_latlon=False, rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, lcc=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None): + # TODO Documentation + """ + + :param netcdf_path: + :param center_latitudes: + :param center_longitudes: + :param data_list: + :param levels: + :param date: + :param hours: + :param 
boundary_latitudes: + :param boundary_longitudes: + :param cell_area: + :param global_attributes: + :param regular_latlon: + :param rotated: + :param rotated_lats: + :param rotated_lons: + :param north_pole_lat: + :param north_pole_lon: + :param lcc: + :param lcc_x: + :param lcc_y: + :param lat_1_2: + :param lon_0: + :param lat_0: + :return: + """ from cf_units import Unit, encode_time - import sys - from netCDF4 import Dataset import numpy as np if not (regular_latlon or lcc or rotated): @@ -553,6 +639,13 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, def tuple_to_index(tuple_list, bidimensional=False): + # TODO Documentation + """ + + :param tuple_list: + :param bidimensional: + :return: + """ from operator import mul new_list = [] for tuple in tuple_list: @@ -564,6 +657,12 @@ def tuple_to_index(tuple_list, bidimensional=False): def calculate_displacements(counts): + # TODO Documentation + """ + + :param counts: + :return: + """ new_list = [0] accum = 0 for counter in counts[:-1]: diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py index 157dd81..1413710 100755 --- a/preproc/eclipsev5a_preproc.py +++ b/preproc/eclipsev5a_preproc.py @@ -44,7 +44,7 @@ var_units = 'kg.m-2.s-1' def get_grid_area(filename): """ - Calculates the area for each cell of the grid using CDO + Calculate the area for each cell of the grid using CDO :param filename: Path to the file to calculate the cell area :type filename: str @@ -66,7 +66,7 @@ def get_grid_area(filename): def create_bounds(coordinates, number_vertices=2): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. 
:param coordinates: Coordinates in degrees (latitude or longitude) :type coordinates: numpy.ndarray diff --git a/preproc/edgarv432_ap_preproc.py b/preproc/edgarv432_ap_preproc.py index 2f79e9c..3dbf1df 100755 --- a/preproc/edgarv432_ap_preproc.py +++ b/preproc/edgarv432_ap_preproc.py @@ -87,7 +87,7 @@ def ipcc_to_sector_dict(): def create_bounds(coordinates, number_vertices=2): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. :param coordinates: Coordinates in degrees (latitude or longitude) :type coordinates: numpy.array @@ -115,7 +115,7 @@ def create_bounds(coordinates, number_vertices=2): def get_grid_area(filename): """ - Calculates the area for each cell of the grid using CDO + Calculate the area for each cell of the grid using CDO :param filename: Path to the file to calculate the cell area :type filename: str diff --git a/preproc/edgarv432_voc_preproc.py b/preproc/edgarv432_voc_preproc.py index 3d7d116..ef834e1 100755 --- a/preproc/edgarv432_voc_preproc.py +++ b/preproc/edgarv432_voc_preproc.py @@ -56,7 +56,7 @@ Carles Tena Medina (carles.tena@bsc.es) from Barcelona Supercomputing Center (BS def create_bounds(coordinates, number_vertices=2): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. 
:param coordinates: Coordinates in degrees (latitude or longitude) :type coordinates: numpy.ndarray @@ -84,7 +84,7 @@ def create_bounds(coordinates, number_vertices=2): def get_grid_area(filename): """ - Calculates the area for each cell of the grid using CDO + Calculate the area for each cell of the grid using CDO :param filename: Path to the file to calculate the cell area :type filename: str diff --git a/preproc/gfas12_preproc.py b/preproc/gfas12_preproc.py index b2591aa..3630052 100755 --- a/preproc/gfas12_preproc.py +++ b/preproc/gfas12_preproc.py @@ -39,7 +39,7 @@ PARAMETERS_FILE = '/esarchive/recon/ecmwf/gfas/original_files/ga_mc_sfc_gfas_ecm def create_bounds(coords, number_vertices=2): """ - Calculates the vertices coordinates. + Calculate the vertices coordinates. :param coords: Coordinates in degrees (latitude or longitude) :type coords: numpy.ndarray @@ -69,7 +69,7 @@ def create_bounds(coords, number_vertices=2): def get_grid_area(filename): """ - Calculates the area for each cell of the grid using CDO + Calculate the area for each cell of the grid using CDO :param filename: Path to the file to calculate the cell area :type filename: str @@ -237,7 +237,7 @@ def do_transformation(input_file, date, output_dir, variables_list): def do_var_list(variables_file): """ - Creates the List of dictionaries + Create the List of dictionaries :param variables_file: CSV file with the information of each variable :type variables_file: str diff --git a/preproc/htapv2_preproc.py b/preproc/htapv2_preproc.py index 5961919..8766dde 100755 --- a/preproc/htapv2_preproc.py +++ b/preproc/htapv2_preproc.py @@ -300,7 +300,7 @@ def do_nmvoc_month_transformation(filename_list, out_path, sector, year): print out_path_aux write_netcdf(out_path_aux, c_lats['data'], c_lons['data'], [data_aux], boundary_latitudes=create_bounds(c_lats['data']), - boundary_longitudes=create_bounds(c_lons['data']) ,global_attributes=global_attributes,) + boundary_longitudes=create_bounds(c_lons['data']), 
global_attributes=global_attributes,) return True @@ -352,8 +352,8 @@ def do_nmvoc_industry_month_transformation(filename_list, out_path, sector, year data.update({'data': sol['data'] * r_sol['data']}) else: [r_inc, r_exf, r_sol] = extract_vars(ratio_file, ['inc', 'exf', 'sol']) - data.update({'data': ind['data'] * r_inc['data'] + exf['data']*r_exf['data'] + - sol['data'] * r_sol['data']}) + data.update({'data': ind['data'] * r_inc['data'] + exf['data'] * r_exf['data'] + + sol['data'] * r_sol['data']}) global_attributes = { 'title': 'HTAPv2 inventory for the sector {0} and pollutant {1}'.format(sector, voc), @@ -520,7 +520,7 @@ def check_vocs(year): voc_sum += new_voc['data'].sum() print '{0} month: {4}; NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format( - snap, nmvoc_sum, voc_sum, 100 * (nmvoc_sum - voc_sum)/nmvoc_sum, month) + snap, nmvoc_sum, voc_sum, 100 * (nmvoc_sum - voc_sum) / nmvoc_sum, month) if __name__ == '__main__': -- GitLab From 8f945a1923e4e806401270772767cd23e7c5fa04 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Tue, 11 Sep 2018 14:05:37 +0200 Subject: [PATCH 24/51] Correcting Code conventions --- .../gfas_emission_inventory.py | 2 +- hermesv3_gr/modules/grids/grid.py | 4 +- hermesv3_gr/modules/grids/grid_lcc.py | 4 +- hermesv3_gr/modules/grids/grid_mercator.py | 4 +- hermesv3_gr/modules/grids/grid_rotated.py | 4 +- hermesv3_gr/modules/masking/masking.py | 2 +- hermesv3_gr/modules/temporal/temporal.py | 6 +- hermesv3_gr/modules/vertical/vertical.py | 2 +- hermesv3_gr/modules/vertical/vertical_gfas.py | 10 +- hermesv3_gr/modules/writing/writer.py | 195 +++++++++++++----- hermesv3_gr/modules/writing/writer_cmaq.py | 26 +++ hermesv3_gr/modules/writing/writer_monarch.py | 180 ++++++++++------ .../modules/writing/writer_wrf_chem.py | 26 +++ hermesv3_gr/tools/coordinates_tools.py | 8 +- hermesv3_gr/tools/netcdf_tools.py | 66 +++--- preproc/ceds_preproc.py | 3 +- preproc/eclipsev5a_preproc.py | 24 +-- preproc/edgarv432_voc_preproc.py | 4 +- 
preproc/gfas12_preproc.py | 4 +- preproc/tno_mac_iii_preproc.py | 150 ++++++++++---- preproc/tno_mac_iii_preproc_voc_ratios.py | 8 +- 21 files changed, 504 insertions(+), 228 deletions(-) diff --git a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py index 7490a0e..9801db8 100755 --- a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py @@ -119,7 +119,7 @@ class GfasEmissionInventory(EmissionInventory): Extract the altitude values depending on the choosen method. :return: Array with the alittude of each fire. - :rtype: numpy.ndarray + :rtype: numpy.array """ from hermesv3_gr.tools.netcdf_tools import extract_vars diff --git a/hermesv3_gr/modules/grids/grid.py b/hermesv3_gr/modules/grids/grid.py index b9e19b2..e9227c6 100644 --- a/hermesv3_gr/modules/grids/grid.py +++ b/hermesv3_gr/modules/grids/grid.py @@ -246,7 +246,7 @@ class Grid(object): write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, [{'name': 'var_aux', 'units': '', 'data': 0}], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, - RegularLatLon=True) + regular_latlon=True) # Calculate the cell area of the auxiliary NetCDF file self.cell_area = self.get_cell_area() @@ -255,7 +255,7 @@ class Grid(object): write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, [{'name': 'var_aux', 'units': '', 'data': 0}], cell_area=self.cell_area, boundary_latitudes=self.boundary_latitudes, - boundary_longitudes=self.boundary_longitudes, RegularLatLon=True) + boundary_longitudes=self.boundary_longitudes, regular_latlon=True) else: self.cell_area = self.get_cell_area() diff --git a/hermesv3_gr/modules/grids/grid_lcc.py b/hermesv3_gr/modules/grids/grid_lcc.py index f66ea65..96ea0ec 100644 --- a/hermesv3_gr/modules/grids/grid_lcc.py +++ 
b/hermesv3_gr/modules/grids/grid_lcc.py @@ -145,7 +145,7 @@ class LccGrid(Grid): write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, [{'name': 'var_aux', 'units': '', 'data': 0}], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, - LambertConformalConic=True, lcc_x=self.x, lcc_y=self.y, + lcc=True, lcc_x=self.x, lcc_y=self.y, lat_1_2="{0}, {1}".format(self.lat_1, self.lat_2), lon_0=self.lon_0, lat_0=self.lat_0) # Calculate the cell area of the auxiliary NetCDF file @@ -156,7 +156,7 @@ class LccGrid(Grid): [{'name': 'var_aux', 'units': '', 'data': 0}], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, cell_area=self.cell_area, - LambertConformalConic=True, lcc_x=self.x, lcc_y=self.y, + lcc=True, lcc_x=self.x, lcc_y=self.y, lat_1_2="{0}, {1}".format(self.lat_1, self.lat_2), lon_0=self.lon_0, lat_0=self.lat_0) else: self.cell_area = self.get_cell_area() diff --git a/hermesv3_gr/modules/grids/grid_mercator.py b/hermesv3_gr/modules/grids/grid_mercator.py index cce1491..f3104fb 100644 --- a/hermesv3_gr/modules/grids/grid_mercator.py +++ b/hermesv3_gr/modules/grids/grid_mercator.py @@ -132,7 +132,7 @@ class MercatorGrid(Grid): write_netcdf(self.coords_netcdf_file, self.center_latitudes, self.center_longitudes, [{'name': 'var_aux', 'units': '', 'data': 0}], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, - Mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) + mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) # Calculate the cell area of the auxiliary NetCDF file self.cell_area = self.get_cell_area() @@ -146,7 +146,7 @@ class MercatorGrid(Grid): ], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, cell_area=self.cell_area, - Mercator=True, lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) + mercator=True, 
lcc_x=self.x, lcc_y=self.y, lon_0=self.lon_0, lat_ts=self.lat_ts) else: self.cell_area = self.get_cell_area() diff --git a/hermesv3_gr/modules/grids/grid_rotated.py b/hermesv3_gr/modules/grids/grid_rotated.py index 8a6f544..8566300 100644 --- a/hermesv3_gr/modules/grids/grid_rotated.py +++ b/hermesv3_gr/modules/grids/grid_rotated.py @@ -212,7 +212,7 @@ class RotatedGrid(Grid): [{'name': 'var_aux', 'units': '', 'data': 0}], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, - Rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, + roated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) @@ -224,7 +224,7 @@ class RotatedGrid(Grid): [{'name': 'var_aux', 'units': '', 'data': 0}], boundary_latitudes=self.boundary_latitudes, boundary_longitudes=self.boundary_longitudes, cell_area=self.cell_area, - Rotated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, + roated=True, rotated_lats=self.rlat, rotated_lons=self.rlon, north_pole_lat=self.new_pole_latitude_degrees, north_pole_lon=self.new_pole_longitude_degrees) else: diff --git a/hermesv3_gr/modules/masking/masking.py b/hermesv3_gr/modules/masking/masking.py index 068c819..a6a98b6 100644 --- a/hermesv3_gr/modules/masking/masking.py +++ b/hermesv3_gr/modules/masking/masking.py @@ -112,7 +112,7 @@ class Masking(object): 'units': '', 'data': dst_var, }] - Writer.write_netcdf(self.world_mask_file, lat, lon, data, RegularLatLon=True) + Writer.write_netcdf(self.world_mask_file, lat, lon, data, regular_latlon=True) settings.comm.Barrier() settings.write_time('Masking', 'create_country_iso', timeit.default_timer() - st_time, level=3) diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index 5dcc331..55ed096 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -247,7 +247,7 @@ class 
TemporalDistribution(object): def parse_tz(timezone): """ Parse the timezone (string format). - + It is needed because some libraries have more timezones than others and it tries to simplify setting the strange ones into the nearest common one. Examples: @@ -358,7 +358,7 @@ class TemporalDistribution(object): dst_var = np.concatenate(dst_var, axis=2) data = [{'name': 'timezone_id', 'units': '', 'data': dst_var}] - write_netcdf(self.netcdf_timezones, total_lat, total_lon, data, RegularLatLon=True) + write_netcdf(self.netcdf_timezones, total_lat, total_lon, data, regular_latlon=True) settings.comm.Barrier() settings.write_time('TemporalDistribution', 'create_netcdf_timezones', timeit.default_timer() - st_time, @@ -753,7 +753,7 @@ class TemporalDistribution(object): def calculate_delta_hours(st_date, time_step_type, time_step_num, time_step_freq): # TODO Documentation """ - + :param st_date: :param time_step_type: :param time_step_num: diff --git a/hermesv3_gr/modules/vertical/vertical.py b/hermesv3_gr/modules/vertical/vertical.py index ccde876..1b309ea 100644 --- a/hermesv3_gr/modules/vertical/vertical.py +++ b/hermesv3_gr/modules/vertical/vertical.py @@ -186,7 +186,7 @@ class VerticalDistribution(object): :type weights: numpy.array :return: Emissions already vertically distributed. - :rtype: numpy.ndarray + :rtype: numpy.array """ import numpy as np diff --git a/hermesv3_gr/modules/vertical/vertical_gfas.py b/hermesv3_gr/modules/vertical/vertical_gfas.py index 70f0414..600ab7f 100644 --- a/hermesv3_gr/modules/vertical/vertical_gfas.py +++ b/hermesv3_gr/modules/vertical/vertical_gfas.py @@ -128,13 +128,13 @@ class GfasVerticalDistribution(VerticalDistribution): Allocates the fire emissions on their top level. :param values: 2D array with the fire emissions - :type values: numpy.ndarray + :type values: numpy.array :param altitude: 2D array with the altitude of the fires. 
- :type altitude: numpy.ndarray + :type altitude: numpy.array :return: Emissions already allocated on the top altitude of each fire. - :rtype: numpy.ndarray + :rtype: numpy.array """ import numpy as np @@ -159,10 +159,10 @@ class GfasVerticalDistribution(VerticalDistribution): Manages all the process to do the vertical distribution. :param values: Emissions to be vertically distributed. - :type values: numpy.ndarray + :type values: numpy.array :return: Emissions already vertically distributed. - :rtype: numpy.ndarray + :rtype: numpy.array """ st_time = timeit.default_timer() diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index 7fd70d8..411c164 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -20,11 +20,38 @@ import sys import timeit +import numpy as np from hermesv3_gr.config import settings class Writer(object): + """ + Class to Write the output file. + :param path: Path to the destination file. + :type path: str + + :param grid: Grid of the destination file. + :type grid: Grid + + :param levels: List with the levels of the grid. + :type levels: list + + :param date: Date of the output file + :type date: datetime.datetime + + :param hours: List with the timestamp hours. + :type hours: list. + + :param global_attributes_path: Path to the file that contains the static global attributes. + :type global_attributes_path: str + + :param compress: Indicates if you want to compress the netCDF variable data. + :type compress: bool + + :param parallel: Indicates if you want to write in parallel mode. + :type parallel. bool + """ def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): self.path = path @@ -42,6 +69,15 @@ class Writer(object): self.global_attributes_path = global_attributes_path def write(self, inventory_list): + """ + Write the netCDF4 file with the pollutants of the given list of inventories. 
+ + :param inventory_list: List of inventories. + :type inventory_list: list + + :return: True at end + :rtype: bool + """ st_time = timeit.default_timer() settings.write_log('') settings.write_log("Writing netCDF output file {0} .".format(self.path)) @@ -77,10 +113,19 @@ class Writer(object): return None def set_variable_attributes(self, inventory_list): + """ + Change the variables_attribute parameter of the Writer class. + + :param inventory_list: list of invenotries. + :type inventory_list: list + + :return: True at end. + :rtype: bool + """ st_time = timeit.default_timer() empty_dict = {} - for ei in inventory_list: - for emi in ei.emissions: + for inventory in inventory_list: + for emi in inventory.emissions: if not emi['name'] in empty_dict: dict_aux = emi.copy() dict_aux['data'] = None @@ -93,17 +138,21 @@ class Writer(object): return True def calculate_data_by_var(self, variable, inventory_list, shape): - # TODO Documentation """ + Calculate the date of the given variable throw the inventory list. - :param variable: - :param inventory_list: - :param shape: - :return: - """ - import timeit - import numpy as np + :param variable: Variable to calculate. + :type variable: str + + :param inventory_list: Inventory list + :type inventory_list: list + + :param shape: Output desired shape. + :type shape: tuple + :return: Data of the given variable. 
+ :rtype: numpy.array + """ st_time = timeit.default_timer() settings.write_log("\t\t\t\tGetting data for '{0}' pollutant.".format(variable), level=3) @@ -128,6 +177,8 @@ class Writer(object): aux_data = np.zeros((shape[1], shape[2] * shape[3])) aux_data[ei.location['layer'], ei.location['FID']] = emission['data'] aux_data = aux_data.reshape((shape[1], shape[2], shape[3])) + else: + aux_data = None settings.write_time('VerticalDistribution', 'calculate_data_by_var', timeit.default_timer() - vertical_time, level=2) @@ -153,10 +204,19 @@ class Writer(object): """ Implement on inner class """ - return None + return np.array([0]) @staticmethod def calculate_displacements(counts): + """ + Calculate the index position of all the ranks. + + :param counts: Number of elements for rank + :type counts: list + + :return: Displacements + :rtype: list + """ st_time = timeit.default_timer() new_list = [0] @@ -170,15 +230,27 @@ class Writer(object): @staticmethod def tuple_to_index(tuple_list, bidimensional=False): + """ + Get the index for a list of shapes. + + :param tuple_list: List os shapes. + :type tuple_list: list + + :param bidimensional: Indicates if the tuple is bidimensional. + :type bidimensional: bool + + :return: List of index + :rtype: list + """ from operator import mul st_time = timeit.default_timer() new_list = [] - for tuple in tuple_list: + for my_tuple in tuple_list: if bidimensional: - new_list.append(tuple[-1] * tuple[-2]) + new_list.append(my_tuple[-1] * my_tuple[-2]) else: - new_list.append(reduce(mul, tuple)) + new_list.append(reduce(mul, my_tuple)) settings.write_time('Writer', 'tuple_to_index', timeit.default_timer() - st_time, level=3) return new_list @@ -190,17 +262,29 @@ class Writer(object): :param output_model: Name of the output model. Only accepted 'MONARCH, CMAQ or WRF_CHEM. :type output_model: str - :param path: Path to the output file. + :param path: Path to the destination file. :type path: str - :param grid: Grid object of the destination. 
+ :param grid: Grid of the destination file. :type grid: Grid - :param compress: Indicates if you want a compressed NetCDF. + :param levels: List with the levels of the grid. + :type levels: list + + :param date: Date of the output file + :type date: datetime.datetime + + :param hours: List with the timestamp hours. + :type hours: list. + + :param global_attributes_path: Path to the file that contains the static global attributes. + :type global_attributes_path: str + + :param compress: Indicates if you want to compress the netCDF variable data. :type compress: bool - :param parallel: Indicates if you want to write the NetCDF in parallel. - :type parallel: bool + :param parallel: Indicates if you want to write in parallel mode. + :type parallel. bool :return: Writing object of the desired output model. :rtype: Writer @@ -227,19 +311,23 @@ class Writer(object): def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, - Mercator=False, lat_ts=None): + regular_latlon=False, + roated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + lcc=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, + mercator=False, lat_ts=None): + # TODO Deprecate + """ + Will be deprecated + """ from netCDF4 import Dataset from cf_units import Unit, encode_time - if not (RegularLatLon or LambertConformalConic or Rotated or Mercator): - RegularLatLon = True + if not (regular_latlon or lcc or roated or mercator): + regular_latlon = True netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") # ===== Dimensions ===== - if RegularLatLon: + if regular_latlon: var_dim = ('lat', 'lon',) # 
Latitude @@ -264,7 +352,7 @@ class Writer(object): print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format( len(center_longitudes.shape)) sys.exit(1) - elif Rotated: + elif roated: var_dim = ('rlat', 'rlon',) # Rotated Latitude @@ -280,7 +368,7 @@ class Writer(object): sys.exit(1) netcdf.createDimension('rlon', len(rotated_lons)) lon_dim = ('rlat', 'rlon',) - elif LambertConformalConic or Mercator: + elif lcc or mercator: var_dim = ('y', 'x',) netcdf.createDimension('y', len(lcc_y)) @@ -288,6 +376,10 @@ class Writer(object): netcdf.createDimension('x', len(lcc_x)) lon_dim = ('y', 'x',) + else: + lat_dim = None + lon_dim = None + var_dim = None # Levels if levels is not None: @@ -318,11 +410,10 @@ class Writer(object): time[:] = [0.] else: time = netcdf.createVariable('time', 'd', ('time',), zlib=True) - u = Unit('hours') # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str( - u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) + time.units = str(Unit('hours').offset_by_time( + encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second))) time.standard_name = "time" time.calendar = "gregorian" time.long_name = "time" @@ -357,7 +448,7 @@ class Writer(object): # print lon_bnds[:].shape, boundary_longitudes.shape lon_bnds[:] = boundary_longitudes - if Rotated: + if roated: # Rotated Latitude rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) rlat.long_name = "latitude in rotated pole grid" @@ -371,18 +462,18 @@ class Writer(object): rlon.units = Unit("degrees").symbol rlon.standard_name = "grid_longitude" rlon[:] = rotated_lons - if LambertConformalConic or Mercator: - x = netcdf.createVariable('x', 'd', ('x',), zlib=True) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" 
- x[:] = lcc_x - - y = netcdf.createVariable('y', 'd', ('y',), zlib=True) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - y[:] = lcc_y + if lcc or mercator: + x_var = netcdf.createVariable('x', 'd', ('x',), zlib=True) + x_var.units = Unit("km").symbol + x_var.long_name = "x coordinate of projection" + x_var.standard_name = "projection_x_coordinate" + x_var[:] = lcc_x + + y_var = netcdf.createVariable('y', 'd', ('y',), zlib=True) + y_var.units = Unit("km").symbol + y_var.long_name = "y coordinate of projection" + y_var.standard_name = "projection_y_coordinate" + y_var[:] = lcc_y cell_area_dim = var_dim # Levels @@ -410,13 +501,13 @@ class Writer(object): var.coordinates = "lat lon" if cell_area is not None: var.cell_measures = 'area: cell_area' - if RegularLatLon: + if regular_latlon: var.grid_mapping = 'crs' - elif Rotated: + elif roated: var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: + elif lcc: var.grid_mapping = 'Lambert_conformal' - elif Mercator: + elif mercator: var.grid_mapping = 'mercator' try: var[:] = variable['data'] @@ -424,26 +515,26 @@ class Writer(object): print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, variable['data'].shape) # Grid mapping - if RegularLatLon: + if regular_latlon: # CRS mapping = netcdf.createVariable('crs', 'i') mapping.grid_mapping_name = "latitude_longitude" mapping.semi_major_axis = 6371000.0 mapping.inverse_flattening = 0 - elif Rotated: + elif roated: # Rotated pole mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' mapping.grid_north_pole_latitude = north_pole_lat mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: + elif lcc: # CRS mapping = netcdf.createVariable('Lambert_conformal', 'i') mapping.grid_mapping_name = "lambert_conformal_conic" mapping.standard_parallel = lat_1_2 mapping.longitude_of_central_meridian = 
lon_0 mapping.latitude_of_projection_origin = lat_0 - elif Mercator: + elif mercator: # Mercator mapping = netcdf.createVariable('mercator', 'i') mapping.grid_mapping_name = "mercator" diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index a7efdd9..200878b 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -28,7 +28,33 @@ from hermesv3_gr.config import settings class WriterCmaq(Writer): + """ + Class to Write the output file for CMAQ Chemical Transport Model CCTM. + :param path: Path to the destination file. + :type path: str + + :param grid: Grid of the destination file. + :type grid: Grid + + :param levels: List with the levels of the grid. + :type levels: list + + :param date: Date of the output file + :type date: datetime.datetime + + :param hours: List with the timestamp hours. + :type hours: list. + + :param global_attributes_path: Path to the file that contains the static global attributes. + :type global_attributes_path: str + + :param compress: Indicates if you want to compress the netCDF variable data. + :type compress: bool + + :param parallel: Indicates if you want to write in parallel mode. + :type parallel. bool + """ def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterCmaq, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index 98831ad..484108d 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -18,18 +18,43 @@ # along with HERMESv3_GR. If not, see . 
-import os import sys -from hermesv3_gr.modules.writing.writer import Writer import timeit -from hermesv3_gr.config import settings import numpy as np from netCDF4 import Dataset from mpi4py import MPI +from hermesv3_gr.modules.writing.writer import Writer +from hermesv3_gr.config import settings class WriterMonarch(Writer): + """ + Class to Write the output file in CF-1.6 conventions. + + :param path: Path to the destination file. + :type path: str + + :param grid: Grid of the destination file. + :type grid: Grid + :param levels: List with the levels of the grid. + :type levels: list + + :param date: Date of the output file + :type date: datetime.datetime + + :param hours: List with the timestamp hours. + :type hours: list. + + :param global_attributes_path: Path to the file that contains the static global attributes. + :type global_attributes_path: str + + :param compress: Indicates if you want to compress the netCDF variable data. + :type compress: bool + + :param parallel: Indicates if you want to write in parallel mode. + :type parallel. bool + """ def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterMonarch, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) @@ -38,12 +63,17 @@ class WriterMonarch(Writer): # } def unit_change(self, variable, data): - # TODO Documentation """ + Do the unit conversions of the data. + + :param variable: Variable to convert. + :type variable: dict - :param variable: - :param data: - :return: + :param data: Data to change. + :type data: numpy.array + + :return: Data with the new units. + :rtype: numpy.array """ from cf_units import Unit st_time = timeit.default_timer() @@ -69,10 +99,11 @@ class WriterMonarch(Writer): return data def create_parallel_netcdf(self): - # TODO Documentation """ + Create an empty netCDF4. - :return: + :return: True at end. 
+ :rtype: bool """ from cf_units import Unit, encode_time @@ -154,6 +185,10 @@ class WriterMonarch(Writer): netcdf.createDimension('x', len(self.grid.x)) settings.write_log("\t\t\t'x' dimension: {0}".format(len(self.grid.x)), level=3) lon_dim = ('y', 'x', ) + else: + lat_dim = None + lon_dim = None + var_dim = None # Levels if self.levels is not None: @@ -185,10 +220,7 @@ class WriterMonarch(Writer): time[:] = [0.] else: time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - # print u.offset_by_time(encode_time(date.year, date.month, date.day, date.hour, date.minute, date.second)) - # Unit('hour since 1970-01-01 00:00:00.0000000 UTC') - time.units = str(u.offset_by_time(encode_time(self.date.year, self.date.month, self.date.day, + time.units = str(Unit('hours').offset_by_time(encode_time(self.date.year, self.date.month, self.date.day, self.date.hour, self.date.minute, self.date.second))) time.standard_name = "time" time.calendar = "gregorian" @@ -252,21 +284,21 @@ class WriterMonarch(Writer): rlon[:] = self.grid.rlon settings.write_log("\t\t\t'rlon' variable created with size: {0}".format(rlon[:].shape), level=3) if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=self.compress) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" + x_var = netcdf.createVariable('x', 'd', ('x',), zlib=self.compress) + x_var.units = Unit("km").symbol + x_var.long_name = "x coordinate of projection" + x_var.standard_name = "projection_x_coordinate" if settings.rank == 0: - x[:] = self.grid.x - settings.write_log("\t\t\t'x' variable created with size: {0}".format(x[:].shape), level=3) + x_var[:] = self.grid.x + settings.write_log("\t\t\t'x' variable created with size: {0}".format(x_var[:].shape), level=3) - y = netcdf.createVariable('y', 'd', ('y',), zlib=self.compress) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = 
"projection_y_coordinate" + y_var = netcdf.createVariable('y', 'd', ('y',), zlib=self.compress) + y_var.units = Unit("km").symbol + y_var.long_name = "y coordinate of projection" + y_var.standard_name = "projection_y_coordinate" if settings.rank == 0: - y[:] = self.grid.y - settings.write_log("\t\t\t'y' variable created with size: {0}".format(y[:].shape), level=3) + y_var[:] = self.grid.y + settings.write_log("\t\t\t'y' variable created with size: {0}".format(y_var[:].shape), level=3) cell_area_dim = var_dim # Levels @@ -347,13 +379,17 @@ class WriterMonarch(Writer): netcdf.close() settings.write_time('WriterMonarch', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) + return True def write_parallel_netcdf(self, emission_list): - # TODO Documentation """ + Append the data to the netCDF4 file already created in parallel mode. + + :param emission_list: Data to append. + :type emission_list: list - :param emission_list: - :return: + :return: True at end. + :rtype: bool """ st_time = timeit.default_timer() @@ -390,13 +426,17 @@ class WriterMonarch(Writer): netcdf.close() settings.write_time('WriterMonarch', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) + return True def write_serial_netcdf(self, emission_list,): - # TODO Documentation """ + Write the netCDF4 file in serial mode. - :param emission_list: - :return: + :param emission_list: Data to append. + :type emission_list: list + + :return: True at end. 
+ :rtype: bool """ from cf_units import Unit, encode_time @@ -416,22 +456,22 @@ class WriterMonarch(Writer): if settings.rank == 0: - RegularLatLon = False - Rotated = False - LambertConformalConic = False + regular_latlon = False + rotated = False + lcc = False if self.grid.grid_type == 'global': - RegularLatLon = True + regular_latlon = True elif self.grid.grid_type == 'rotated': - Rotated = True + rotated = True elif self.grid.grid_type == 'lcc': - LambertConformalConic = True + lcc = True settings.write_log("\tCreating NetCDF file.", level=2) netcdf = Dataset(self.path, mode='w', format="NETCDF4") # ===== Dimensions ===== settings.write_log("\t\tCreating NetCDF dimensions.", level=2) - if RegularLatLon: + if regular_latlon: var_dim = ('lat', 'lon',) # Latitude @@ -471,10 +511,10 @@ class WriterMonarch(Writer): 'ERROR: Longitudes must be on a 1D or 2D array instead of {0} shape.'.format( len(self.grid.center_longitudes.shape))) sys.exit(1) - elif Rotated: + elif rotated: var_dim = ('rlat', 'rlon',) - # Rotated Latitude + # rotated Latitude if self.grid.rlat is None: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: @@ -484,7 +524,7 @@ class WriterMonarch(Writer): netcdf.createDimension('rlat', len(self.grid.rlat)) lat_dim = ('rlat', 'rlon',) - # Rotated Longitude + # rotated Longitude if self.grid.rlon is None: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: @@ -494,7 +534,7 @@ class WriterMonarch(Writer): netcdf.createDimension('rlon', len(self.grid.rlon)) lon_dim = ('rlat', 'rlon',) - elif LambertConformalConic: + elif lcc: var_dim = ('y', 'x',) settings.write_log("\t\t\t'y' dimension: {0}".format(len(self.grid.y)), level=3) netcdf.createDimension('y', len(self.grid.y)) @@ -502,6 +542,10 @@ class WriterMonarch(Writer): settings.write_log("\t\t\t'x' dimension: {0}".format(len(self.grid.x)), level=3) netcdf.createDimension('x', len(self.grid.x)) lon_dim = ('y', 'x', ) + else: + 
lat_dim = None + lon_dim = None + var_dim = None # Levels if self.levels is not None: @@ -529,8 +573,7 @@ class WriterMonarch(Writer): time[:] = [0.] else: time = netcdf.createVariable('time', 'd', ('time',)) - u = Unit('hours') - time.units = str(u.offset_by_time(encode_time( + time.units = str(Unit('hours').offset_by_time(encode_time( self.date.year, self.date.month, self.date.day, self.date.hour, self.date.minute, self.date.second))) time.standard_name = "time" @@ -574,8 +617,8 @@ class WriterMonarch(Writer): settings.write_log( "\t\t\t'lon_bnds' variable created with size: {0}".format(lon_bnds[:].shape), level=3) - if Rotated: - # Rotated Latitude + if rotated: + # rotated Latitude rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=self.compress) rlat.long_name = "latitude in rotated pole grid" rlat.units = Unit("degrees").symbol @@ -583,27 +626,27 @@ class WriterMonarch(Writer): rlat[:] = self.grid.rlat settings.write_log("\t\t\t'rlat' variable created with size: {0}".format(rlat[:].shape), level=3) - # Rotated Longitude + # rotated Longitude rlon = netcdf.createVariable('rlon', 'f', ('rlon',), zlib=self.compress) rlon.long_name = "longitude in rotated pole grid" rlon.units = Unit("degrees").symbol rlon.standard_name = "grid_longitude" rlon[:] = self.grid.rlon settings.write_log("\t\t\t'rlon' variable created with size: {0}".format(rlon[:].shape), level=3) - if LambertConformalConic: - x = netcdf.createVariable('x', 'd', ('x',), zlib=self.compress) - x.units = Unit("km").symbol - x.long_name = "x coordinate of projection" - x.standard_name = "projection_x_coordinate" - x[:] = self.grid.x - settings.write_log("\t\t\t'x' variable created with size: {0}".format(x[:].shape), level=3) - - y = netcdf.createVariable('y', 'd', ('y',), zlib=self.compress) - y.units = Unit("km").symbol - y.long_name = "y coordinate of projection" - y.standard_name = "projection_y_coordinate" - y[:] = self.grid.y - settings.write_log("\t\t\t'y' variable created with size: 
{0}".format(y[:].shape), level=3) + if lcc: + x_var = netcdf.createVariable('x', 'd', ('x',), zlib=self.compress) + x_var.units = Unit("km").symbol + x_var.long_name = "x coordinate of projection" + x_var.standard_name = "projection_x_coordinate" + x_var[:] = self.grid.x + settings.write_log("\t\t\t'x' variable created with size: {0}".format(x_var[:].shape), level=3) + + y_var = netcdf.createVariable('y', 'd', ('y',), zlib=self.compress) + y_var.units = Unit("km").symbol + y_var.long_name = "y coordinate of projection" + y_var.standard_name = "projection_y_coordinate" + y_var[:] = self.grid.y + settings.write_log("\t\t\t'y' variable created with size: {0}".format(y_var[:].shape), level=3) cell_area_dim = var_dim # Levels @@ -631,6 +674,7 @@ class WriterMonarch(Writer): if full_shape is None: full_shape = settings.comm.allgather(rank_data.shape) # print 'Rank {0} full_shape: {1}\n'.format(settings.rank, full_shape) + if mpi_numpy: if settings.size != 1: if settings.rank == 0: @@ -698,11 +742,11 @@ class WriterMonarch(Writer): if self.grid.cell_area is not None: var.cell_measures = 'area: cell_area' - if RegularLatLon: + if regular_latlon: var.grid_mapping = 'crs' - elif Rotated: + elif rotated: var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: + elif lcc: var.grid_mapping = 'Lambert_conformal' if mpi_numpy: @@ -746,19 +790,19 @@ class WriterMonarch(Writer): settings.write_log("\t\tCreating NetCDF metadata.", level=2) if settings.rank == 0: # Grid mapping - if RegularLatLon: + if regular_latlon: # CRS mapping = netcdf.createVariable('crs', 'i') mapping.grid_mapping_name = "latitude_longitude" mapping.semi_major_axis = 6371000.0 mapping.inverse_flattening = 0 - elif Rotated: - # Rotated pole + elif rotated: + # rotated pole mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' mapping.grid_north_pole_latitude = 90 - self.grid.new_pole_latitude_degrees mapping.grid_north_pole_longitude = 
self.grid.new_pole_longitude_degrees - elif LambertConformalConic: + elif lcc: # CRS mapping = netcdf.createVariable('Lambert_conformal', 'i') mapping.grid_mapping_name = "lambert_conformal_conic" diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index 51f776f..0d56e99 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -29,7 +29,33 @@ from mpi4py import MPI class WriterWrfChem(Writer): + """ + Class to Write the output file for the WRF-CHEM Chemical Transport Model. + :param path: Path to the destination file. + :type path: str + + :param grid: Grid of the destination file. + :type grid: Grid + + :param levels: List with the levels of the grid. + :type levels: list + + :param date: Date of the output file + :type date: datetime.datetime + + :param hours: List with the timestamp hours. + :type hours: list. + + :param global_attributes_path: Path to the file that contains the static global attributes. + :type global_attributes_path: str + + :param compress: Indicates if you want to compress the netCDF variable data. + :type compress: bool + + :param parallel: Indicates if you want to write in parallel mode. + :type parallel. bool + """ def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterWrfChem, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) diff --git a/hermesv3_gr/tools/coordinates_tools.py b/hermesv3_gr/tools/coordinates_tools.py index a61e0f2..8dbc7cd 100644 --- a/hermesv3_gr/tools/coordinates_tools.py +++ b/hermesv3_gr/tools/coordinates_tools.py @@ -43,6 +43,7 @@ def get_grid_area(filename): def latlon2rotated(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): # TODO Documentation """ + Transform lat lon degrees into the rotated coordinates. 
:param lon_pole_deg: :param lat_pole_deg: @@ -102,6 +103,7 @@ def latlon2rotated(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): def rotated2latlon(lon_pole_deg, lat_pole_deg, lon_deg, lat_deg, lon_min=-180): # TODO Documentation """ + Transform rotated coordinates into lat lon degrees. :param lon_pole_deg: :param lat_pole_deg: @@ -223,14 +225,14 @@ def create_bounds(coords, number_vertices=2): Calculate the vertices coordinates. :param coords: Coordinates in degrees (latitude or longitude) - :type coords: numpy.ndarray + :type coords: numpy.array :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. (by default 2) :type number_vertices: int :return: Array with as many elements as vertices for each value of coords. - :rtype: numpy.ndarray + :rtype: numpy.array """ import numpy as np @@ -332,7 +334,7 @@ def create_regular_grid(center_lat, center_lon, west_boundary, south_boundary, i :type inc_lon: float :return: Arrays with the Center Latitudes, Center Longitudes, Boundary Latitudes, Boundary Longitudes. - :rtype: tuple (numpy.ndarray, numpy.ndarray, numpy.ndarray, numpy.ndarray) + :rtype: tuple (numpy.array, numpy.array, numpy.array, numpy.array) """ import numpy as np diff --git a/hermesv3_gr/tools/netcdf_tools.py b/hermesv3_gr/tools/netcdf_tools.py index 74188bb..39a69f4 100644 --- a/hermesv3_gr/tools/netcdf_tools.py +++ b/hermesv3_gr/tools/netcdf_tools.py @@ -61,7 +61,7 @@ def get_grid_area(filename): :type filename: str :return: Returns the area of each cell. - :rtype: numpy.ndarray + :rtype: numpy.array """ from cdo import Cdo @@ -74,7 +74,7 @@ def get_grid_area(filename): return grid_area -def extract_vars(netcdf_path, variables_list, attributes_list=list()): +def extract_vars(netcdf_path, variables_list, attributes_list=()): """ Get the data from the list of variabbles. 
@@ -115,10 +115,10 @@ def extract_vars(netcdf_path, variables_list, attributes_list=list()): def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, levels=None, date=None, hours=None, boundary_latitudes=None, boundary_longitudes=None, cell_area=None, global_attributes=None, - RegularLatLon=False, - Rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, - LambertConformalConic=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, - Mercator=False, lat_ts=None): + regular_latlon=False, + rotated=False, rotated_lats=None, rotated_lons=None, north_pole_lat=None, north_pole_lon=None, + lcc=False, lcc_x=None, lcc_y=None, lat_1_2=None, lon_0=None, lat_0=None, + mercator=False, lat_ts=None): # TODO Documentation """ @@ -133,31 +133,31 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, :param boundary_longitudes: :param cell_area: :param global_attributes: - :param RegularLatLon: - :param Rotated: + :param regular_latlon: + :param rotated: :param rotated_lats: :param rotated_lons: :param north_pole_lat: :param north_pole_lon: - :param LambertConformalConic: + :param lcc: :param lcc_x: :param lcc_y: :param lat_1_2: :param lon_0: :param lat_0: - :param Mercator: + :param mercator: :param lat_ts: :return: """ from cf_units import Unit, encode_time - if not (RegularLatLon or LambertConformalConic or Rotated or Mercator): - RegularLatLon = True + if not (regular_latlon or lcc or rotated or mercator): + regular_latlon = True netcdf = Dataset(netcdf_path, mode='w', format="NETCDF4") # ===== Dimensions ===== - if RegularLatLon: + if regular_latlon: var_dim = ('lat', 'lon',) # Latitude @@ -181,7 +181,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, else: print 'ERROR: Longitudes must be on a 1D or 2D array instead of {0}'.format(len(center_longitudes.shape)) sys.exit(1) - elif Rotated: + elif rotated: var_dim = ('rlat', 'rlon',) # Rotated 
Latitude @@ -197,7 +197,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, sys.exit(1) netcdf.createDimension('rlon', len(rotated_lons)) lon_dim = ('rlat', 'rlon',) - elif LambertConformalConic or Mercator: + elif lcc or mercator: var_dim = ('y', 'x',) netcdf.createDimension('y', len(lcc_y)) @@ -205,6 +205,10 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.createDimension('x', len(lcc_x)) lon_dim = ('y', 'x', ) + else: + lat_dim = None + lon_dim = None + var_dim = None # Levels if levels is not None: @@ -270,7 +274,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, # print lon_bnds[:].shape, boundary_longitudes.shape lon_bnds[:] = boundary_longitudes - if Rotated: + if rotated: # Rotated Latitude rlat = netcdf.createVariable('rlat', 'f', ('rlat',), zlib=True) rlat.long_name = "latitude in rotated pole grid" @@ -284,7 +288,7 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, rlon.units = Unit("degrees").symbol rlon.standard_name = "grid_longitude" rlon[:] = rotated_lons - if LambertConformalConic or Mercator: + if lcc or mercator: x = netcdf.createVariable('x', 'd', ('x',), zlib=True) x.units = Unit("km").symbol x.long_name = "x coordinate of projection" @@ -323,13 +327,13 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, var.coordinates = "lat lon" if cell_area is not None: var.cell_measures = 'area: cell_area' - if RegularLatLon: + if regular_latlon: var.grid_mapping = 'crs' - elif Rotated: + elif rotated: var.grid_mapping = 'rotated_pole' - elif LambertConformalConic: + elif lcc: var.grid_mapping = 'Lambert_conformal' - elif Mercator: + elif mercator: var.grid_mapping = 'mercator' try: var[:] = variable['data'] @@ -337,26 +341,26 @@ def write_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, print 'VAR ERROR, netcdf shape: {0}, variable shape: {1}'.format(var[:].shape, 
variable['data'].shape) # Grid mapping - if RegularLatLon: + if regular_latlon: # CRS mapping = netcdf.createVariable('crs', 'i') mapping.grid_mapping_name = "latitude_longitude" mapping.semi_major_axis = 6371000.0 mapping.inverse_flattening = 0 - elif Rotated: + elif rotated: # Rotated pole mapping = netcdf.createVariable('rotated_pole', 'c') mapping.grid_mapping_name = 'rotated_latitude_longitude' mapping.grid_north_pole_latitude = north_pole_lat mapping.grid_north_pole_longitude = north_pole_lon - elif LambertConformalConic: + elif lcc: # CRS mapping = netcdf.createVariable('Lambert_conformal', 'i') mapping.grid_mapping_name = "lambert_conformal_conic" mapping.standard_parallel = lat_1_2 mapping.longitude_of_central_meridian = lon_0 mapping.latitude_of_projection_origin = lat_0 - elif Mercator: + elif mercator: # Mercator mapping = netcdf.createVariable('mercator', 'i') mapping.grid_mapping_name = "mercator" @@ -470,6 +474,10 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, netcdf.createDimension('x', len(lcc_x)) lon_dim = ('y', 'x', ) + else: + lat_dim = None + lon_dim = None + var_dim = None # Levels if levels is not None: @@ -597,7 +605,7 @@ def create_netcdf(netcdf_path, center_latitudes, center_longitudes, data_list, # print 'HOURSSSSSSSSSSSSSSSSSSSSS:', hours # if variable['data'] is not 0: # print var[:].shape, variable['data'].shape, variable['data'].max() - shape = None + shape = tuple() exec ("shape = (len(hours), {0}.size, {1}.size, {2}.size)".format(var_dim[0], var_dim[1], var_dim[2])) # exit() print shape @@ -648,11 +656,11 @@ def tuple_to_index(tuple_list, bidimensional=False): """ from operator import mul new_list = [] - for tuple in tuple_list: + for my_tuple in tuple_list: if bidimensional: - new_list.append(tuple[-1] * tuple[-2]) + new_list.append(my_tuple[-1] * my_tuple[-2]) else: - new_list.append(reduce(mul, tuple)) + new_list.append(reduce(mul, my_tuple)) return new_list diff --git a/preproc/ceds_preproc.py 
b/preproc/ceds_preproc.py index 1b13729..c458dff 100755 --- a/preproc/ceds_preproc.py +++ b/preproc/ceds_preproc.py @@ -34,7 +34,8 @@ LIST_SECTORS = ['agriculture', 'energy', 'industry', 'transport', 'residential', # LIST_YEARS = from 1950 to 2014 LIST_YEARS = [2010] INPUT_NAME = '-em-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim_gr_01-12.nc' -VOC_INPUT_NAME = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim-supplemental-data_gr_01-12.nc' +VOC_INPUT_NAME = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sector' + \ + 'Dim-supplemental-data_gr_01-12.nc' DO_AIR = True AIR_INPUT_NAME = '-em-AIR-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26_gr_01-12.nc' # ============================================================== diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py index 1413710..5a8049d 100755 --- a/preproc/eclipsev5a_preproc.py +++ b/preproc/eclipsev5a_preproc.py @@ -37,9 +37,9 @@ LIST_POLLUTANTS = ['BC', 'CH4', 'CO', 'NH3', 'NOx', 'OC', 'OM', 'PM10', 'PM25', # ============================================================== -month_factor = 1000000. / (30. * 24. * 3600.) # To pass from kt/month to Kg/s -year_factor = 1000000. / (365. * 24. * 3600.) # To pass from kt/year to Kg/s -var_units = 'kg.m-2.s-1' +MONTH_FACTOR = 1000000. / (30. * 24. * 3600.) # To pass from kt/month to Kg/s +YEAR_FACTOR = 1000000. / (365. * 24. * 3600.) # To pass from kt/year to Kg/s +VAR_UNITS = 'kg.m-2.s-1' def get_grid_area(filename): @@ -69,14 +69,14 @@ def create_bounds(coordinates, number_vertices=2): Calculate the vertices coordinates. :param coordinates: Coordinates in degrees (latitude or longitude) - :type coordinates: numpy.ndarray + :type coordinates: numpy.array :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. (by default 2) :type number_vertices: int :return: Array with as many elements as vertices for each value of coords. 
- :rtype: numpy.ndarray + :rtype: numpy.array """ interval = coordinates[1] - coordinates[0] @@ -164,7 +164,7 @@ def write_netcdf(output_name_path, data_list, center_lats, center_lons, grid_cel nc_output.setncattr('source', 'IIASA', ) nc_output.setncattr('history', 'Re-writing of the ECLIPSEv5a input to follow the CF-1.6 conventions;\n' + '2017-11-28: Creating;\n') - nc_output.setncattr('http://www.iiasa.ac.at/web/home/research/researchPrograms/air/ECLIPSEv5a.html') + nc_output.setncattr('web', 'http://www.iiasa.ac.at/web/home/research/researchPrograms/air/ECLIPSEv5a.html') nc_output.setncattr('comment', 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + '(Barcelona Supercomputing Center)', ) @@ -243,7 +243,7 @@ def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area) profile = extract_month_profile_by_sector(sector, month, pollutant) data_aux = data[i, :, :] * profile # print factor - data_aux = (data_aux * month_factor) / cell_area + data_aux = (data_aux * MONTH_FACTOR) / cell_area # #data_aux = data_aux / cell_area # print 'original: ', data[i, 192, 404] # print 'factor: ', profile[192, 404] @@ -253,7 +253,7 @@ def do_single_transformation(pollutant, sector, data, c_lats, c_lons, cell_area) 'name': pollutant_name, 'long_name': pollutant_name, 'data': data_aux, - 'units': Unit(var_units), + 'units': Unit(VAR_UNITS), }] write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, datetime(year=LIST_YEARS[i], month=month + 1, day=1)) @@ -321,13 +321,13 @@ def do_flaring_transformation(): for i in xrange(len(LIST_YEARS)): output_name = get_flaring_output_name(var_name, 'flaring', LIST_YEARS[i]) data_aux = data[i, :, :] - data_aux = (data_aux * year_factor) / cell_area + data_aux = (data_aux * YEAR_FACTOR) / cell_area data_aux = data_aux.reshape((1,) + data_aux.shape) data_list = [{ 'name': var_name, 'long_name': var_name, 'data': data_aux, - 'units': Unit(var_units), + 'units': Unit(VAR_UNITS), }] 
write_netcdf(output_name, data_list, c_lats, c_lons, cell_area, datetime(year=LIST_YEARS[i], month=1, day=1)) @@ -380,13 +380,13 @@ def do_ship_transformation(): data = nc_in.variables[var][0, :, :] data = np.nan_to_num(data) - data = (data * year_factor) / cell_area + data = (data * YEAR_FACTOR) / cell_area data = data.reshape((1,) + data.shape) data_list = [{ 'name': var_name, 'long_name': var_name, 'data': data, - 'units': Unit(var_units), + 'units': Unit(VAR_UNITS), }] write_netcdf(get_ship_output_name(var_name, 'ship', year), data_list, c_lats, c_lons, cell_area, diff --git a/preproc/edgarv432_voc_preproc.py b/preproc/edgarv432_voc_preproc.py index ef834e1..8f8e6cc 100755 --- a/preproc/edgarv432_voc_preproc.py +++ b/preproc/edgarv432_voc_preproc.py @@ -59,14 +59,14 @@ def create_bounds(coordinates, number_vertices=2): Calculate the vertices coordinates. :param coordinates: Coordinates in degrees (latitude or longitude) - :type coordinates: numpy.ndarray + :type coordinates: numpy.array :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. (by default 2) :type number_vertices: int :return: Array with as many elements as vertices for each value of coords. - :rtype: numpy.ndarray + :rtype: numpy.array """ interval = coordinates[1] - coordinates[0] diff --git a/preproc/gfas12_preproc.py b/preproc/gfas12_preproc.py index 3630052..6dd4a62 100755 --- a/preproc/gfas12_preproc.py +++ b/preproc/gfas12_preproc.py @@ -42,14 +42,14 @@ def create_bounds(coords, number_vertices=2): Calculate the vertices coordinates. :param coords: Coordinates in degrees (latitude or longitude) - :type coords: numpy.ndarray + :type coords: numpy.array :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. (by default 2) :type number_vertices: int :return: Array with as many elements as vertices for each value of coords. 
- :rtype: numpy.ndarray + :rtype: numpy.array """ import numpy as np diff --git a/preproc/tno_mac_iii_preproc.py b/preproc/tno_mac_iii_preproc.py index 75db7df..73c6282 100755 --- a/preproc/tno_mac_iii_preproc.py +++ b/preproc/tno_mac_iii_preproc.py @@ -29,13 +29,12 @@ INPUT_NAME = 'TNO_MACC_III_emissions_v1_1_.txt' LIST_YEARS = [2011] VOC_RATIO_PATH = '/esarchive/recon/tno/tno_macc_iii/original_files/nmvoc' VOC_RATIO_NAME = 'ratio_.nc' - # ============================================================== def get_pollutants(in_path): """ - Finds the pollutants on the ASCII emissions table. + Find the pollutants on the ASCII emissions table. :param in_path: Path to the ASCII file that contains the information of the TNO_MAC-III emissions. :type in_path: str @@ -50,33 +49,55 @@ def get_pollutants(in_path): def calculate_grid_definition(in_path): - # TODO Documentation + """ + Calculate the latitude and longitude coordinates of the cell. + + :param in_path: Path to the file that contains all the information. + :type in_path: str + + :return: Latitudes array, Longitudes array, Latitude interval, Longitude interval. 
+ :rtype: numpy.array, numpy.array, float, float + """ import pandas as pd import numpy as np - df = pd.read_table(in_path, sep=';') - df = df[df.SourceType != 'P'] + dataframe = pd.read_table(in_path, sep=';') + dataframe = dataframe[dataframe.SourceType != 'P'] # Longitudes - lons = np.sort(np.unique(df.Lon)) + lons = np.sort(np.unique(dataframe.Lon)) lons_interval = lons[1:] - lons[:-1] print 'Lon min: {0}; Lon max: {1}; Lon inc: {2}; Lon num: {3}'.format( - df.Lon.min(), df.Lon.max(), lons_interval.min(), len(lons)) + dataframe.Lon.min(), dataframe.Lon.max(), lons_interval.min(), len(lons)) # Latitudes - lats = np.sort(np.unique(df.Lat)) + lats = np.sort(np.unique(dataframe.Lat)) lats_interval = lats[1:] - lats[:-1] print 'Lat min: {0}; Lat max: {1}; Lat inc: {2}; Lat num: {3}'.format( - df.Lat.min(), df.Lat.max(), lats_interval.min(), len(lats)) + dataframe.Lat.min(), dataframe.Lat.max(), lats_interval.min(), len(lats)) - lats = np.arange(-90 + lats_interval.min()/2, 90, lats_interval.min(), dtype=np.float64) - lons = np.arange(-180 + lons_interval.min()/2, 180, lons_interval.min(), dtype=np.float64) + lats = np.arange(-90 + lats_interval.min() / 2, 90, lats_interval.min(), dtype=np.float64) + lons = np.arange(-180 + lons_interval.min() / 2, 180, lons_interval.min(), dtype=np.float64) return lats, lons, lats_interval.min(), lons_interval.min() def create_pollutant_empty_list(in_path, len_c_lats, len_c_lons): - # TODO Documentation + """ + Crate an empty pollutant list. + + :param in_path: Path to the file that conains the information. 
+ :type in_path: str + + :param len_c_lats: Number of elements on the latitude array + :type len_c_lats: int + + :param len_c_lons: Number of elements on the longitude array + :type len_c_lons: int + + :return: Pollutant list + :rtype: list + """ import numpy as np pollutant_list = [] @@ -98,7 +119,15 @@ def create_pollutant_empty_list(in_path, len_c_lats, len_c_lons): def do_transformation(year): - # TODO Docuemtnation + """ + Make al the process to transform the emissions of the current year. + + :param year: year to process. + :type year: int + + :return: True when everything finish well. + :rtype: Bool + """ from hermesv3_gr.tools.netcdf_tools import write_netcdf, get_grid_area from hermesv3_gr.tools.coordinates_tools import create_bounds from datetime import datetime @@ -107,7 +136,7 @@ def do_transformation(year): in_file = os.path.join(INPUT_PATH, INPUT_NAME.replace('', str(year))) - unit_factor = 1000./(365.*24.*3600.) # To pass from Mg/year to Kg/s + unit_factor = 1000. / (365. * 24. * 3600.) 
# To pass from Mg/year to Kg/s # unit_factor = 1000000 # To pass from Mg/m2.year to Mg/Km2.year c_lats, c_lons, lat_interval, lon_interval = calculate_grid_definition(in_file) @@ -115,20 +144,20 @@ def do_transformation(year): b_lats = create_bounds(c_lats, number_vertices=2) b_lons = create_bounds(c_lons, number_vertices=2) - df = pd.read_table(in_file, sep=';') + dataframe = pd.read_table(in_file, sep=';') - df_np = df[df.SourceType != 'P'] - df_p = df[df.SourceType == 'P'] + df_np = dataframe[dataframe.SourceType != 'P'] + df_p = dataframe[dataframe.SourceType == 'P'] df_np.loc[:, 'row_lat'] = np.array((df_np.Lat - (-90 + lat_interval / 2)) / lat_interval, dtype=np.int32) df_np.loc[:, 'col_lon'] = np.array((df_np.Lon - (-180 + lon_interval / 2)) / lon_interval, dtype=np.int32) - df_p.loc[:, 'row_lat'] = abs(np.array([c_lats]*len(df_p.Lat)) - df_p.Lat.values[:, None]).argmin(axis=1) - df_p.loc[:, 'col_lon'] = abs(np.array([c_lons]*len(df_p.Lon)) - df_p.Lon.values[:, None]).argmin(axis=1) + df_p.loc[:, 'row_lat'] = abs(np.array([c_lats] * len(df_p.Lat)) - df_p.Lat.values[:, None]).argmin(axis=1) + df_p.loc[:, 'col_lon'] = abs(np.array([c_lons] * len(df_p.Lon)) - df_p.Lon.values[:, None]).argmin(axis=1) - df = pd.concat([df_np, df_p]) + dataframe = pd.concat([df_np, df_p]) - for name, group in df.groupby('SNAP'): + for name, group in dataframe.groupby('SNAP'): print 'snap', name pollutant_list = create_pollutant_empty_list(in_file, len(c_lats), len(c_lons)) @@ -153,7 +182,7 @@ def do_transformation(year): boundary_latitudes=b_lats, boundary_longitudes=b_lons) cell_area = get_grid_area(aux_output_path) - pollutant_list[i]['data'] = pollutant_list[i]['data']*unit_factor/cell_area + pollutant_list[i]['data'] = pollutant_list[i]['data'] * unit_factor/cell_area write_netcdf(aux_output_path, c_lats, c_lons, [pollutant_list[i]], date=datetime(year, month=1, day=1), boundary_latitudes=b_lats, boundary_longitudes=b_lons, cell_area=cell_area, @@ -164,14 +193,27 @@ def 
do_transformation(year): 'for air quality modelling Atmospheric Chemistry and Physics 14 ' + '10963-10976 2014', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + - '(Barcelona Supercomputing Center)' - } - ) + '(Barcelona Supercomputing Center)'}) return True -def extract_vars(netcdf_path, variables_list, attributes_list=list()): - # TODO Documentation +def extract_vars(netcdf_path, variables_list, attributes_list=()): + """ + Get the data from the list of variabbles. + + :param netcdf_path: Path to the netCDF file + :type netcdf_path: str + + :param variables_list: List of the names of the variables to get. + :type variables_list: list + + :param attributes_list: List of the names of the variable attributes to get. + :type attributes_list: list + + :return: List of the variables from the netCDF as a dictionary with data as values and with the other keys their + attributes. + :rtype: list. + """ from netCDF4 import Dataset data_list = [] # print netcdf_path @@ -196,7 +238,18 @@ def extract_vars(netcdf_path, variables_list, attributes_list=list()): def get_voc_ratio(ratio_path, snap): - # TODO Documentation + """ + Get the ratio of the VOC for the current SNAP. + + :param ratio_path: Path to the file with the ratios. + :type ratio_path: str + + :param snap: SNAP to get the ratio. + :type snap: str + + :return: VOC Ratio + :rtype: dict + """ if snap == 'snap34': snap = 'snap3' try: @@ -207,21 +260,39 @@ def get_voc_ratio(ratio_path, snap): def get_voc_list(): - # TODO Documentation + """ + Get the VOC list. + + :return: VOC list + :rtype: list + """ return ['voc{0}'.format(str(x).zfill(2)) for x in [1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]] def get_sector_list(): - # TODO Documentation + """ + Get the sector list. 
+ + :return: Sector list + :rtype: list + """ return ['snap{0}'.format(x) for x in [1, 2, 34, 5, 6, 71, 72, 73, 74, 8, 9]] def do_voc_transformation(year): - # TODO Docuemtnation + """ + Make al the process to transform the VOC emissions of the current year. + + :param year: year to process. + :type year: int + + :return: True when everything finish well. + :rtype: Bool + """ + from warnings import warn as warning from hermesv3_gr.tools.netcdf_tools import write_netcdf, extract_vars from hermesv3_gr.tools.coordinates_tools import create_bounds - from warnings import warn as warning for snap in get_sector_list(): in_path = os.path.join(OUTPUT_PATH, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) @@ -255,8 +326,7 @@ def do_voc_transformation(year): 'for air quality modelling Atmospheric Chemistry and Physics 14 ' + '10963-10976 2014', 'comment': 'Re-writing done by Carles Tena (carles.tena@bsc.es) from the BSC-CNS ' + - '(Barcelona Supercomputing Center)' - }) + '(Barcelona Supercomputing Center)'}) else: warning("The pollutant {0} for the sector {1} does not exist.\n SNAP not found: {2}".format( voc, snap, ratio_path)) @@ -265,7 +335,15 @@ def do_voc_transformation(year): def check_vocs(year): - # TODO Documentation + """ + Check that the VOCs are calculated correctly. + + :param year: Year to evaluate + :type year: int + + :return: True when finish. 
+ :rtype: bool + """ for snap in get_sector_list(): nmvoc_path = os.path.join(OUTPUT_PATH, 'nmvoc_{0}'.format(snap), 'nmvoc_{0}.nc'.format(year)) [new_voc] = extract_vars(nmvoc_path, ['nmvoc']) @@ -279,7 +357,7 @@ def check_vocs(year): voc_sum += new_voc['data'].sum() print '{0} NMVOC sum: {1}; VOCs sum: {2}; %diff: {3}'.format( - snap, nmvoc_sum, voc_sum, 100*(nmvoc_sum - voc_sum)/nmvoc_sum) + snap, nmvoc_sum, voc_sum, 100*(nmvoc_sum - voc_sum) / nmvoc_sum) return True diff --git a/preproc/tno_mac_iii_preproc_voc_ratios.py b/preproc/tno_mac_iii_preproc_voc_ratios.py index 08d4c5d..9aea926 100755 --- a/preproc/tno_mac_iii_preproc_voc_ratios.py +++ b/preproc/tno_mac_iii_preproc_voc_ratios.py @@ -30,7 +30,7 @@ CSV_PATH = '/esarchive/recon/tno/tno_macc_iii/original_files/TNO_MACC_NMVOC prof # ============================================================== -def extract_vars(netcdf_path, variables_list, attributes_list=[]): +def extract_vars(netcdf_path, variables_list, attributes_list=()): # TODO Docuemtnation """ @@ -334,7 +334,7 @@ def get_grid_area(filename): :type filename: str :return: Returns the area of each cell. - :rtype: numpy.ndarray + :rtype: numpy.array """ from cdo import Cdo from netCDF4 import Dataset @@ -353,14 +353,14 @@ def create_bounds(coords, number_vertices=2): Calculate the vertices coordinates. :param coords: Coordinates in degrees (latitude or longitude) - :type coords: numpy.ndarray + :type coords: numpy.array :param number_vertices: Non mandatory parameter that informs the number of vertices that must have the boundaries. (by default 2) :type number_vertices: int :return: Array with as many elements as vertices for each value of coords. 
- :rtype: numpy.ndarray + :rtype: numpy.array """ import numpy as np -- GitLab From 3a5cdddc87d80c1bd3e4494f2bdbae6e8c6275d9 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Tue, 11 Sep 2018 14:25:36 +0200 Subject: [PATCH 25/51] Correcting Code conventions --- hermesv3_gr/modules/grids/grid.py | 1 + hermesv3_gr/modules/masking/masking.py | 71 +++++++++++++++---- hermesv3_gr/modules/writing/writer.py | 1 + hermesv3_gr/modules/writing/writer_cmaq.py | 1 + hermesv3_gr/modules/writing/writer_monarch.py | 1 + .../modules/writing/writer_wrf_chem.py | 1 + preproc/eclipsev5a_preproc.py | 5 +- 7 files changed, 63 insertions(+), 18 deletions(-) diff --git a/hermesv3_gr/modules/grids/grid.py b/hermesv3_gr/modules/grids/grid.py index e9227c6..0c424d1 100644 --- a/hermesv3_gr/modules/grids/grid.py +++ b/hermesv3_gr/modules/grids/grid.py @@ -39,6 +39,7 @@ class Grid(object): :param temporal_path: Path to the temporal folder. :type temporal_path: str """ + def __init__(self, grid_type, vertical_description_path, temporal_path): st_time = timeit.default_timer() # settings.write_log('Creating Grid...', level=1) diff --git a/hermesv3_gr/modules/masking/masking.py b/hermesv3_gr/modules/masking/masking.py index a6a98b6..36b1c93 100644 --- a/hermesv3_gr/modules/masking/masking.py +++ b/hermesv3_gr/modules/masking/masking.py @@ -20,11 +20,30 @@ import os import timeit -import hermesv3_gr.config.settings as settings from warnings import warn as warning +import hermesv3_gr.config.settings as settings class Masking(object): + """ + Masking object to apply simple mask or factor mask. + + :param world_info: Path to the file that contains the ISO Codes and other relevant information. + :type world_info: str + + :param factors_mask_values: List of the factor mask values. + :type factors_mask_values: list + + :param regrid_mask_values: List of the mask values. + :type regrid_mask_values: list + + :param grid: Grid. 
+ :type grid: Grid + + :param world_mask_file: + :type world_mask_file: str + """ + def __init__(self, world_info, factors_mask_values, regrid_mask_values, grid, world_mask_file=None): from timezonefinder import TimezoneFinder @@ -39,37 +58,53 @@ class Masking(object): self.regrid_mask_values = self.parse_masking_values(regrid_mask_values) self.regrid_mask = None self.scale_mask = None - self.tf = TimezoneFinder() + self.timezonefinder = TimezoneFinder() self.grid = grid settings.write_time('Masking', 'Init', timeit.default_timer() - st_time, level=3) def get_country_codes(self): + """ + Get the country code information. + + :return: Dictionary of country codes. + :rtype: dict + """ import pandas as pd st_time = timeit.default_timer() - # settings.write_log('\t\t\tGetting country codes.', level=3) - # df = pd.read_csv(self.world_info, sep=';', index_col=False, names=["country", "country_code"]) - df = pd.read_csv(self.world_info, sep=';') - del df['time_zone'], df['time_zone_code'] - df = df.drop_duplicates().dropna() - df = df.set_index('country_code_alpha') - countries_dict = df.to_dict() + dataframe = pd.read_csv(self.world_info, sep=';') + del dataframe['time_zone'], dataframe['time_zone_code'] + dataframe = dataframe.drop_duplicates().dropna() + dataframe = dataframe.set_index('country_code_alpha') + countries_dict = dataframe.to_dict() countries_dict = countries_dict['country_code'] settings.write_time('Masking', 'get_country_codes', timeit.default_timer() - st_time, level=3) return countries_dict @staticmethod - def partlst(lst, n): + def partlst(lst, num): + """ + Split a Array in N balanced arrays. + + :param lst: Array to split + :type lst: numpy.array + + :param num: Number of mini arrays. 
+ :type num: int + + :return: Array + :type: numpy.array + """ import itertools - """Partition @lst in @n balanced parts, in given order""" - parts, rest = divmod(len(lst), n) + # Partition @lst in @n balanced parts, in given order + parts, rest = divmod(len(lst), num) lstiter = iter(lst) - for j in xrange(n): - plen = len(lst) / n + (1 if rest > 0 else 0) + for j in xrange(num): + plen = len(lst) / num + (1 if rest > 0 else 0) rest -= 1 yield list(itertools.islice(lstiter, plen)) @@ -128,7 +163,7 @@ class Masking(object): elif longitude > +180: longitude -= 360 - tz = self.tf.timezone_at(lng=longitude, lat=latitude) + tz = self.timezonefinder.timezone_at(lng=longitude, lat=latitude) settings.write_time('Masking', 'find_timezone', timeit.default_timer() - st_time, level=3) @@ -151,6 +186,12 @@ class Masking(object): return code[0] def parse_factor_values(self, values): + """ + + :param values: + :return: + :rtype: dict + """ import re st_time = timeit.default_timer() diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index 411c164..b72284c 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -52,6 +52,7 @@ class Writer(object): :param parallel: Indicates if you want to write in parallel mode. :type parallel. bool """ + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): self.path = path diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 200878b..4d94843 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -55,6 +55,7 @@ class WriterCmaq(Writer): :param parallel: Indicates if you want to write in parallel mode. :type parallel. 
bool """ + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterCmaq, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index 484108d..86b1fc9 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -55,6 +55,7 @@ class WriterMonarch(Writer): :param parallel: Indicates if you want to write in parallel mode. :type parallel. bool """ + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterMonarch, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index 0d56e99..51027f8 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -56,6 +56,7 @@ class WriterWrfChem(Writer): :param parallel: Indicates if you want to write in parallel mode. :type parallel. 
bool """ + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterWrfChem, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py index 5a8049d..df523d2 100755 --- a/preproc/eclipsev5a_preproc.py +++ b/preproc/eclipsev5a_preproc.py @@ -53,11 +53,10 @@ def get_grid_area(filename): :rtype: numpy.array """ from cdo import Cdo - from netCDF4 import Dataset cdo = Cdo() - s = cdo.gridarea(input=filename) - nc_aux = Dataset(s, mode='r') + src = cdo.gridarea(input=filename) + nc_aux = Dataset(src, mode='r') grid_area = nc_aux.variables['cell_area'][:] nc_aux.close() -- GitLab From 9e908b6814cc2786105ea5f260ff3b1ea55ef31d Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 13 Sep 2018 17:19:22 +0200 Subject: [PATCH 26/51] deleting tests --- tests/unit/test_temporal.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/unit/test_temporal.py b/tests/unit/test_temporal.py index 729c07a..6174229 100644 --- a/tests/unit/test_temporal.py +++ b/tests/unit/test_temporal.py @@ -289,11 +289,11 @@ class TestTemporalDistribution(unittest.TestCase): # 22: 1., # 23: 1.} # - self.assertEqual( - temporal.calculate_2d_temporal_factors( - datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), timezones).tolist(), - [[20., 1.], [1., 1.]]) - + # self.assertEqual( + # temporal.calculate_2d_temporal_factors( + # datetime(year=2017, month=6, day=23, hour=11, minute=0, second=0), timezones).tolist(), + # [[20., 1.], [1., 1.]]) + # # def testing_do_temporal(self): # import numpy as np # from hermesv3_gr.modules.grids.grid import Grid -- GitLab From d3845e3296b88590ce4eb14b933bcdd13ff4a864 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 14 Sep 2018 11:04:31 +0200 Subject: [PATCH 27/51] trying to solve MPI problem --- conf/EI_configuration.csv | 10 +++++----- conf/hermes.conf 
| 11 +++-------- hermesv3_gr/hermes.py | 10 ++++++++++ hermesv3_gr/modules/regrid/regrid_conservative.py | 2 +- hermesv3_gr/modules/writing/writer_monarch.py | 2 +- hermesv3_gr/tools/netcdf_tools.py | 4 ++-- 6 files changed, 22 insertions(+), 17 deletions(-) diff --git a/conf/EI_configuration.csv b/conf/EI_configuration.csv index d366331..15e01cb 100644 --- a/conf/EI_configuration.csv +++ b/conf/EI_configuration.csv @@ -1,17 +1,17 @@ ei;sector;ref_year;active;factor_mask;regrid_mask;pollutants;path;frequency;source_type;p_vertical;p_month;p_day;p_hour;p_speciation;comment HTAPv2;energy;2010;0;;;so2;/jrc/htapv2/monthly_mean;monthly;area;V001;;D002;H002;E998;added 05/2017 HTAPv2;industry;2010;0;;;so2;/jrc/htapv2/monthly_mean;monthly;area;V002;;D003;H004;E998;added 05/2017 -HTAPv2;residential;2010;1;;- FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 -HTAPv2;residential;2010;1;FRA 50;+ FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 +HTAPv2;residential;2010;0;;- FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 +HTAPv2;residential;2010;0;FRA 50;+ FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 HTAPv2;transport;2010;1;;;so2;/jrc/htapv2/monthly_mean;monthly;area;;;D001;weekday=H001, saturday=H002, sunday=H003;E998;added 05/2017 -HTAPv2;air_lto;2010;1;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V003;M001;D001;H001;E998;added 05/2017 +HTAPv2;air_lto;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V003;M001;D001;H001;E998;added 05/2017 HTAPv2;air_cds;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V004;M001;D001;H001;E998;added 05/2017 HTAPv2;air_crs;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V005;M001;D001;H001;E998;added 05/2017 HTAPv2;ships;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;;M001;D001;H001;E008;added 05/2017 wiedinmyer;;2014;0;;;so2;/ucar/wiedinmyer/yearly_mean;yearly;area;;M001;D001;H001;E998;added 05/2017 
ECLIPSEv5a;flaring;2010;0;;;so2;/iiasa/eclipsev5a/yearly_mean;area;yearly;V006;M001;D001;H001;E998;added 11/2017 -GFASv12;;2015;1;;;so2,nox_no;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E997;added 05/2017 +GFASv12;;2015;0;;;so2,nox_no;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E997;added 05/2017 ECLIPSEv5a;transport;2010;0;;+ CHN,IND;so2;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D001;H001;E998;added 11/2017 ECLIPSEv5a;transport;2010;0;;;nox_no2;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E999;added 11/2017 -CARN;;2015;1;;;so2;/mtu/carnetal/yearly_mean;yearly;point;;M001;D001;H001;E998;added ... +CARN;;2015;0;;;so2;/mtu/carnetal/yearly_mean;yearly;point;;M001;D001;H001;E998;added ... Maestra;;2015;0;;;nox_no2;/home/Earth/ctena/Models/HERMESv3/;yearly;point;;M001;D001;H001;E999;added ... diff --git a/conf/hermes.conf b/conf/hermes.conf index 82ed49f..c893141 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -1,11 +1,11 @@ [GENERAL] log_level = 3 # input_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/IN -input_dir = /home/Earth/ctena/Models/HERMESv3/IN +input_dir = /home/Earth/ctena/Models/hermesv3_gr # data_path = /gpfs/scratch/bsc32/bsc32538/HERMES_data data_path = /esarchive/recon #output_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/OUT -output_dir = /home/carles/HERMES_out +output_dir = /home/Earth/ctena/HERMES_out output_name = HERMESv3_.nc start_date = 2014/09/02 00:00:00 # ***** end_date = start_date [DEFAULT] ***** @@ -89,12 +89,7 @@ auxiliar_files_path = /data/auxiliar_files/_ [EMISSION_INVENTORY_CONFIGURATION] -# cross_table = /conf/EI_configuration_Scalability.csv -# cross_table = /conf/EI_configuration_gridded.csv -# cross_table = /conf/EI_configuration_test.csv -# cross_table = /conf/EI_configuration_publi.csv -# cross_table = /conf/EI_configuration_EU_aerosol_gas.csv -cross_table = 
/conf/EI_configuration_WRF_CHEM_Rene.csv +cross_table = /conf/EI_configuration.csv [EMISSION_INVENTORY_PROFILES] diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index f094111..386463e 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -17,6 +17,12 @@ # You should have received a copy of the GNU General Public License # along with HERMESv3_GR. If not, see . +import os +import sys +parentPath = os.path.abspath(os.path.join('..')) +if parentPath not in sys.path: + sys.path.insert(0, parentPath) + import timeit from hermesv3_gr.config import settings @@ -54,6 +60,10 @@ class Hermes(object): settings.write_log('Starting HERMESv3 initialization:') + print settings.size + print settings.rank + sys.exit(1) + if self.options.output_model in ['CMAQ', 'WRF_CHEM'] and self.options.domain_type == 'global': settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: diff --git a/hermesv3_gr/modules/regrid/regrid_conservative.py b/hermesv3_gr/modules/regrid/regrid_conservative.py index f22aece..f2de4d1 100644 --- a/hermesv3_gr/modules/regrid/regrid_conservative.py +++ b/hermesv3_gr/modules/regrid/regrid_conservative.py @@ -46,7 +46,7 @@ class ConservativeRegrid(Regrid): src_grid = self.grid.create_esmf_grid_from_file(self.pollutant_dicts[0]['path']) src_field = ESMF.Field(src_grid, name='my input field') src_field.read(filename=self.pollutant_dicts[0]['path'], variable=self.pollutant_dicts[0]['name'], - timeslice=[0]) + timeslice=0) dst_grid = self.grid.esmf_grid dst_field = ESMF.Field(dst_grid, name='my outut field') diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index 86b1fc9..a7316b9 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -55,7 +55,7 @@ class WriterMonarch(Writer): :param parallel: Indicates if you want to write in parallel mode. :type parallel. 
bool """ - + def __init__(self, path, grid, levels, date, hours, global_attributes_path, compress=True, parallel=False): super(WriterMonarch, self).__init__(path, grid, levels, date, hours, global_attributes_path, compress, parallel) diff --git a/hermesv3_gr/tools/netcdf_tools.py b/hermesv3_gr/tools/netcdf_tools.py index 39a69f4..f0d72de 100644 --- a/hermesv3_gr/tools/netcdf_tools.py +++ b/hermesv3_gr/tools/netcdf_tools.py @@ -45,10 +45,10 @@ def open_netcdf(netcdf_path): def close_netcdf(netcdf): """ Close the netCDF. - + :param netcdf: netCDF :type netcdf: Dataset - :return: + :return: """ netcdf.close() -- GitLab From dc6b1aa685d5de9cde6f815984a952b7b4a2300d Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 14 Sep 2018 11:45:57 +0200 Subject: [PATCH 28/51] trying to solve MPI problem --- environment.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/environment.yml b/environment.yml index 2916618..79dfb4c 100644 --- a/environment.yml +++ b/environment.yml @@ -17,6 +17,8 @@ dependencies: - esmpy >= 7.1.0r - pytz - timezonefinder +# - openmpi +# - mpich - mpi4py # Testing - pytest -- GitLab From f1c657d90dcb9cc6f1fc9088c61ae0b74a011a08 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 18 Sep 2018 16:00:53 +0200 Subject: [PATCH 29/51] Corrected Parallel write. 
Mandatory to not use UNLIMITED dimensions --- environment.yml | 7 +- hermesv3_gr/config/settings.py | 7 +- hermesv3_gr/hermes.py | 10 -- hermesv3_gr/modules/temporal/temporal.py | 4 +- hermesv3_gr/modules/writing/writer.py | 51 ++++++- hermesv3_gr/modules/writing/writer_cmaq.py | 126 +++++++----------- hermesv3_gr/modules/writing/writer_monarch.py | 103 +++++--------- .../modules/writing/writer_wrf_chem.py | 87 ++++-------- run_test.py | 1 + 9 files changed, 171 insertions(+), 225 deletions(-) diff --git a/environment.yml b/environment.yml index 79dfb4c..6b0fe67 100644 --- a/environment.yml +++ b/environment.yml @@ -3,11 +3,16 @@ name: hermesv3_gr channels: +# - anaconda - conda-forge + - spectraldns dependencies: - python = 2 - numpy + - hdf5 = 1.8.17 + - hdf5-parallel = 1.8.17 + - libnetcdf-parallel - netcdf4 >= 1.3.1 - python-cdo >= 1.3.3 - geopandas @@ -17,8 +22,6 @@ dependencies: - esmpy >= 7.1.0r - pytz - timezonefinder -# - openmpi -# - mpich - mpi4py # Testing - pytest diff --git a/hermesv3_gr/config/settings.py b/hermesv3_gr/config/settings.py index 52c5c39..1b93cfa 100644 --- a/hermesv3_gr/config/settings.py +++ b/hermesv3_gr/config/settings.py @@ -30,7 +30,7 @@ global writing_serial writing_serial = False global compressed_netcdf -compressed_netcdf = False +compressed_netcdf = True if not writing_serial: compressed_netcdf = False @@ -67,8 +67,9 @@ def define_log_file(log_path, date): # TODO Documentation log_path = os.path.join(log_path, 'logs') if not os.path.exists(log_path): - os.makedirs(log_path) - + if rank == 0: + os.makedirs(log_path) + comm.Barrier() log_path = os.path.join(log_path, 'HERMESv3_{0}_Rank{1}_Procs{2}.log'.format( date.strftime('%Y%m%d%H'), str(rank).zfill(4), str(size).zfill(4))) if os.path.exists(log_path): diff --git a/hermesv3_gr/hermes.py b/hermesv3_gr/hermes.py index 386463e..f094111 100755 --- a/hermesv3_gr/hermes.py +++ b/hermesv3_gr/hermes.py @@ -17,12 +17,6 @@ # You should have received a copy of the GNU General Public 
License # along with HERMESv3_GR. If not, see . -import os -import sys -parentPath = os.path.abspath(os.path.join('..')) -if parentPath not in sys.path: - sys.path.insert(0, parentPath) - import timeit from hermesv3_gr.config import settings @@ -60,10 +54,6 @@ class Hermes(object): settings.write_log('Starting HERMESv3 initialization:') - print settings.size - print settings.rank - sys.exit(1) - if self.options.output_model in ['CMAQ', 'WRF_CHEM'] and self.options.domain_type == 'global': settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: diff --git a/hermesv3_gr/modules/temporal/temporal.py b/hermesv3_gr/modules/temporal/temporal.py index 55ed096..82e1e36 100644 --- a/hermesv3_gr/modules/temporal/temporal.py +++ b/hermesv3_gr/modules/temporal/temporal.py @@ -412,7 +412,7 @@ class TemporalDistribution(object): try: timezone = self.get_tz_from_id(id_aux) tz_list[timezones == id_aux] = timezone - except: + except IndexError: pass settings.write_time('TemporalDistribution', 'calculate_timezones', timeit.default_timer() - st_time, level=3) @@ -440,7 +440,7 @@ class TemporalDistribution(object): try: df['local'] = df.groupby('tz')['utc'].apply( lambda x: pd.to_datetime(x).dt.tz_localize(pytz.utc).dt.tz_convert(x.name).dt.tz_localize(None)) - except: + except pytz.exceptions.UnknownTimeZoneError: df['local'] = df.groupby('tz')['utc'].apply( lambda x: pd.to_datetime(x).dt.tz_localize(pytz.utc).dt.tz_convert( self.parse_tz(x.name)).dt.tz_localize(None)) diff --git a/hermesv3_gr/modules/writing/writer.py b/hermesv3_gr/modules/writing/writer.py index b72284c..06e6f34 100644 --- a/hermesv3_gr/modules/writing/writer.py +++ b/hermesv3_gr/modules/writing/writer.py @@ -21,6 +21,8 @@ import sys import timeit import numpy as np +from mpi4py import MPI +from netCDF4 import Dataset from hermesv3_gr.config import settings @@ -84,6 +86,7 @@ class Writer(object): settings.write_log("Writing netCDF output file {0} .".format(self.path)) 
self.set_variable_attributes(inventory_list) + self.change_variable_attributes() if self.parallel: if settings.rank == 0: self.create_parallel_netcdf() @@ -95,6 +98,9 @@ class Writer(object): settings.write_time('Writer', 'write', timeit.default_timer() - st_time) return True + def change_variable_attributes(self): + pass + def create_parallel_netcdf(self): """ Implemented on inner class. @@ -103,9 +109,50 @@ class Writer(object): def write_parallel_netcdf(self, emission_list): """ - Implemented on inner class. + Append the data to the netCDF4 file already created in parallel mode. + + :param emission_list: Data to append. + :type emission_list: list + + :return: True at end. + :rtype: bool """ - return None + + st_time = timeit.default_timer() + + settings.write_log("\tAppending data to parallel NetCDF file.", level=2) + if settings.size > 1: + netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) + else: + netcdf = Dataset(self.path, mode='a', format="NETCDF4") + settings.write_log("\t\tParallel NetCDF file ready to write.", level=2) + index = 0 + # print "Rank {0} 2".format(rank) + for var_name in self.variables_attributes.iterkeys(): + + data = self.calculate_data_by_var(var_name, emission_list, self.grid.shape) + st_time = timeit.default_timer() + index += 1 + + var = netcdf.variables[var_name] + if settings.size > 1: + var.set_collective(True) + # Correcting NAN + if data is None: + data = 0 + var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, + self.grid.y_lower_bound:self.grid.y_upper_bound] = data + + settings.write_log("\t\t\t'{0}' variable filled".format(var_name)) + + if 'cell_area' in netcdf.variables: + c_area = netcdf.variables['cell_area'] + c_area[self.grid.x_lower_bound:self.grid.x_upper_bound, + self.grid.y_lower_bound:self.grid.y_upper_bound] = self.grid.cell_area + + netcdf.close() + settings.write_time('Writer', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) + 
return True def write_serial_netcdf(self, emission_list): """ diff --git a/hermesv3_gr/modules/writing/writer_cmaq.py b/hermesv3_gr/modules/writing/writer_cmaq.py index 4d94843..7b3480c 100644 --- a/hermesv3_gr/modules/writing/writer_cmaq.py +++ b/hermesv3_gr/modules/writing/writer_cmaq.py @@ -76,9 +76,9 @@ class WriterCmaq(Writer): if data is not None: units = None - for var in self.variables_attributes: - if var['name'] == variable: - units = var['units'] + for var_name in self.variables_attributes: + if var_name == variable: + units = self.variables_attributes[var_name]['units'] break if Unit(units).symbol == Unit('mol.s-1').symbol: @@ -93,30 +93,37 @@ class WriterCmaq(Writer): sys.exit(1) return data - @staticmethod - def change_variable_attributes(emission_list): + def change_variable_attributes(self): """ Modify the emission list to be consistent to use the output as input for CMAQ model. - :param emission_list: List of emissions - :type emission_list: list - :return: Emission list ready for CMAQ - :rtype: list + :rtype: dict """ from cf_units import Unit - var_list = [] - for i in xrange(len(emission_list)): - emission_list[i]['var_desc'] = "{:<80}".format(emission_list[i]['long_name']) - emission_list[i]['long_name'] = "{:<16}".format(emission_list[i]['name']) + new_variable_dict = {} + for variable in self.variables_attributes: + if Unit(variable['units']).symbol == Unit('mol.s-1').symbol: + new_variable_dict[variable['name']] = { + 'units': "{:<16}".format('mole/s'), + 'var_desc': "{:<80}".format(variable['long_name']), + 'long_name': "{:<16}".format(variable['name']), + } + elif Unit(variable['units']).symbol == Unit('g.s-1').symbol: + new_variable_dict[variable['name']] = { + 'units': "{:<16}".format('g/s'), + 'var_desc': "{:<80}".format(variable['long_name']), + 'long_name': "{:<16}".format(variable['name']), + } + else: + settings.write_log('ERROR: Check the .err file to get more info.') + if settings.rank == 0: + raise TypeError("The unit '{0}' of 
specie {1} is not ".format(variable['units'], variable['name']) + + "defined correctly. Should be 'mol.s-1' or 'g.s-1'") + sys.exit(1) - if Unit(emission_list[i]['units']).symbol == Unit('mol.s-1').symbol: - emission_list[i]['units'] = "{:<16}".format('mole/s') - elif Unit(emission_list[i]['units']).symbol == Unit('g.s-1').symbol: - emission_list[i]['units'] = "{:<16}".format('g/s') - var_list.append(emission_list[i]['name']) - return emission_list, var_list + self.variables_attributes = new_variable_dict @staticmethod def create_tflag(st_date, hours_array, num_vars): @@ -395,8 +402,8 @@ class WriterCmaq(Writer): # ===== Dimensions ===== settings.write_log("\t\tCreating NetCDF dimensions.", level=2) - # netcdf.createDimension('TSTEP', len(self.hours)) - netcdf.createDimension('TSTEP', None) + netcdf.createDimension('TSTEP', len(self.hours)) + # netcdf.createDimension('TSTEP', None) settings.write_log("\t\t\t'TSTEP' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), level=3) netcdf.createDimension('DATE-TIME', 2) @@ -423,19 +430,17 @@ class WriterCmaq(Writer): settings.write_log("\t\t\t'TFLAG' variable created with size: {0}".format(tflag[:].shape), level=3) index = 0 - data_list, var_list = self.change_variable_attributes(self.variables_attributes) - for variable in self.variables_attributes: + # data_list, var_list = self.change_variable_attributes(self.variables_attributes) + for var_name in self.variables_attributes.iterkeys(): index += 1 - var = netcdf.createVariable(variable['name'], 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=self.compress) - var.units = variable['units'] - var.long_name = str(variable['long_name']) - var.var_desc = str(variable['var_desc']) - settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape) + - "\n\t\t\t\t'{0}' variable will be filled later.".format(variable['name']), level=3) + var = netcdf.createVariable(var_name, 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=self.compress) 
+ var.setncatts(self.variables_attributes[var_name]) + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(var_name, var[:].shape) + + "\n\t\t\t\t'{0}' variable will be filled later.".format(var_name), level=3) # ===== Global attributes ===== settings.write_log("\t\tCreating NetCDF metadata.", level=2) - global_attributes = self.create_global_attributes(var_list) + global_attributes = self.create_global_attributes(self.variables_attributes.keys()) for attribute in self.global_attributes_order: netcdf.setncattr(attribute, global_attributes[attribute]) @@ -443,42 +448,6 @@ class WriterCmaq(Writer): settings.write_time('WriterCmaq', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) - def write_parallel_netcdf(self, emission_list): - """ - Write the netCDF in parallel mode. - - :param emission_list: List of the processed emissions for the different emission inventories - :type emission_list: list - - :return: True when it finish well. - :rtype: bool - """ - st_time = timeit.default_timer() - settings.write_log("\tAppending data to parallel NetCDF file.", level=2) - - if settings.size > 1: - netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - else: - netcdf = Dataset(self.path, mode='a', format="NETCDF4") - settings.write_log("\t\tParallel NetCDF file ready to write.", level=2) - index = 0 - for variable in self.variables_attributes: - data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) - - index += 1 - - var = netcdf.variables[variable['name']] - if settings.size > 1: - var.set_collective(True) - # Correcting NAN - if data is None: - data = 0 - var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, - self.grid.y_lower_bound:self.grid.y_upper_bound] = data - settings.write_log("\t\t\t'{0}' variable filled".format(variable['name'])) - - netcdf.close() - settings.write_time('WriterCmaq', 'write_parallel_netcdf', timeit.default_timer() - 
st_time, level=3) return True def write_serial_netcdf(self, emission_list): @@ -534,11 +503,11 @@ class WriterCmaq(Writer): full_shape = None index = 0 - data_list, var_list = self.change_variable_attributes(self.variables_attributes) - for variable in data_list: + # data_list, var_list = self.change_variable_attributes(self.variables_attributes) + for var_name in self.variables_attributes.iterkeys(): if settings.size != 1: - settings.write_log("\t\t\tGathering {0} data.".format(variable['name']), level=3) - rank_data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) + settings.write_log("\t\t\tGathering {0} data.".format(var_name), level=3) + rank_data = self.calculate_data_by_var(var_name, emission_list, self.grid.shape) if mpi_numpy or mpi_vector: if rank_data is not None: root_shape = settings.comm.bcast(rank_data.shape, root=0) @@ -592,15 +561,16 @@ class WriterCmaq(Writer): if settings.size != 1: try: data = np.concatenate(data, axis=3) - except: + except (UnboundLocalError, TypeError, IndexError): data = 0 st_time = timeit.default_timer() index += 1 - var = netcdf.createVariable(variable['name'], 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=self.compress) - var.units = variable['units'] - var.long_name = str(variable['long_name']) - var.var_desc = str(variable['var_desc']) + var = netcdf.createVariable(var_name, 'f', ('TSTEP', 'LAY', 'ROW', 'COL',), zlib=self.compress) + var.setncatts(self.variables_attributes[var_name]) + # var.units = variable['units'] + # var.long_name = str(variable['long_name']) + # var.var_desc = str(variable['var_desc']) # var[:] = variable['data'] if mpi_numpy: @@ -614,7 +584,7 @@ class WriterCmaq(Writer): else: var[:, :, :, full_position[i][2]:full_position[i][3]] = \ recvbuf[i, :, :, :, : full_shape[i][-1]] - except: + except ValueError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: raise TypeError("ERROR on i {0} ".format(i) + @@ -640,12 +610,12 @@ class 
WriterCmaq(Writer): var[:] = data else: var[:] = data - settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape), + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(var_name, var[:].shape), level=3) settings.write_log("\t\tCreating NetCDF metadata.", level=2) if settings.rank == 0: # ===== Global attributes ===== - global_attributes = self.create_global_attributes(var_list) + global_attributes = self.create_global_attributes(self.variables_attributes.keys()) for attribute in self.global_attributes_order: netcdf.setncattr(attribute, global_attributes[attribute]) diff --git a/hermesv3_gr/modules/writing/writer_monarch.py b/hermesv3_gr/modules/writing/writer_monarch.py index a7316b9..3321b06 100644 --- a/hermesv3_gr/modules/writing/writer_monarch.py +++ b/hermesv3_gr/modules/writing/writer_monarch.py @@ -81,9 +81,9 @@ class WriterMonarch(Writer): if data is not None: units = None - for var in self.variables_attributes: - if var['name'] == variable: - units = var['units'] + for var_name in self.variables_attributes: + if var_name == variable: + units = self.variables_attributes[var_name]['units'] break if Unit(units).symbol == Unit('mol.s-1.m-2').symbol: @@ -99,6 +99,20 @@ class WriterMonarch(Writer): settings.write_time('WriterMonarch', 'unit_change', timeit.default_timer() - st_time, level=3) return data + def change_variable_attributes(self): + """ + Modify the emission list to be consistent to use the output as input for CMAQ model. + + :return: Emission list ready for CMAQ + :rtype: dict + """ + new_variable_dict = {} + for variable in self.variables_attributes: + new_variable_dict[variable['name']] = variable + del new_variable_dict[variable['name']]['name'] + + self.variables_attributes = new_variable_dict + def create_parallel_netcdf(self): """ Create an empty netCDF4. 
@@ -319,10 +333,10 @@ class WriterMonarch(Writer): var[:] = 0 index = 0 - for variable in self.variables_attributes: + for var_name, variable in self.variables_attributes.iteritems(): index += 1 - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=self.compress) + var = netcdf.createVariable(var_name, 'f', ('time',) + var_dim, zlib=self.compress) var.units = Unit(variable['units']).symbol if 'long_name' in variable: @@ -340,8 +354,8 @@ class WriterMonarch(Writer): var.grid_mapping = 'rotated_pole' elif LambertConformalConic: var.grid_mapping = 'Lambert_conformal' - settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape) + - "\n\t\t\t\t'{0}' variable will be filled later.".format(variable['name']), level=3) + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(var_name, var[:].shape) + + "\n\t\t\t\t'{0}' variable will be filled later.".format(var_name), level=3) settings.write_log("\t\tCreating NetCDF metadata.", level=2) # Grid mapping @@ -382,53 +396,6 @@ class WriterMonarch(Writer): settings.write_time('WriterMonarch', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) return True - def write_parallel_netcdf(self, emission_list): - """ - Append the data to the netCDF4 file already created in parallel mode. - - :param emission_list: Data to append. - :type emission_list: list - - :return: True at end. 
- :rtype: bool - """ - - st_time = timeit.default_timer() - - settings.write_log("\tAppending data to parallel NetCDF file.", level=2) - if settings.size > 1: - netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - else: - netcdf = Dataset(self.path, mode='a', format="NETCDF4") - settings.write_log("\t\tParallel NetCDF file ready to write.", level=2) - index = 0 - # print "Rank {0} 2".format(rank) - for variable in self.variables_attributes: - - data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) - st_time = timeit.default_timer() - index += 1 - - var = netcdf.variables[variable['name']] - if settings.size > 1: - var.set_collective(True) - # Correcting NAN - if data is None: - data = 0 - var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, - self.grid.y_lower_bound:self.grid.y_upper_bound] = data - - settings.write_log("\t\t\t'{0}' variable filled".format(variable['name'])) - - if self.grid.cell_area is not None: - c_area = netcdf.variables['cell_area'] - c_area[self.grid.x_lower_bound:self.grid.x_upper_bound, - self.grid.y_lower_bound:self.grid.y_upper_bound] = self.grid.cell_area - - netcdf.close() - settings.write_time('WriterMonarch', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) - return True - def write_serial_netcdf(self, emission_list,): """ Write the netCDF4 file in serial mode. 
@@ -665,10 +632,10 @@ class WriterMonarch(Writer): full_shape = None index = 0 - for variable in self.variables_attributes: + for var_name in self.variables_attributes.iterkeys(): if settings.size != 1: - settings.write_log("\t\t\tGathering {0} data.".format(variable['name']), level=3) - rank_data = self.calculate_data_by_var(variable['name'], emission_list, self.grid.shape) + settings.write_log("\t\t\tGathering {0} data.".format(var_name), level=3) + rank_data = self.calculate_data_by_var(var_name, emission_list, self.grid.shape) if mpi_numpy or mpi_vector: if rank_data is not None: root_shape = settings.comm.bcast(rank_data.shape, root=0) @@ -723,21 +690,21 @@ class WriterMonarch(Writer): if settings.size != 1: try: data = np.concatenate(data, axis=3) - except: + except (UnboundLocalError, TypeError, IndexError): data = 0 index += 1 - var = netcdf.createVariable(variable['name'], 'f', ('time',) + var_dim, zlib=self.compress) + var = netcdf.createVariable(var_name, 'f', ('time',) + var_dim, zlib=self.compress) - var.units = Unit(variable['units']).symbol + var.units = Unit(self.variables_attributes[var_name]['units']).symbol - if 'long_name' in variable: - var.long_name = str(variable['long_name']) + if 'long_name' in self.variables_attributes[var_name]: + var.long_name = str(self.variables_attributes[var_name]['long_name']) - if 'standard_name' in variable: - var.standard_name = str(variable['standard_name']) + if 'standard_name' in self.variables_attributes[var_name]: + var.standard_name = str(self.variables_attributes[var_name]['standard_name']) - if 'cell_method' in variable: - var.cell_method = str(variable['cell_method']) + if 'cell_method' in self.variables_attributes[var_name]: + var.cell_method = str(self.variables_attributes[var_name]['cell_method']) var.coordinates = "lat lon" @@ -761,7 +728,7 @@ class WriterMonarch(Writer): else: var[:, :, :, full_position[i][2]:full_position[i][3]] = \ recvbuf[i, :, :, :, : full_shape[i][-1]] - except: + except 
ValueError: settings.write_log('ERROR: Check the .err file to get more info.') if settings.rank == 0: raise TypeError("ERROR on i {0} ".format(i) + @@ -786,7 +753,7 @@ class WriterMonarch(Writer): var[:] = data else: var[:] = data - settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(variable['name'], var[:].shape), + settings.write_log("\t\t\t'{0}' variable created with size: {1}".format(var_name, var[:].shape), level=3) settings.write_log("\t\tCreating NetCDF metadata.", level=2) if settings.rank == 0: diff --git a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index 51027f8..fcbdd5c 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -20,13 +20,12 @@ import os import sys -from hermesv3_gr.modules.writing.writer import Writer import timeit -from hermesv3_gr.config import settings import numpy as np from netCDF4 import Dataset from mpi4py import MPI - +from hermesv3_gr.config import settings +from hermesv3_gr.modules.writing.writer import Writer class WriterWrfChem(Writer): """ @@ -141,6 +140,7 @@ class WriterWrfChem(Writer): raise TypeError("The unit '{0}' of specie {1} is not ".format(variable['units'], variable['name']) + "defined correctly. 
Should be 'mol.h-1.km-2' or 'ug.s-1.m-2'") sys.exit(1) + self.variables_attributes = new_variable_dict def read_global_attributes(self): @@ -320,30 +320,29 @@ class WriterWrfChem(Writer): settings.write_log("\tCreating parallel NetCDF file.", level=2) netcdf = Dataset(self.path, mode='w', format="NETCDF4") - if settings.rank == 0: - # ===== Dimensions ===== - settings.write_log("\t\tCreating NetCDF dimensions.", level=2) - netcdf.createDimension('Time', None) - settings.write_log("\t\t\t'Time' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), - level=3) - netcdf.createDimension('DateStrLen', 19) - settings.write_log("\t\t\t'DateStrLen' dimension: 19", level=3) - netcdf.createDimension('west_east', self.grid.center_longitudes.shape[1]) - settings.write_log("\t\t\t'west_east' dimension: {0}".format(len(self.hours)), level=3) - netcdf.createDimension('south_north', self.grid.center_latitudes.shape[0]) - settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), - level=3) - netcdf.createDimension('emissions_zdim', len(self.levels)) - settings.write_log("\t\t\t'emissions_zdim' dimension: {0}".format(len(self.levels)), level=3) - - # ===== Variables ===== - settings.write_log("\t\tCreating NetCDF variables.", level=2) - times = netcdf.createVariable('Times', 'S1', ('Time', 'DateStrLen', )) - times[:] = self.create_times_var() - settings.write_log("\t\t\t'Times' variable created with size: {0}".format(times[:].shape), level=3) + # ===== Dimensions ===== + settings.write_log("\t\tCreating NetCDF dimensions.", level=2) + netcdf.createDimension('Time', len(self.hours)) + # netcdf.createDimension('Time', None) + settings.write_log("\t\t\t'Time' dimension: {0}".format('UNLIMITED ({0})'.format(len(self.hours))), + level=3) + netcdf.createDimension('DateStrLen', 19) + settings.write_log("\t\t\t'DateStrLen' dimension: 19", level=3) + netcdf.createDimension('west_east', self.grid.center_longitudes.shape[1]) + 
settings.write_log("\t\t\t'west_east' dimension: {0}".format(len(self.hours)), level=3) + netcdf.createDimension('south_north', self.grid.center_latitudes.shape[0]) + settings.write_log("\t\t\t'south_north' dimension: {0}".format(self.grid.center_latitudes.shape[0]), + level=3) + netcdf.createDimension('emissions_zdim', len(self.levels)) + settings.write_log("\t\t\t'emissions_zdim' dimension: {0}".format(len(self.levels)), level=3) + + # ===== Variables ===== + settings.write_log("\t\tCreating NetCDF variables.", level=2) + times = netcdf.createVariable('Times', 'S1', ('Time', 'DateStrLen', )) + times[:] = self.create_times_var() + settings.write_log("\t\t\t'Times' variable created with size: {0}".format(times[:].shape), level=3) index = 0 - self.change_variable_attributes() for var_name in self.variables_attributes.iterkeys(): index += 1 var = netcdf.createVariable(var_name, 'f', ('Time', 'emissions_zdim', 'south_north', 'west_east',), @@ -362,39 +361,7 @@ class WriterWrfChem(Writer): settings.write_time('WriterCmaq', 'create_parallel_netcdf', timeit.default_timer() - st_time, level=3) - def write_parallel_netcdf(self, emission_list): - # TODO Documentation - """ - - :param emission_list: - :return: - """ - st_time = timeit.default_timer() - settings.write_log("\tAppending data to parallel NetCDF file.", level=2) - - if settings.size > 1: - netcdf = Dataset(self.path, mode='a', format="NETCDF4", parallel=True, comm=settings.comm, info=MPI.Info()) - else: - netcdf = Dataset(self.path, mode='a', format="NETCDF4") - settings.write_log("\t\tParallel NetCDF file ready to write.", level=2) - index = 0 - for var_name in self.variables_attributes.iterkeys(): - data = self.calculate_data_by_var(var_name, emission_list, self.grid.shape) - - index += 1 - - var = netcdf.variables[var_name] - if settings.size > 1: - var.set_collective(True) - # Correcting NAN - if data is None: - data = 0 - var[:, :, self.grid.x_lower_bound:self.grid.x_upper_bound, - 
self.grid.y_lower_bound:self.grid.y_upper_bound] = data - settings.write_log("\t\t\t'{0}' variable filled".format(var_name)) - - netcdf.close() - settings.write_time('WriterCmaq', 'write_parallel_netcdf', timeit.default_timer() - st_time, level=3) + return True def write_serial_netcdf(self, emission_list): # TODO Documentation @@ -441,7 +408,7 @@ class WriterWrfChem(Writer): full_shape = None index = 0 - self.change_variable_attributes() + # self.change_variable_attributes() for var_name in self.variables_attributes.iterkeys(): if settings.size != 1: @@ -476,7 +443,7 @@ class WriterWrfChem(Writer): if settings.size != 1: try: data = np.concatenate(data, axis=3) - except: + except (UnboundLocalError, TypeError, IndexError): data = 0 st_time = timeit.default_timer() index += 1 diff --git a/run_test.py b/run_test.py index 663775c..4a8b7d8 100644 --- a/run_test.py +++ b/run_test.py @@ -4,6 +4,7 @@ import os import sys import pytest + work_path = os.path.abspath(os.path.join(os.path.dirname(__file__))) os.chdir(work_path) print(work_path) -- GitLab From aaab75cb8fc4d6f0dc1ea44676247a6b66eae692 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 18 Sep 2018 16:04:27 +0200 Subject: [PATCH 30/51] old conda enviroment, not working on parallel netCDF --- environment.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/environment.yml b/environment.yml index 6b0fe67..2916618 100644 --- a/environment.yml +++ b/environment.yml @@ -3,16 +3,11 @@ name: hermesv3_gr channels: -# - anaconda - conda-forge - - spectraldns dependencies: - python = 2 - numpy - - hdf5 = 1.8.17 - - hdf5-parallel = 1.8.17 - - libnetcdf-parallel - netcdf4 >= 1.3.1 - python-cdo >= 1.3.3 - geopandas -- GitLab From 54445bb5873dbe676bc011ad6ffced8089e70683 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 18 Sep 2018 16:12:03 +0200 Subject: [PATCH 31/51] corrected PEP-8 conventions --- hermesv3_gr/modules/writing/writer_wrf_chem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/hermesv3_gr/modules/writing/writer_wrf_chem.py b/hermesv3_gr/modules/writing/writer_wrf_chem.py index fcbdd5c..5cf01cf 100644 --- a/hermesv3_gr/modules/writing/writer_wrf_chem.py +++ b/hermesv3_gr/modules/writing/writer_wrf_chem.py @@ -18,7 +18,6 @@ # along with HERMESv3_GR. If not, see . -import os import sys import timeit import numpy as np @@ -27,6 +26,7 @@ from mpi4py import MPI from hermesv3_gr.config import settings from hermesv3_gr.modules.writing.writer import Writer + class WriterWrfChem(Writer): """ Class to Write the output file for the WRF-CHEM Chemical Transport Model. -- GitLab From 73712c45ea1e708f3e6a2f1f6d6893964806de4d Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 18 Sep 2018 16:18:07 +0200 Subject: [PATCH 32/51] corrected PEP-8 conventions --- run_test.py | 1 + setup.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/run_test.py b/run_test.py index 4a8b7d8..6eb2926 100644 --- a/run_test.py +++ b/run_test.py @@ -5,6 +5,7 @@ import os import sys import pytest + work_path = os.path.abspath(os.path.join(os.path.dirname(__file__))) os.chdir(work_path) print(work_path) diff --git a/setup.py b/setup.py index 69e262c..7bae174 100644 --- a/setup.py +++ b/setup.py @@ -59,7 +59,8 @@ setup( 'holidays', 'pytz', 'timezonefinder', - 'mpi4py' + 'mpi4py', + 'pytest', ], packages=find_packages(), classifiers=[ -- GitLab From ccd9d9bbb7adcbd2bab876da5462a5e0496e2cd7 Mon Sep 17 00:00:00 2001 From: Carles Tena Medina Date: Wed, 19 Sep 2018 15:32:49 +0200 Subject: [PATCH 33/51] Corrected minor --- CHANGELOG | 4 +++- README.md | 2 +- data/profiles/vertical/1layer_vertical_description.csv | 2 -- hermesv3_gr/tools/sample_files.py | 10 +++++----- setup.py | 4 ++-- 5 files changed, 11 insertions(+), 11 deletions(-) delete mode 100644 data/profiles/vertical/1layer_vertical_description.csv diff --git a/CHANGELOG b/CHANGELOG index 2409f3c..02048fd 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,2 +1,4 @@ 0.0.0 - HERMESv3_GR first release \ No 
newline at end of file + 2018/09/18 + + HERMESv3_GR beta version first release \ No newline at end of file diff --git a/README.md b/README.md index ca8c806..b393f1f 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,2 @@ # HERMESv3 Global/Regional -[![Codacy Badge](https://api.codacy.com/project/badge/Grade/34fc5d6c803444178034b99dd28c7e3c)](https://www.codacy.com/app/carlestena/hermesv3_gr?utm_source=earth.bsc.es&utm_medium=referral&utm_content=gitlab/es/hermesv3_gr&utm_campaign=Badge_Grade) \ No newline at end of file +[![Codacy Badge](https://api.codacy.com/project/badge/Grade/34fc5d6c803444178034b99dd28c7e3c)](https://www.codacy.com/app/carlestena/hermesv3_gr?utm_source=earth.bsc.es&utm_medium=referral&utm_content=gitlab/es/hermesv3_gr&utm_campaign=Badge_Grade) diff --git a/data/profiles/vertical/1layer_vertical_description.csv b/data/profiles/vertical/1layer_vertical_description.csv deleted file mode 100644 index defd16d..0000000 --- a/data/profiles/vertical/1layer_vertical_description.csv +++ /dev/null @@ -1,2 +0,0 @@ -Ilayer;height_magl -1;1000 \ No newline at end of file diff --git a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py index 987b806..07c642e 100644 --- a/hermesv3_gr/tools/sample_files.py +++ b/hermesv3_gr/tools/sample_files.py @@ -43,6 +43,7 @@ def make_profiles_file_list(): os.path.join(main_dir, 'data', 'global_attributes.csv'), {'profiles': [{ 'speciation': [ + os.path.join(main_dir, 'data', 'profiles', 'speciation', 'MolecularWeights.csv'), os.path.join(main_dir, 'data', 'profiles', 'speciation', 'Speciation_profile_cb05_aero5_CMAQ.csv'), os.path.join(main_dir, 'data', 'profiles', 'speciation', 'Speciation_profile_cb05_aero5_MONARCH_aerosols.csv'), @@ -58,7 +59,6 @@ def make_profiles_file_list(): os.path.join(main_dir, 'data', 'profiles', 'temporal', 'tz_world_country_iso3166.csv'), ]}, {'vertical': [ - os.path.join(main_dir, 'data', 'profiles', 'vertical', '1layer_vertical_description.csv'), os.path.join(main_dir, 
'data', 'profiles', 'vertical', 'CMAQ_15layers_vertical_description.csv'), os.path.join(main_dir, 'data', 'profiles', 'vertical', 'MONARCH_Global_48layers_vertical_description.csv'), @@ -118,11 +118,11 @@ def query_yes_no(question, default="yes"): def check_args(args, exe_str): if len(args) == 0: print("Missing destination path after '{0}'. e.g.:".format(exe_str) + - "\n\t{0} /home/user/HERMES/sample_files".format(exe_str)) + "\n\t{0} /home/user/HERMES/HERMES_IN".format(exe_str)) sys.exit(1) elif len(args) > 1: print("Too much arguments through '{0}'. Only destination path is needed e.g.:".format(exe_str) + - "\n\t{0} /home/user/HERMES/sample_files".format(exe_str)) + "\n\t{0} /home/user/HERMES/HERMES_IN".format(exe_str)) sys.exit(1) else: dir_path = args[0] @@ -153,7 +153,7 @@ def copy_files(file_list, directory): def copy_config_files(): argv = sys.argv[1:] - parent_dir = check_args(argv, 'hermesv3_copy_config_files') + parent_dir = check_args(argv, 'hermesv3_gr_copy_config_files') copy_files(make_conf_file_list(), parent_dir) copy_files(make_profiles_file_list(), parent_dir) @@ -162,7 +162,7 @@ def copy_config_files(): def copy_preproc_files(): argv = sys.argv[1:] - parent_dir = check_args(argv, 'hermesv3_copy_preproc_files') + parent_dir = check_args(argv, 'hermesv3_gr_copy_preproc_files') copy_files(make_preproc_file_list(), parent_dir) diff --git a/setup.py b/setup.py index 7bae174..88e6bae 100644 --- a/setup.py +++ b/setup.py @@ -81,8 +81,8 @@ setup( entry_points={ 'console_scripts': [ 'hermesv3_gr = hermesv3_gr.hermes:run', - 'hermesv3_copy_config_files = hermesv3_gr.tools.sample_files:copy_config_files', - 'hermesv3_copy_preproc_files = hermesv3_gr.tools.sample_files:copy_preproc_files', + 'hermesv3_gr_copy_config_files = hermesv3_gr.tools.sample_files:copy_config_files', + 'hermesv3_gr_copy_preproc_files = hermesv3_gr.tools.sample_files:copy_preproc_files', ], }, ) -- GitLab From 8b54346a9ffb0df969654ad6eebc3148e7aba37b Mon Sep 17 00:00:00 2001 From: 
Carles Tena Date: Thu, 20 Sep 2018 09:28:23 +0200 Subject: [PATCH 34/51] Updated pipeline --- .gitlab-ci.yml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 87db4e1..67e39c1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,6 +23,8 @@ test_python2: - conda env update -f environment.yml -n hermesv3_gr python=2.7 - source activate hermesv3_gr - python run_test.py + - pip install codacy-coverage --upgrade + - python-codacy-coverage -r test/report/python2/coverage.xml #test_python3: # stage: test @@ -33,13 +35,6 @@ test_python2: # - source activate earthdiagnostics3 # - python run_test.py -report_codacy: - stage: report - script: - - source activate hermesv3_gr - - pip install codacy-coverage --upgrade - - python-codacy-coverage -r test/report/python2/coverage.xml - clean: stage: clean script: -- GitLab From d555728e4512cc67b02eaf4cf5d8469e8eaf5f75 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 20 Sep 2018 09:33:15 +0200 Subject: [PATCH 35/51] fixed typo --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 67e39c1..08c1355 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -24,7 +24,7 @@ test_python2: - source activate hermesv3_gr - python run_test.py - pip install codacy-coverage --upgrade - - python-codacy-coverage -r test/report/python2/coverage.xml + - python-codacy-coverage -r tests/report/python2/coverage.xml #test_python3: # stage: test -- GitLab From e80a8fd37e2bc5ec80543884c84cfe379fc60816 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 20 Sep 2018 11:05:47 +0200 Subject: [PATCH 36/51] updated setup.py --- setup.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 88e6bae..d96c650 100644 --- a/setup.py +++ b/setup.py @@ -18,18 +18,16 @@ # along with HERMESv3_GR. If not, see . 
-from os import path from setuptools import find_packages from setuptools import setup -here = path.abspath(path.dirname(__file__)) # Get the version number from the relevant file -with open(path.join(here, 'VERSION')) as f: +with open('VERSION') as f: version = f.read().strip() -with open("README.md", "r") as fh: - long_description = fh.read() +with open("README.md", "r") as f: + long_description = f.read() setup( name='hermesv3_gr', -- GitLab From 624d32635431edbc38fc9753ee48662ecd881323 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 20 Sep 2018 11:18:00 +0200 Subject: [PATCH 37/51] updated setup.py --- setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.py b/setup.py index d96c650..aa5b670 100644 --- a/setup.py +++ b/setup.py @@ -73,6 +73,9 @@ setup( 'CHANGELOG', 'VERSION', 'LICENSE', + 'data', + 'conf', + 'preproc' ] }, -- GitLab From d53e4ad944fd8ce14ad47fb033a8102ddb7f6c4e Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 20 Sep 2018 11:26:13 +0200 Subject: [PATCH 38/51] updated setup.py --- hermesv3_gr/__init__.py | 1 + setup.py | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/hermesv3_gr/__init__.py b/hermesv3_gr/__init__.py index e69de29..6c8e6b9 100644 --- a/hermesv3_gr/__init__.py +++ b/hermesv3_gr/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.0" diff --git a/setup.py b/setup.py index aa5b670..41fab0d 100644 --- a/setup.py +++ b/setup.py @@ -20,11 +20,12 @@ from setuptools import find_packages from setuptools import setup +from hermesv3_gr import __version__ # Get the version number from the relevant file with open('VERSION') as f: - version = f.read().strip() + version = __version__ with open("README.md", "r") as f: long_description = f.read() @@ -73,9 +74,9 @@ setup( 'CHANGELOG', 'VERSION', 'LICENSE', - 'data', - 'conf', - 'preproc' + 'data/', + 'conf/', + 'preproc/' ] }, -- GitLab From aa8bf20306fd35095c08791d243cc5472b3cd3c4 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Thu, 20 Sep 2018 
11:49:27 +0200 Subject: [PATCH 39/51] updated setup.py --- VERSION | 1 - setup.py | 45 ++++++++++++++++++++++++++++++++++++--------- 2 files changed, 36 insertions(+), 10 deletions(-) delete mode 100644 VERSION diff --git a/VERSION b/VERSION deleted file mode 100644 index bd52db8..0000000 --- a/VERSION +++ /dev/null @@ -1 +0,0 @@ -0.0.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 41fab0d..f7c0126 100644 --- a/setup.py +++ b/setup.py @@ -24,8 +24,7 @@ from hermesv3_gr import __version__ # Get the version number from the relevant file -with open('VERSION') as f: - version = __version__ +version = __version__ with open("README.md", "r") as f: long_description = f.read() @@ -68,17 +67,45 @@ setup( "Operating System :: OS Independent", "Topic :: Scientific/Engineering :: Atmospheric Science" ], - include_package_data=True, - package_data={'hermesv3_gr': [ - 'README', + package_data={'': [ + 'README.md', 'CHANGELOG', - 'VERSION', 'LICENSE', - 'data/', - 'conf/', - 'preproc/' + 'data/*', + 'conf/*', + 'preproc/*' ] }, + data_files=[('.', ['LICENSE', 'CHANGELOG', ]), + ('conf', ['conf/hermes.conf', + 'conf/EI_configuration.csv', ]), + ('data', ['data/global_attributes.csv', + 'data/profiles/speciation/MolecularWeights.csv', + 'data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv', + 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv', + 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv', + 'data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv', + 'data/profiles/temporal/TemporalProfile_Daily.csv', + 'data/profiles/temporal/TemporalProfile_Hourly.csv', + 'data/profiles/temporal/TemporalProfile_Monthly.csv', + 'data/profiles/temporal/tz_world_country_iso3166.csv', + 'data/profiles/vertical/CMAQ_15layers_vertical_description.csv', + 'data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv', + 
'data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv', + 'data/profiles/vertical/Vertical_profile.csv', ]), + ('preproc', ['preproc/ceds_preproc.py', + 'preproc/eclipsev5a_preproc.py', + 'preproc/edgarv432_ap_preproc.py', + 'preproc/edgarv432_voc_preproc.py', + 'preproc/emep_preproc.py', + 'preproc/gfas12_preproc.py', + 'preproc/htapv2_preproc.py', + 'preproc/tno_mac_iii_preproc.py', + 'preproc/tno_mac_iii_preproc_voc_ratios.py', + 'preproc/wiedinmyer_preproc.py', ]), + ], + + include_package_data=True, entry_points={ 'console_scripts': [ -- GitLab From d0129b978b5849d5a90f57251b48069c688507b4 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 21 Sep 2018 16:37:23 +0200 Subject: [PATCH 40/51] updated setup.py --- conf/hermes.conf | 2 +- environment.yml | 2 +- preproc/ceds_preproc.py | 2 +- preproc/eclipsev5a_preproc.py | 4 +++- preproc/edgarv432_ap_preproc.py | 36 ++++++++++++++++++--------------- setup.py | 12 +++++++---- 6 files changed, 34 insertions(+), 24 deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index c893141..920599d 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -21,7 +21,7 @@ output_timestep_freq = 1 # output_model = MONARCH # output_model = CMAQ output_model = WRF_CHEM -output_attributes = /home/Earth/ctena/Models/HERMESv3/IN/data/wrf_chem_global_attributes.csv +output_attributes = /data/global_attributes.csv # ***** domain_type=[global, lcc, rotated, mercator] ***** domain_type = mercator diff --git a/environment.yml b/environment.yml index 2916618..5900cd3 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ dependencies: - python = 2 - numpy - netcdf4 >= 1.3.1 - - python-cdo >= 1.3.3 + - python-cdo = 1.6.3 - geopandas - pyproj - configargparse diff --git a/preproc/ceds_preproc.py b/preproc/ceds_preproc.py index c458dff..3e22812 100755 --- a/preproc/ceds_preproc.py +++ b/preproc/ceds_preproc.py @@ -32,7 +32,7 @@ VOC_POLLUTANTS = ['VOC01', 'VOC02', 'VOC03', 'VOC04', 'VOC05', 'VOC06', 
'VOC07', LIST_SECTORS = ['agriculture', 'energy', 'industry', 'transport', 'residential', 'solvents', 'waste', 'ships'] # LIST_YEARS = from 1950 to 2014 -LIST_YEARS = [2010] +LIST_YEARS = [2014] INPUT_NAME = '-em-anthro_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sectorDim_gr_01-12.nc' VOC_INPUT_NAME = '-em-speciated-VOC_input4MIPs_emissions_CMIP_CEDS-v2016-07-26-sector' + \ 'Dim-supplemental-data_gr_01-12.nc' diff --git a/preproc/eclipsev5a_preproc.py b/preproc/eclipsev5a_preproc.py index df523d2..9d28b77 100755 --- a/preproc/eclipsev5a_preproc.py +++ b/preproc/eclipsev5a_preproc.py @@ -27,7 +27,7 @@ from cf_units import Unit # ============== CONFIGURATION PARAMETERS ====================== INPUT_PATH = '/esarchive/recon/iiasa/eclipsev5a/original_files' -OUTPUT_PATH = '/esarchive/recon/iiasa/eclipsev5a/original_files/test' +OUTPUT_PATH = '/esarchive/recon/iiasa/eclipsev5a' INPUT_NAME = 'ECLIPSE_base_CLE_V5a_.nc' INPUT_NAME_FLARING = 'ECLIPSE_V5a_baseline_CLE_flaring.nc' INPUT_NAME_SHIPS = "ship_CLE_emis_.nc" @@ -180,6 +180,7 @@ def extract_sector_by_name(name): 'emis_ind': 'industry', 'emis_tra': 'transport', 'emis_wst': 'waste', + 'emis_slv': 'solvent', } try: @@ -200,6 +201,7 @@ def extract_month_profile_by_sector(sector, month, pollutant=None): 'industry': 'ind', 'transport': 'tra', 'waste': 'other', + 'solvent': 'other', } if sector == '' and pollutant == 'NH3': profile_name = 'agr_NH3' diff --git a/preproc/edgarv432_ap_preproc.py b/preproc/edgarv432_ap_preproc.py index 3dbf1df..945781e 100755 --- a/preproc/edgarv432_ap_preproc.py +++ b/preproc/edgarv432_ap_preproc.py @@ -27,14 +27,15 @@ from warnings import warn as warning # ============== CONFIGURATION PARAMETERS ====================== INPUT_PATH = '/esarchive/recon/jrc/edgarv432_ap/original_files/' OUTPUT_PATH = '/esarchive/recon/jrc/edgarv432_ap' -LIST_POLLUTANTS = ['BC', 'CO', 'NH3', 'NOx', 'OC', 'PM10', 'PM2.5_bio', 'PM2.5_fossil', 'SO2', 'NMVOC'] +# LIST_POLLUTANTS = ['BC', 'CO', 'NH3', 'NOx', 'OC', 
'PM10', 'PM2.5_bio', 'PM2.5_fossil', 'SO2', 'NMVOC'] +LIST_POLLUTANTS = ['PM2.5_bio', 'PM2.5_fossil'] # LIST_YEARS = [1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, # 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, # 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012] -LIST_YEARS = [2012] +LIST_YEARS = [2010] # To do yearly emissions -PROCESS_YEARLY = True +PROCESS_YEARLY = False YEARLY_INPUT_NAME = 'yearly/v432___.0.1x0.1.nc' # To process monthly emissions, 2010 directly from monthly_input_name and other years calculated using bla bla bla @@ -186,7 +187,7 @@ def write_netcdf(output_name_path, data, data_atts, center_lats, center_lons, gr nc_var = nc_output.createVariable(data_atts['long_name'], 'f', ('time', 'lat', 'lon',), zlib=True) nc_var.units = data_atts['units'] nc_var.long_name = data_atts['long_name'] - nc_var.coordinates = data_atts['coordiantes'] + nc_var.coordinates = data_atts['coordinates'] nc_var.grid_mapping = data_atts['grid_mapping'] nc_var.cell_measures = 'area: cell_area' nc_var[:] = data.reshape((1,) + data.shape) @@ -348,13 +349,16 @@ def do_2010_monthly_transformation(): grid_area = get_grid_area(file_path) print file_path nc_in = Dataset(file_path, mode='r') - + # print pollutant + # print pollutant in ['PM2.5_bio', 'PM2.5_fossil'] if pollutant in ['PM2.5_bio', 'PM2.5_fossil']: - in_pollutant = pollutant - pollutant = 'PM2.5' + aux_pollutant = pollutant.replace('.', '') + in_pollutant = 'PM2.5' else: - in_pollutant = None - data = nc_in.variables['emi_{0}'.format(pollutant.lower())][:] + in_pollutant = pollutant + aux_pollutant = pollutant + + data = nc_in.variables['emi_{0}'.format(in_pollutant.lower())][:] data = np.array(data) @@ -364,20 +368,20 @@ def do_2010_monthly_transformation(): nc_in.close() sector = ipcc_to_sector_dict()[ipcc] - if pollutant == 'PM2.5': - pollutant = in_pollutant.replace('.', '') - elif pollutant == 
'NOx': - pollutant = 'nox_no2' + if aux_pollutant in ['PM2.5_bio', 'PM2.5_fossil']: + aux_pollutant = aux_pollutant.replace('.', '') + elif aux_pollutant == 'NOx': + aux_pollutant = 'nox_no2' - data_attributes = {'long_name': pollutant.lower(), + data_attributes = {'long_name': aux_pollutant.lower(), 'units': 'kg.m-2.s-1', 'coordinates': 'lat lon', 'grid_mapping': 'crs'} - out_path_aux = os.path.join(OUTPUT_PATH, 'monthly_mean', pollutant.lower() + '_' + sector.lower()) + out_path_aux = os.path.join(OUTPUT_PATH, 'monthly_mean', aux_pollutant.lower() + '_' + sector.lower()) if not os.path.exists(out_path_aux): os.makedirs(out_path_aux) - write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(pollutant.lower(), 2010, + write_netcdf(os.path.join(out_path_aux, '{0}_{1}{2}.nc'.format(aux_pollutant.lower(), 2010, str(month).zfill(2))), data, data_attributes, lats, lons, grid_area, 2010, sector.lower()) diff --git a/setup.py b/setup.py index f7c0126..9e3aa71 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ setup( install_requires=[ 'numpy', 'netCDF4>=1.3.1', - 'cdo>=1.3.3', + 'cdo==1.6.3', 'pandas', 'geopandas', 'pyproj', @@ -79,16 +79,20 @@ setup( data_files=[('.', ['LICENSE', 'CHANGELOG', ]), ('conf', ['conf/hermes.conf', 'conf/EI_configuration.csv', ]), - ('data', ['data/global_attributes.csv', + ('data', ['data/global_attributes.csv', ]), + ('data/profiles', []), + ('data/profiles/speciation', [ 'data/profiles/speciation/MolecularWeights.csv', 'data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv', 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv', 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv', - 'data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv', + 'data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv', ]), + ('data/profiles/temporal', [ 'data/profiles/temporal/TemporalProfile_Daily.csv', 
'data/profiles/temporal/TemporalProfile_Hourly.csv', 'data/profiles/temporal/TemporalProfile_Monthly.csv', - 'data/profiles/temporal/tz_world_country_iso3166.csv', + 'data/profiles/temporal/tz_world_country_iso3166.csv', ]), + ('data/profiles/vertical', [ 'data/profiles/vertical/CMAQ_15layers_vertical_description.csv', 'data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv', 'data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv', -- GitLab From 06f3e2a89b9dbbd6d5567d252cd838eea550b81e Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 16:46:03 +0200 Subject: [PATCH 41/51] Added pm25_fossil and pm25_bio to molecular weight file --- data/profiles/speciation/MolecularWeights.csv | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data/profiles/speciation/MolecularWeights.csv b/data/profiles/speciation/MolecularWeights.csv index 9e3472c..f005210 100644 --- a/data/profiles/speciation/MolecularWeights.csv +++ b/data/profiles/speciation/MolecularWeights.csv @@ -6,6 +6,8 @@ so2;64.06 nh3;17.03 pm10;1.0 pm25;1.0 +pm25_fossil;1.0 +pm25_bio;1.0 oc;1.0 bc;1.0 c2h6s;62.13 -- GitLab From 6472084b07c9247beb99f11beb92c46ff4b669b0 Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 16:49:18 +0200 Subject: [PATCH 42/51] Erased extra vertical description files --- ...H_Global_48layers_vertical_description.csv | 49 ------------------- ...regional_48layers_vertical_description.csv | 49 ------------------- 2 files changed, 98 deletions(-) delete mode 100644 data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv delete mode 100644 data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv diff --git a/data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv b/data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv deleted file mode 100644 index 00da2f6..0000000 --- a/data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv +++ /dev/null @@ -1,49 +0,0 
@@ -Ilayer;height_magl -1;48 -2;104 -3;167 -4;240 -5;322 -6;416 -7;522 -8;643 -9;782 -10;942 -11;1126 -12;1337 -13;1580 -14;1857 -15;2170 -16;2520 -17;2908 -18;3334 -19;3796 -20;4290 -21;4811 -22;5351 -23;5898 -24;6440 -25;6967 -26;7473 -27;7952 -28;8407 -29;8843 -30;9278 -31;9730 -32;10218 -33;10754 -34;11348 -35;12003 -36;12725 -37;13517 -38;14386 -39;15341 -40;16398 -41;17580 -42;18921 -43;20463 -44;22260 -45;24390 -46;26990 -47;30299 -48;34719 \ No newline at end of file diff --git a/data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv b/data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv deleted file mode 100644 index b4a06e1..0000000 --- a/data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv +++ /dev/null @@ -1,49 +0,0 @@ -Ilayer;height_magl -1;48 -2;104 -3;168 -4;241 -5;324 -6;418 -7;525 -8;646 -9;786 -10;946 -11;1130 -12;1342 -13;1584 -14;1860 -15;2171 -16;2518 -17;2902 -18;3322 -19;3776 -20;4260 -21;4768 -22;5291 -23;5816 -24;6332 -25;6827 -26;7295 -27;7732 -28;8140 -29;8528 -30;8911 -31;9307 -32;9728 -33;10184 -34;10681 -35;11217 -36;11793 -37;12409 -38;13065 -39;13761 -40;14492 -41;15256 -42;16048 -43;16858 -44;17676 -45;18480 -46;19242 -47;19923 -48;20480 -- GitLab From de80eff2850fe777583a2f87a5f636021c2c79e9 Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 16:52:27 +0200 Subject: [PATCH 43/51] benchmark vertical description files --- ...cription.csv => Benchmark_15layers_vertical_description.csv} | 0 hermesv3_gr/tools/sample_files.py | 2 +- setup.py | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename data/profiles/vertical/{CMAQ_15layers_vertical_description.csv => Benchmark_15layers_vertical_description.csv} (100%) diff --git a/data/profiles/vertical/CMAQ_15layers_vertical_description.csv b/data/profiles/vertical/Benchmark_15layers_vertical_description.csv similarity index 100% rename from data/profiles/vertical/CMAQ_15layers_vertical_description.csv rename to 
data/profiles/vertical/Benchmark_15layers_vertical_description.csv diff --git a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py index 07c642e..97573b4 100644 --- a/hermesv3_gr/tools/sample_files.py +++ b/hermesv3_gr/tools/sample_files.py @@ -59,7 +59,7 @@ def make_profiles_file_list(): os.path.join(main_dir, 'data', 'profiles', 'temporal', 'tz_world_country_iso3166.csv'), ]}, {'vertical': [ - os.path.join(main_dir, 'data', 'profiles', 'vertical', 'CMAQ_15layers_vertical_description.csv'), + os.path.join(main_dir, 'data', 'profiles', 'vertical', 'Benchmark_15layers_vertical_description.csv'), os.path.join(main_dir, 'data', 'profiles', 'vertical', 'MONARCH_Global_48layers_vertical_description.csv'), os.path.join(main_dir, 'data', 'profiles', 'vertical', diff --git a/setup.py b/setup.py index 9e3aa71..688f5dd 100644 --- a/setup.py +++ b/setup.py @@ -93,7 +93,7 @@ setup( 'data/profiles/temporal/TemporalProfile_Monthly.csv', 'data/profiles/temporal/tz_world_country_iso3166.csv', ]), ('data/profiles/vertical', [ - 'data/profiles/vertical/CMAQ_15layers_vertical_description.csv', + 'data/profiles/vertical/Benchmark_15layers_vertical_description.csv', 'data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv', 'data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv', 'data/profiles/vertical/Vertical_profile.csv', ]), -- GitLab From c06ad2aa0bca02cdb968e48db0666b42445f2eb9 Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 17:07:24 +0200 Subject: [PATCH 44/51] added EI benchmark file --- conf/EI_configuration_benchmark.csv | 98 +++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 conf/EI_configuration_benchmark.csv diff --git a/conf/EI_configuration_benchmark.csv b/conf/EI_configuration_benchmark.csv new file mode 100644 index 0000000..f24f2f7 --- /dev/null +++ b/conf/EI_configuration_benchmark.csv @@ -0,0 +1,98 @@ 
+ei;sector;ref_year;active;factor_mask;regrid_mask;pollutants;path;frequency;source_type;p_vertical;p_month;p_day;p_hour;p_speciation;comment +GFASv12;;;1;;;co,nox_no,pm25,oc,bc,so2,ch3oh,c2h5oh,c3h8,c2h4,c3h6,c5h8,terpenes,hialkenes,hialkanes,ch2o,c2h4o,c3h6o,nh3,c2h6s,c2h6,c7h8,c6h6,c8h10,c4h8,c5h10,c6h12,c8h16,c4h10,c5h12,c6h14,c7h16;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E001; +HTAPv2;energy;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23,voc24;/jrc/htapv2/monthly_mean;monthly;area;V001;;D002;H002;E002; +HTAPv2;industry;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/htapv2/monthly_mean;monthly;area;V002;;D003;H004;E003; +HTAPv2;residential;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/jrc/htapv2/monthly_mean;monthly;area;;;D003;H003;E004; +HTAPv2;transport;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23;/jrc/htapv2/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E005; +HTAPv2;agriculture;2010;1;;;nh3;/jrc/htapv2/monthly_mean;monthly;area;;;D001;H007;E006; +HTAPv2;air_lto;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc02,voc03,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc17,voc21,voc22,voc23;/jrc/htapv2/yearly_mean;yearly;area;V003;M001;D001;H001;E007; +HTAPv2;air_cds;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc02,voc03,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc17,voc21,voc22,voc23;/jrc/htapv2/yearly_mean;yearly;area;V004;M001;D001;H001;E007; 
+HTAPv2;air_crs;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc02,voc03,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc17,voc21,voc22,voc23;/jrc/htapv2/yearly_mean;yearly;area;V005;M001;D001;H001;E007; +HTAPv2;ships;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc21,voc22,voc23,voc24;/jrc/htapv2/yearly_mean;yearly;area;;M001;D001;H001;E008; +wiedinmyer;;2014;1;;;bc,c2h2,c2h4,c3h6,c6h6,ch2o,ch3cooh,ch3oh,co,hcl,nh3,nox_no,oc,pm10,pm25,so2;/ucar/wiedinmyer/yearly_mean;yearly;area;;M001;D001;H008;E009; +TNO_MACC-III;snap1;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;V001;M002;D002;H002;E010; +TNO_MACC-III;snap2;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M003;D003;H003;E011; +TNO_MACC-III;snap34;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;V002;M004;D003;H004;E012; +TNO_MACC-III;snap5;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc02,voc03,voc04,voc05,voc06,voc12,voc13,voc14,voc15;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M005;D001;H001;E013; +TNO_MACC-III;snap6;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc06,voc14,voc15,voc17,voc18,voc19,voc20,voc23;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M006;D004;H005;E014; +TNO_MACC-III;snap71;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M007;D005;weekday=H006, saturday=H009, sunday=H010;E015; 
+TNO_MACC-III;snap72;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc17,voc21,voc22,voc23;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M007;D005;weekday=H006, saturday=H009, sunday=H010;E016; +TNO_MACC-III;snap73;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc02,voc03,voc07,voc08,voc12,voc17,voc21,voc22;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M007;D005;weekday=H006, saturday=H009, sunday=H010;E017; +TNO_MACC-III;snap74;2011;0;;;voc03,voc04,voc05,voc06,voc12,voc13,voc14,voc15;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M007;D005;weekday=H006, saturday=H009, sunday=H010;E018; +TNO_MACC-III;snap75;2011;0;;;pm10,pm25;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M007;D005;weekday=H006, saturday=H009, sunday=H010;E019; +TNO_MACC-III;snap8;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M008;D001;H001;E020; +TNO_MACC-III;snap9;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M001;D001;H001;E021; +TNO_MACC-III;snap10;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M009;D001;H007;E022; +ECLIPSEv5a;agriculture;2010;0;;;pm10,pm25,nh3;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D001;H007;E023; +ECLIPSEv5a;flaring;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/yearly_mean;yearly;area;V006;M001;D001;H001;E024; +ECLIPSEv5a;energy;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;V001;;D002;H002;E025; +ECLIPSEv5a;industry;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;V002;;D003;H004;E026; 
+ECLIPSEv5a;transport;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E027; +ECLIPSEv5a;residential;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D003;H003;E028; +ECLIPSEv5a;waste;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D001;H001;E029; +ECLIPSEv5.a;solvent;2010;0;;;nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;;M006;D004;H005;E030; +ECLIPSEv5a;agricultural_waste;2010;0;;;co,nox_no2,nh3,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D001;H007;E031; +ECLIPSEv5a;ships;2010;0;;;co,nox_no2,pm10,pm25,oc,bc,so2,nmvoc;/iiasa/eclipsev5a/yearly_mean;yearly;area;;M001;D001;H001;E032; +EDGARv432_AP;ags;2010;0;;;nox_no2,nh3,pm10,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H007;E033; +EDGARv432_AP;awb;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H007;E034; +EDGARv432_AP;che;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V002;;D003;H004;E035; +EDGARv432_AP;ene;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V001;;D002;H002;E036; +EDGARv432_AP;foo_pap;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V002;;D003;H004;E037; +EDGARv432_AP;fff;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,oc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V006;;D001;H001;E038; +EDGARv432_AP;ind;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V002;;D003;H004;E039; +EDGARv432_AP;iro;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V002;;D003;H004;E040; 
+EDGARv432_AP;mnm;2010;0;;;nox_no2,nh3,pm10,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H007;E041; +EDGARv432_AP;neu;2010;0;;;pm10;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H001;E042; +EDGARv432_AP;nfe;2010;0;;;nox_no2,co,so2,pm10,bc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V002;;D003;H004;E043; +EDGARv432_AP;nmm;2010;0;;;co,so2,nh3,nmvoc,pm10,bc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V002;;D003;H004;E044; +EDGARv432_AP;pro;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,oc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H001;E045; +EDGARv432_AP;pru_sol;2010;0;;;nh3,nmvoc,pm10,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D004;H005;E046; +EDGARv432_AP;rco;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D003;H003;E047; +EDGARv432_AP;ref_trf;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V006;;D001;H001;E048; +EDGARv432_AP;swd_inc;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V006;;D001;H001;E049; +EDGARv432_AP;swd_ldf;2010;0;;;nh3,nmvoc,pm10,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H001;E050; +EDGARv432_AP;tnr_aviation_cds;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,oc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V004;;D001;H001;E051; +EDGARv432_AP;tnr_aviation_crs;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,oc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V005;;D001;H001;E052; +EDGARv432_AP;tnr_aviation_lto;2010;0;;;nox_no2,co,so2,nmvoc,pm10,bc,oc,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;V003;;D001;H001;E053; +EDGARv432_AP;tnr_other;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H001;E054; 
+EDGARv432_AP;tnr_ship;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H001;E055; +EDGARv432_AP;tro;2010;0;;;nox_no2,co,so2,nh3,nmvoc,pm10,bc,oc,pm25_bio,pm25_fossil;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E056; +EDGARv432_AP;wwt;2010;0;;;nh3,nmvoc;/jrc/edgarv432_ap/monthly_mean;monthly;area;;;D001;H001;E057; +EDGARv432_VOC;awb;2010;0;;;voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H007;E058; +EDGARv432_VOC;ene;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;V001;;D002;H002;E059; +EDGARv432_VOC;fff;2010;0;;;voc2,voc3,voc4,voc5,voc6,voc8,voc10,voc12,voc13,voc14,voc15,voc17,voc18,voc19,voc20;/jrc/edgarv432_voc/monthly_mean;monthly;area;V006;;D001;H001;E060; +EDGARv432_VOC;ind;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;V002;;D003;H004;E061; +EDGARv432_VOC;ppa;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H001;E062; +EDGARv432_VOC;pro;2010;0;;;voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H001;E063; +EDGARv432_VOC;rco;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D003;H003;E064; 
+EDGARv432_VOC;ref;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc12,voc13,voc14,voc15,voc16,voc17,voc23;/jrc/edgarv432_voc/monthly_mean;monthly;area;V006;;D003;H004;E065; +EDGARv432_VOC;swd;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc17,voc18,voc19,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H001;E066; +EDGARv432_VOC;tnr_aviation_cds;2010;0;;;voc1,voc2,voc3,voc5,voc6,voc7,voc8,voc9,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23;/jrc/edgarv432_voc/monthly_mean;monthly;area;V004;;D001;H001;E067; +EDGARv432_VOC;tnr_aviation_crs;2010;0;;;voc1,voc2,voc3,voc5,voc6,voc7,voc8,voc9,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23;/jrc/edgarv432_voc/monthly_mean;monthly;area;V005;;D001;H001;E068; +EDGARv432_VOC;tnr_aviation_lto;2010;0;;;voc1,voc2,voc3,voc5,voc6,voc7,voc8,voc9,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23;/jrc/edgarv432_voc/monthly_mean;monthly;area;V003;;D001;H001;E069; +EDGARv432_VOC;tnr_other;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H001;E070; +EDGARv432_VOC;tnr_ship;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H001;E071; +EDGARv432_VOC;trf;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D001;H001;E072; +EDGARv432_VOC;tro;2010;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc10,voc11,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/edgarv432_voc/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E073; 
+EMEP;a_publicpower;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;V001;M002;D002;H002;E074; +EMEP;b_industry;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;V002;M004;D003;H004;E075; +EMEP;c_otherstationarycomb;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M003;D003;H003;E076; +EMEP;d_fugitive;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M005;D001;H001;E077; +EMEP;e_solvents;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M006;D004;H005;E078; +EMEP;f_roadtransport;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M007;D005;weekday=H006, saturday=H009, sunday=H010;E079; +EMEP;g_shipping;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M001;D001;H001;E080; +EMEP;h_aviation;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;V003;M001;D001;H001;E081; +EMEP;i_offroad;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M001;D001;H001;E082; +EMEP;j_waste;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M001;D001;H001;E083; +EMEP;k_agrilivestock;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M009;D001;H007;E084; +EMEP;l_agriother;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M009;D001;H007;E085; +carn;;2015;1;;;so2;/ceip/emepv18/yearly_mean;yearly;point;;M001;D001;H001;E086; +CEDS;agriculture;2014;0;;;nox_no2,nh3;/jgcri/ceds/monthly_mean;monthly;area;;;D001;H007;E087; +CEDS;energy;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;V001;;D002;H002;E088; 
+CEDS;industrial;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;V002;;D003;H004;E089; +CEDS;transportation;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E090; +CEDS;residential;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;;;D003;H003;E091; +CEDS;solvent;2014;0;;;voc1,voc2,voc3,voc4,voc5,voc6,voc7,voc8,voc9,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;;;D004;H005;E092; +CEDS;waste;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;;;D001;H001;E093; +CEDS;shipping;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;;;D001;H001;E094; +CEDS;air;2014;0;;;co,nox_no2,oc,bc,so2,nh3,nmvoc;/jgcri/ceds/monthly_mean;monthly;area;;;D001;H001;E095; \ No newline at end of file -- GitLab From 21dc573001c92adfeea2bc72fa37b50c09a5c4df Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 17:38:40 +0200 Subject: [PATCH 45/51] Added speciation files and modified hermes.conf --- conf/EI_configuration.csv | 17 --- conf/hermes.conf | 103 +++++++----------- .../Speciation_profile_cb05_aero5_CMAQ.csv | 77 ++++++++++++- .../Speciation_profile_cb05_aero5_MONARCH.csv | 98 +++++++++++++++++ 
...on_profile_cb05_aero5_MONARCH_aerosols.csv | 13 --- ...on_profile_cb05_aero5_MONARCH_fullchem.csv | 25 ----- .../Speciation_profile_cb05_aero6_CMAQ.csv | 28 +++++ .../Speciation_profile_cb05e51_aero6_CMAQ.csv | 28 +++++ ...tion_profile_radm2_madesorgam_WRF_CHEM.csv | 78 ++++++++++++- hermesv3_gr/tools/sample_files.py | 2 +- setup.py | 2 +- 11 files changed, 346 insertions(+), 125 deletions(-) delete mode 100644 conf/EI_configuration.csv create mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv delete mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv delete mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv create mode 100644 data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv create mode 100644 data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv diff --git a/conf/EI_configuration.csv b/conf/EI_configuration.csv deleted file mode 100644 index 15e01cb..0000000 --- a/conf/EI_configuration.csv +++ /dev/null @@ -1,17 +0,0 @@ -ei;sector;ref_year;active;factor_mask;regrid_mask;pollutants;path;frequency;source_type;p_vertical;p_month;p_day;p_hour;p_speciation;comment -HTAPv2;energy;2010;0;;;so2;/jrc/htapv2/monthly_mean;monthly;area;V001;;D002;H002;E998;added 05/2017 -HTAPv2;industry;2010;0;;;so2;/jrc/htapv2/monthly_mean;monthly;area;V002;;D003;H004;E998;added 05/2017 -HTAPv2;residential;2010;0;;- FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 -HTAPv2;residential;2010;0;FRA 50;+ FRA;so2;/jrc/htapv2/monthly_mean;monthly;area;;;;;E998;added 05/2017 -HTAPv2;transport;2010;1;;;so2;/jrc/htapv2/monthly_mean;monthly;area;;;D001;weekday=H001, saturday=H002, sunday=H003;E998;added 05/2017 -HTAPv2;air_lto;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V003;M001;D001;H001;E998;added 05/2017 -HTAPv2;air_cds;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V004;M001;D001;H001;E998;added 05/2017 
-HTAPv2;air_crs;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;V005;M001;D001;H001;E998;added 05/2017 -HTAPv2;ships;2010;0;;;so2;/jrc/htapv2/yearly_mean;yearly;area;;M001;D001;H001;E008;added 05/2017 -wiedinmyer;;2014;0;;;so2;/ucar/wiedinmyer/yearly_mean;yearly;area;;M001;D001;H001;E998;added 05/2017 -ECLIPSEv5a;flaring;2010;0;;;so2;/iiasa/eclipsev5a/yearly_mean;area;yearly;V006;M001;D001;H001;E998;added 11/2017 -GFASv12;;2015;0;;;so2,nox_no;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E997;added 05/2017 -ECLIPSEv5a;transport;2010;0;;+ CHN,IND;so2;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D001;H001;E998;added 11/2017 -ECLIPSEv5a;transport;2010;0;;;nox_no2;/iiasa/eclipsev5a/monthly_mean;monthly;area;;;D005;weekday=H006, saturday=H009, sunday=H010;E999;added 11/2017 -CARN;;2015;0;;;so2;/mtu/carnetal/yearly_mean;yearly;point;;M001;D001;H001;E998;added ... -Maestra;;2015;0;;;nox_no2;/home/Earth/ctena/Models/HERMESv3/;yearly;point;;M001;D001;H001;E999;added ... 
diff --git a/conf/hermes.conf b/conf/hermes.conf index 920599d..9c2be49 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -1,13 +1,10 @@ [GENERAL] log_level = 3 -# input_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/IN input_dir = /home/Earth/ctena/Models/hermesv3_gr -# data_path = /gpfs/scratch/bsc32/bsc32538/HERMES_data data_path = /esarchive/recon -#output_dir = /gpfs/projects/bsc32/bsc32538/HERMESv3_GR_rotated/OUT output_dir = /home/Earth/ctena/HERMES_out output_name = HERMESv3_.nc -start_date = 2014/09/02 00:00:00 +start_date = 2018/01/01 00:00:00 # ***** end_date = start_date [DEFAULT] ***** # end_date = 2014/09/03 00:00:00 # ***** output_timestep_type = [hourly, daily, monthly, yearly] ***** @@ -18,78 +15,60 @@ output_timestep_freq = 1 [DOMAIN] # ***** output_model = [MONARCH, CMAQ, WRF_CHEM] ***** -# output_model = MONARCH +output_model = MONARCH # output_model = CMAQ -output_model = WRF_CHEM +# output_model = WRF_CHEM output_attributes = /data/global_attributes.csv # ***** domain_type=[global, lcc, rotated, mercator] ***** -domain_type = mercator -# vertical_description = /data/profiles/vertical/1layer_vertical_description.csv -# vertical_description = /data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv -vertical_description = /data/profiles/vertical/CMAQ_15layers_vertical_description.csv +domain_type = global +# domain_type = lcc +# domain_type = rotated +# domain_type = mercator +vertical_description = /data/profiles/vertical/Benchmark_15layers_vertical_description.csv auxiliar_files_path = /data/auxiliar_files/_ # if domain_type == global: - inc_lat = 0.5 - inc_lon = 0.703125 - #inc_lat = 1. - #inc_lon = 1.40625 + inc_lat = 1. 
+ inc_lon = 1.40625 # if domain_type == rotated: - centre_lat = 35 - centre_lon = 20 - west_boundary = -51 - south_boundary = -35 - inc_rlat = 0.1 - inc_rlon = 0.1 + #centre_lat = 35 + #centre_lon = 20 + #west_boundary = -51 + #south_boundary = -35 + #inc_rlat = 0.1 + #inc_rlon = 0.1 # if domain_type == lcc: - # CALIOPE - lat_1 = 37 - lat_2 = 43 - lon_0 = -3 - lat_0 = 40 - - # CATALUNYA - #nx = 278 - #ny = 298 - #inc_x = 1000 - #inc_y = 1000 - #x_0 = 253151.59375 - #y_0 = 43862.90625 - - # EUROPA - nx = 478 - ny = 398 - inc_x = 12000 - inc_y = 12000 - x_0 = -2131849.000 - y_0 = -2073137.875 - - # IP - #nx = 397 - #ny = 397 - #inc_x = 4000 - #inc_y = 4000 - #x_0 = -807847.688 - #y_0 = -797137.125 + # EUROPE + #lat_1 = 37 + #lat_2 = 43 + #lon_0 = -3 + #lat_0 = 40 + #nx = 478 + #ny = 398 + #inc_x = 12000 + #inc_y = 12000 + #x_0 = -2131849.000 + #y_0 = -2073137.875 # if domain_type == mercator: - lat_ts = -2.84 - lon_0 = -79.16 - nx = 99 - ny = 81 - inc_x = 1000 - inc_y = 1000 - x_0 = -49500.13899057542 - y_0 = -355986.6927808438 + # AFRICA + lat_ts = -1.5 + lon_0 = -18 + nx = 210 + ny = 236 + inc_x = 50000 + inc_y = 50000 + x_0 = -126017.5 + y_0 = -5407460 [EMISSION_INVENTORY_CONFIGURATION] -cross_table = /conf/EI_configuration.csv +cross_table = /conf/EI_configuration_benchmark.csv [EMISSION_INVENTORY_PROFILES] @@ -97,11 +76,11 @@ p_vertical = /data/profiles/vertical/Vertical_profile.csv p_month = /data/profiles/temporal/TemporalProfile_Monthly.csv p_day = /data/profiles/temporal/TemporalProfile_Daily.csv p_hour = /data/profiles/temporal/TemporalProfile_Hourly.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_test.csv -p_speciation = /data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv +p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv # p_speciation = 
/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_test.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv +# p_speciation = /data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv molecular_weights = /data/profiles/speciation/MolecularWeights.csv world_info = /data/profiles/temporal/tz_world_country_iso3166.csv diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv index fef9d59..d667082 100644 --- a/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv +++ b/data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv @@ -6,7 +6,7 @@ E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;vo E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 
E005;0.823*nox_no2;0.16*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 -E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0; +E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;c3h6;c2h2+c3h6+ch3cooh;0;0;ch2o;0;0;hcl;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 @@ -14,7 +14,7 @@ E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc1 E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.35*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 
E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.03*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;1*pm25;pm10-pm25 -E014;0;0;0;0;0;0;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0;0 +E014;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0;0 E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0.288*pm25;0.675*pm25;0;0.01*pm25;0.037*pm25;pm10-pm25 E017;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0;0.2*voc17;voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 @@ -23,3 +23,76 @@ E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*pm25;0.06*pm25;0;0;0. 
E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.31*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.31*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 +E023;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25;pm10-pm25 +E024;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E025;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000008*nmvoc;0;0;0.000858*nmvoc;0.004177*nmvoc;0;0.018548*nmvoc;0;0;0;0.00104*nmvoc;0.011594*nmvoc;0;0;0;0.000893*nmvoc;0;0;0;oc;bc;0;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 +E026;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000013*nmvoc;0;0;0.003224*nmvoc;0.001662*nmvoc;0.000659*nmvoc;0.008038*nmvoc;0;0;0.000175*nmvoc;0.000496*nmvoc;0.025751*nmvoc;0;0;0;0.001726*nmvoc;0;0;0;oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 +E027;0.823*nox_no2;0.16*nox_no2;0.017*nox_no2;co;so2;nh3;0.000592*nmvoc;0;0;0.002423*nmvoc;0.001607*nmvoc;0;0.000899*nmvoc;0;0;0;0.002589*nmvoc;0.028079*nmvoc;0;0;0;0.003302*nmvoc;0;0;0;oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 +E028;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000047*nmvoc;0;0;0.002914*nmvoc;0.004187*nmvoc;0.000849*nmvoc;0.00186*nmvoc;0;0;0;0.002559*nmvoc;0.029992*nmvoc;0;0;0;0.001755*nmvoc;0;0;0;oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 
+E029;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.001867*nmvoc;0.008553*nmvoc;0.000529*nmvoc;0.005349*nmvoc;0;0.000169*nmvoc;0.000133*nmvoc;0.000435*nmvoc;0.025797*nmvoc;0;0;0;0.000742*nmvoc;0;0;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E030;0;0;0;0;0;0;0;0;0;0.000087*nmvoc;0.000148*nmvoc;0.003563*nmvoc;0.000004*nmvoc;0;0.000001*nmvoc;0.001904*nmvoc;0.000161*nmvoc;0.036006*nmvoc;0;0;0;0.001686*nmvoc;0;0;0;0;0;0;0;0;0 +E031;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.002093*nmvoc;0.000525*nmvoc;0.000105*nmvoc;0.002929*nmvoc;0.001894*nmvoc;0.000023*nmvoc;0.002826*nmvoc;0.000116*nmvoc;0.000043*nmvoc;0.003664*nmvoc;0.003078*nmvoc;0.010261*nmvoc;0;0.000006*nmvoc;0.00013*nmvoc;0.000006*nmvoc;0;0;0;oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E032;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0.002496*nmvoc;0.004456*nmvoc;0;0;0.001693*nmvoc;0;0;0;0.001111*nmvoc;0.030672*nmvoc;0;0;0.001132*nmvoc;0.000762*nmvoc;0;0;0;oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 +E033;nox_no2;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E034;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_bio;pm10-pm25_bio +E035;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E036;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_fossil+pm25_bio-oc-bc)*0.155;(pm25_fossil+pm25_bio-oc-bc)*0.845;pm10-pm25_fossil-pm25_bio +E037;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E038;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_fossil;pm10-pm25_fossil +E039;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_fossil+pm25_bio-oc-bc)*0.089;(pm25_fossil+pm25_bio-oc-bc)*0.911;pm10-pm25_fossil-pm25_bio 
+E040;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E041;0.9*nox_no2;0.1*nox_no2;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E042;0.9*nox_no2;0.1*nox_no2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10 +E043;0;0;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E044;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E045;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E046;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E047;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.047;(pm25_bio+pm25_fossil-oc-bc)*0.953;pm10-pm25_fossil-pm25_bio +E048;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.155;(pm25_bio+pm25_fossil-oc-bc)*0.845;pm10-pm25_fossil-pm25_bio +E049;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_fossil+pm25_bio;pm10-pm25_fossil-pm25_bio +E050;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E051;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E052;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E053;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E054;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.059;(pm25_bio+pm25_fossil-oc-bc)*0.941;pm10-pm25_bio-pm25_fossil 
+E055;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.537;(pm25_bio+pm25_fossil-oc-bc)*0.463;pm10-pm25_bio-pm25_fossil +E056;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.059;(pm25_bio+pm25_fossil-oc-bc)*0.941;pm10-pm25_bio-pm25_fossil +E057;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E058;0;0;0;0;0;0;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;voc10;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E059;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E060;0;0;0;0;0;0;0;0.01*voc18+0.3*voc19;voc13;voc07;voc02;0;0;0.666*voc12;voc10;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc13+2.2*voc17+4.11*voc18+4*voc19;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0;0 +E061;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E062;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 
+E063;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;voc10;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E064;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E065;0;0;0;0;0;0;0;0;voc13;voc07;voc02;0.5*voc01;0;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E066;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E067;0;0;0;0;0;0;0.625*voc22;0.666*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E068;0;0;0;0;0;0;0.625*voc22;0.666*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E069;0;0;0;0;0;0;0.625*voc22;0.666*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 
+E070;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E071;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E072;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E073;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0;0 +E074;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000008*nmvoc;0;0;0.000858*nmvoc;0.004177*nmvoc;0;0.018548*nmvoc;0;0;0;0.00104*nmvoc;0.011594*nmvoc;0;0;0;0.000893*nmvoc;0;0;0;0.02*pm25;0.01*pm25;0;0.15*pm25;0.82*pm25;pm10-pm25 +E075;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000013*nmvoc;0;0;0.003224*nmvoc;0.001662*nmvoc;0.000659*nmvoc;0.008038*nmvoc;0;0;0.000175*nmvoc;0.000496*nmvoc;0.025751*nmvoc;0;0;0;0.001726*nmvoc;0;0;0;0.03*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 +E076;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000047*nmvoc;0;0;0.002914*nmvoc;0.004187*nmvoc;0.000849*nmvoc;0.00186*nmvoc;0;0;0;0.002559*nmvoc;0.029992*nmvoc;0;0;0;0.001755*nmvoc;0;0;0;0.35*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 
+E077;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.000006*nmvoc;0.005934*nmvoc;0;0.000026*nmvoc;0;0.000001*nmvoc;0;0.000373*nmvoc;0.055357*nmvoc;0;0;0;0.000048*nmvoc;0;0;0;0;0;0;0;1*pm25;pm10-pm25 +E078;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.000087*nmvoc;0.000148*nmvoc;0.003563*nmvoc;0.000004*nmvoc;0;0.000001*nmvoc;0.001904*nmvoc;0.000161*nmvoc;0.036006*nmvoc;0;0;0;0.001686*nmvoc;0;0;0;0;0;0;0;1*pm25;pm10-pm25 +E079;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.000592*nmvoc;0;0;0.002423*nmvoc;0.001607*nmvoc;0;0.000899*nmvoc;0;0;0;0.002589*nmvoc;0.028079*nmvoc;0;0;0;0.003302*nmvoc;0;0;0;0.32*pm25;0.49*pm25;0;0.01*pm25;0.18*pm25;pm10-pm25 +E080;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0.002496*nmvoc;0.004456*nmvoc;0;0;0.001693*nmvoc;0;0;0;0.001111*nmvoc;0.030672*nmvoc;0;0;0.001132*nmvoc;0.000762*nmvoc;0;0;0;0.12*pm25;0.005*pm25;0;0.40*pm25;0.475*pm25;pm10-pm25 +E081;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.001084*nmvoc;0.000756*nmvoc;0.000248*nmvoc;0.00622*nmvoc;0.000293*nmvoc;0;0.005437*nmvoc;0.000086*nmvoc;0;0;0.003296*nmvoc;0.011816*nmvoc;0;0;0.000192*nmvoc;0.000124*nmvoc;0;0;0;0.62*pm25;0.16*pm25;0;0.15*pm25;0.07*pm25;pm10-pm25 +E082;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000227*nmvoc;0.000311*nmvoc;0.000333*nmvoc;0.003921*nmvoc;0.000166*nmvoc;0;0.00206*nmvoc;0.00079*nmvoc;0;0;0.00111*nmvoc;0.039123*nmvoc;0;0;0.000245*nmvoc;0.000476*nmvoc;0;0;0;0.31*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 +E083;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.001867*nmvoc;0.008553*nmvoc;0.000529*nmvoc;0.005349*nmvoc;0;0.000169*nmvoc;0.000133*nmvoc;0.000435*nmvoc;0.025797*nmvoc;0;0;0;0.000742*nmvoc;0;0;0;0.31*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 +E084;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.001591*nmvoc;0.000103*nmvoc;0.000038*nmvoc;0;0;0.000022*nmvoc;0;0;0;0;0.000023*nmvoc;0.03941*nmvoc;0;0.000007*nmvoc;0.000237*nmvoc;0;0;0;0;0.318*pm25;0.0516*pm25;0;0.0446*pm25;0.5858*pm25;pm10-pm25 
+E085;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.002093*nmvoc;0.000525*nmvoc;0.000105*nmvoc;0.002929*nmvoc;0.001894*nmvoc;0.000023*nmvoc;0.002826*nmvoc;0.000116*nmvoc;0.000043*nmvoc;0.003664*nmvoc;0.003078*nmvoc;0.010261*nmvoc;0;0.000006*nmvoc;0.00013*nmvoc;0.000006*nmvoc;0;0;0;0.48*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 +E086;0;0;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E087;nox_no2;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;0;0 +E088;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 +E089;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 +E090;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 +E091;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 
+E092;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 +E093;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 +E094;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;0;0;0 +E095;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.001084*nmvoc;0.000756*nmvoc;0.000248*nmvoc;0.00622*nmvoc;0.000293*nmvoc;0;0.005437*nmvoc;0.000086*nmvoc;0;0;0.003296*nmvoc;0.011816*nmvoc;0;0;0.000192*nmvoc;0.000124*nmvoc;0;0;0;oc;bc;0;0;0;0 diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv new file mode 100644 index 0000000..dcee133 --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv @@ -0,0 +1,98 @@ +ID;NO;NO2;HONO;CO;SO2;NH3;ALD2;ALDX;BENZENE;ETH;ETHA;ETOH;FORM;IOLE;ISOP;MEOH;OLE;PAR;SESQ;TERP;TOL;XYL;DMS;HCL;POA;PEC;PNO3;PSO4;PMFINE;PMC 
+units;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2 +short_description;nitrogen_monoxide;nitrogen_dioxide;nitrous_acid;carbon_monoxide;sulfur_dioxide;ammonia;acetaldehyde;higher_aldehydes;benzene;ethene;ethane;ethanol;formaldehyde;internal_olefin_carbon_bond;isoprene;methanol;terminal_olefin_carbon_bond;paraffin_carbon_bond;sesquiterpenes;terpene;toluene;xylene;dimethyl_sulfide;hydrogen_chloride;primary_organic_carbon;primary_elemental_carbon;primary_nitrate_fine;primary_sulfate_fine;primary_others_fine;pm_coarse +E001;0.72*nox_no;0.18*nox_no;0.1*nox_no;co;so2;nh3;c2h4o;0;c6h6;c2h4;c2h6;c2h5oh;0;0.5*hialkenes;c5h8;ch3oh;c8h16+c5h10+c3h6+c4h8+c6h12+0.5*hialkanes;4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8;0;terpenes;ch2o+c7h8;c8h10;c2h6s;0;oc;bc;0;0;3.3*pm25-3*oc-5.9*bc;0 +E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 +E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 
+E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.8*oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 +E005;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 +E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;1.3*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 +E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;c3h6;c2h2+c3h6+ch3cooh;0;0;ch2o;0;0;hcl;1.44*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.02*1.3*pm25;0.01*pm25;0;0.15*pm25;0.82*pm25;pm10-pm25 
+E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.35*1.8*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 +E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.03*1.3*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 +E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;1*pm25;pm10-pm25 +E014;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0 +E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.58*1.3*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 +E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0.288*1.3*pm25;0.675*pm25;0;0.01*pm25;0.037*pm25;pm10-pm25 +E017;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0;0.2*voc17;voc17;0;0;0.58*1.3*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 
+E018;0;0;0;0;0;0;0;0;voc13;0;0;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;0 +E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*1.3*pm25;0.06*pm25;0;0;0.74*pm25;pm10-pm25 +E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.31*1.3*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 +E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.31*1.3*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 +E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*1.8*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 +E023;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25;pm10-pm25 +E024;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E025;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000008*nmvoc;0;0;0.000858*nmvoc;0.004177*nmvoc;0;0.018548*nmvoc;0;0;0;0.00104*nmvoc;0.011594*nmvoc;0;0;0;0.000893*nmvoc;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 +E026;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000013*nmvoc;0;0;0.003224*nmvoc;0.001662*nmvoc;0.000659*nmvoc;0.008038*nmvoc;0;0;0.000175*nmvoc;0.000496*nmvoc;0.025751*nmvoc;0;0;0;0.001726*nmvoc;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 +E027;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000592*nmvoc;0;0;0.002423*nmvoc;0.001607*nmvoc;0;0.000899*nmvoc;0;0;0;0.002589*nmvoc;0.028079*nmvoc;0;0;0;0.003302*nmvoc;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 
+E028;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000047*nmvoc;0;0;0.002914*nmvoc;0.004187*nmvoc;0.000849*nmvoc;0.00186*nmvoc;0;0;0;0.002559*nmvoc;0.029992*nmvoc;0;0;0;0.001755*nmvoc;0;0;1.8*oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 +E029;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.001867*nmvoc;0.008553*nmvoc;0.000529*nmvoc;0.005349*nmvoc;0;0.000169*nmvoc;0.000133*nmvoc;0.000435*nmvoc;0.025797*nmvoc;0;0;0;0.000742*nmvoc;0;0;1.3*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E030;0;0;0;0;0;0;0;0;0;0.000087*nmvoc;0.000148*nmvoc;0.003563*nmvoc;0.000004*nmvoc;0;0.000001*nmvoc;0.001904*nmvoc;0.000161*nmvoc;0.036006*nmvoc;0;0;0;0.001686*nmvoc;0;0;0;0;0;0;0;0 +E031;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.002093*nmvoc;0.000525*nmvoc;0.000105*nmvoc;0.002929*nmvoc;0.001894*nmvoc;0.000023*nmvoc;0.002826*nmvoc;0.000116*nmvoc;0.000043*nmvoc;0.003664*nmvoc;0.003078*nmvoc;0.010261*nmvoc;0;0.000006*nmvoc;0.00013*nmvoc;0.000006*nmvoc;0;0;1.3*oc;bc;0;0;pm25-oc-bc;pm10-pm25 +E032;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0.002496*nmvoc;0.004456*nmvoc;0;0;0.001693*nmvoc;0;0;0;0.001111*nmvoc;0.030672*nmvoc;0;0;0.001132*nmvoc;0.000762*nmvoc;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 +E033;nox_no2;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E034;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_bio;pm10-pm25_bio +E035;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E036;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;(pm25_fossil+pm25_bio-oc-bc)*0.155;(pm25_fossil+pm25_bio-oc-bc)*0.845;pm10-pm25_fossil-pm25_bio +E037;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E038;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_fossil;pm10-pm25_fossil 
+E039;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;(pm25_fossil+pm25_bio-oc-bc)*0.089;(pm25_fossil+pm25_bio-oc-bc)*0.911;pm10-pm25_fossil-pm25_bio +E040;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E041;0.9*nox_no2;0.1*nox_no2;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E042;0.9*nox_no2;0.1*nox_no2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10 +E043;0;0;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E044;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;bc;0;0;pm25_fossil-bc;pm10-pm25_fossil +E045;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E046;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E047;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.8*oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.047;(pm25_bio+pm25_fossil-oc-bc)*0.953;pm10-pm25_fossil-pm25_bio +E048;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.155;(pm25_bio+pm25_fossil-oc-bc)*0.845;pm10-pm25_fossil-pm25_bio +E049;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_fossil+pm25_bio;pm10-pm25_fossil-pm25_bio +E050;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25_fossil;pm10-pm25_fossil +E051;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E052;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil +E053;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;pm25_fossil-oc-bc;pm10-pm25_fossil 
+E054;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.059;(pm25_bio+pm25_fossil-oc-bc)*0.941;pm10-pm25_bio-pm25_fossil +E055;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.537;(pm25_bio+pm25_fossil-oc-bc)*0.463;pm10-pm25_bio-pm25_fossil +E056;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;(pm25_bio+pm25_fossil-oc-bc)*0.059;(pm25_bio+pm25_fossil-oc-bc)*0.941;pm10-pm25_bio-pm25_fossil +E057;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E058;0;0;0;0;0;0;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;voc10;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E059;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E060;0;0;0;0;0;0;0;0.01*voc18+0.3*voc19;voc13;voc07;voc02;0;0;0.666*voc12;voc10;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc13+2.2*voc17+4.11*voc18+4*voc19;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0 +E061;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 
+E062;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E063;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;voc10;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E064;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E065;0;0;0;0;0;0;0;0;voc13;voc07;voc02;0.5*voc01;0;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E066;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E067;0;0;0;0;0;0;0.625*voc22;0.666*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E068;0;0;0;0;0;0;0.625*voc22;0.666*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 
+E069;0;0;0;0;0;0;0.625*voc22;0.666*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E070;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E071;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E072;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E073;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;voc11;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0;0;0;0;0 +E074;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000008*nmvoc;0;0;0.000858*nmvoc;0.004177*nmvoc;0;0.018548*nmvoc;0;0;0;0.00104*nmvoc;0.011594*nmvoc;0;0;0;0.000893*nmvoc;0;0;0.02*1.3*pm25;0.01*pm25;0;0.15*pm25;0.82*pm25;pm10-pm25 +E075;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000013*nmvoc;0;0;0.003224*nmvoc;0.001662*nmvoc;0.000659*nmvoc;0.008038*nmvoc;0;0;0.000175*nmvoc;0.000496*nmvoc;0.025751*nmvoc;0;0;0;0.001726*nmvoc;0;0;0.03*1.3*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 
+E076;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000047*nmvoc;0;0;0.002914*nmvoc;0.004187*nmvoc;0.000849*nmvoc;0.00186*nmvoc;0;0;0;0.002559*nmvoc;0.029992*nmvoc;0;0;0;0.001755*nmvoc;0;0;0.35*1.8*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 +E077;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.000006*nmvoc;0.005934*nmvoc;0;0.000026*nmvoc;0;0.000001*nmvoc;0;0.000373*nmvoc;0.055357*nmvoc;0;0;0;0.000048*nmvoc;0;0;0;0;0;0;1*pm25;pm10-pm25 +E078;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.000087*nmvoc;0.000148*nmvoc;0.003563*nmvoc;0.000004*nmvoc;0;0.000001*nmvoc;0.001904*nmvoc;0.000161*nmvoc;0.036006*nmvoc;0;0;0;0.001686*nmvoc;0;0;0;0;0;0;1*pm25;pm10-pm25 +E079;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.000592*nmvoc;0;0;0.002423*nmvoc;0.001607*nmvoc;0;0.000899*nmvoc;0;0;0;0.002589*nmvoc;0.028079*nmvoc;0;0;0;0.003302*nmvoc;0;0;0.32*1.3*pm25;0.49*pm25;0;0.01*pm25;0.18*pm25;pm10-pm25 +E080;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0.002496*nmvoc;0.004456*nmvoc;0;0;0.001693*nmvoc;0;0;0;0.001111*nmvoc;0.030672*nmvoc;0;0;0.001132*nmvoc;0.000762*nmvoc;0;0;0.12*1.3*pm25;0.005*pm25;0;0.40*pm25;0.475*pm25;pm10-pm25 +E081;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.001084*nmvoc;0.000756*nmvoc;0.000248*nmvoc;0.00622*nmvoc;0.000293*nmvoc;0;0.005437*nmvoc;0.000086*nmvoc;0;0;0.003296*nmvoc;0.011816*nmvoc;0;0;0.000192*nmvoc;0.000124*nmvoc;0;0;0.62*1.3*pm25;0.16*pm25;0;0.15*pm25;0.07*pm25;pm10-pm25 +E082;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.000227*nmvoc;0.000311*nmvoc;0.000333*nmvoc;0.003921*nmvoc;0.000166*nmvoc;0;0.00206*nmvoc;0.00079*nmvoc;0;0;0.00111*nmvoc;0.039123*nmvoc;0;0;0.000245*nmvoc;0.000476*nmvoc;0;0;0.31*1.3*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 +E083;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0.001867*nmvoc;0.008553*nmvoc;0.000529*nmvoc;0.005349*nmvoc;0;0.000169*nmvoc;0.000133*nmvoc;0.000435*nmvoc;0.025797*nmvoc;0;0;0;0.000742*nmvoc;0;0;0.31*1.3*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 
+E084;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.001591*nmvoc;0.000103*nmvoc;0.000038*nmvoc;0;0;0.000022*nmvoc;0;0;0;0;0.000023*nmvoc;0.03941*nmvoc;0;0.000007*nmvoc;0.000237*nmvoc;0;0;0;0.318*1.3*pm25;0.0516*pm25;0;0.0446*pm25;0.5858*pm25;pm10-pm25 +E085;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.002093*nmvoc;0.000525*nmvoc;0.000105*nmvoc;0.002929*nmvoc;0.001894*nmvoc;0.000023*nmvoc;0.002826*nmvoc;0.000116*nmvoc;0.000043*nmvoc;0.003664*nmvoc;0.003078*nmvoc;0.010261*nmvoc;0;0.000006*nmvoc;0.00013*nmvoc;0.000006*nmvoc;0;0;0.48*1.8*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 +E086;0;0;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E087;nox_no2;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;1.3*oc;bc;0;0;0;0 +E088;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;0;0;0 +E089;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;0;0;0 +E090;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;0;0;0 
+E091;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.8*oc;bc;0;0;0;0 +E092;0;0;0;0;0;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;0;0;0 +E093;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;0;0;0 +E094;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;voc10;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;0;0;0 +E095;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.001084*nmvoc;0.000756*nmvoc;0.000248*nmvoc;0.00622*nmvoc;0.000293*nmvoc;0;0.005437*nmvoc;0.000086*nmvoc;0;0;0.003296*nmvoc;0.011816*nmvoc;0;0;0.000192*nmvoc;0.000124*nmvoc;0;0;1.3*oc;bc;0;0;0;0 diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv deleted file mode 100644 index 534beb6..0000000 --- a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv +++ /dev/null @@ -1,13 +0,0 @@ -ID;SO2;DMS;POA;PEC;PSO4;PMFINE;PMC 
-units;mol.s-1.m-2;mol.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2 -short_description;sulfur_dioxide;dimethyl_sulfide;primary_organic_aerosol;primary_elemental_carbon;primary_sulfate_fine;primary_others_fine;pm_coarse -E001;so2;c2h6s;1.8*3*oc;5.9*bc;0;pm25-oc-bc;0 -E002;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 -E003;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 -E004;so2;0;1.8*oc;bc;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 -E005;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 -E006;0;0;0;0;0;0;0 -E007;so2;0;1.3*oc;bc;0;pm25-oc-bc;pm10-pm25 -E008;so2;0;1.3*oc;bc;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 -E009;so2;0;1.44*oc;bc;0;pm25-oc-bc;pm10-pm25 -E010;0;0;0;0;0;pm25;pm10-pm25 \ No newline at end of file diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv b/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv deleted file mode 100644 index dc1bd1c..0000000 --- a/data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv +++ /dev/null @@ -1,25 +0,0 @@ -ID;NO;NO2;HONO;CO;SO2;NH3;ALD2;ALDX;BENZENE;ETH;ETHA;ETOH;FORM;IOLE;ISOP;MEOH;OLE;PAR;SESQ;TERP;TOL;XYL;DMS;HCL;POA;PEC;PNO3;PSO4;PMFINE;PMC -units;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;mol.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2;kg.s-1.m-2 
-short_description;nitrogen_monoxide;nitrogen_dioxide;nitrous_acid;carbon_monoxide;sulfur_dioxide;ammonia;acetaldehyde;higher_aldehydes;benzene;ethene;ethane;ethanol;formaldehyde;internal_olefin_carbon_bond;isoprene;methanol;terminal_olefin_carbon_bond;paraffin_carbon_bond;sesquiterpenes;terpene;toluene;xylene;dimethyl_sulfide;hydrogen_chloride;primary_organic_carbon;primary_elemental_carbon;primary_nitrate_fine;primary_sulfate_fine;primary_others_fine;pm_coarse -E001;0.72*nox_no;0.18*nox_no;0.1*nox_no;co;so2;nh3;c2h4o;0;c6h6;c2h4;c2h6;c2h5oh;0;0.5*hialkenes;c5h8;ch3oh;c8h16+c5h10+c3h6+c4h8+c6h12+0.5*hialkanes;4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8;0;terpenes;ch2o+c7h8;c8h10;c2h6s;0;3*oc;5.9*bc;0;0;3.3*pm25-3*oc-5.9*bc;0 -E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.155;(pm25-oc-bc)*0.845;pm10-pm25 -E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.911;pm10-pm25 -E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.8*oc;bc;0;(pm25-oc-bc)*0.047;(pm25-oc-bc)*0.953;pm10-pm25 
-E005;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.059;(pm25-oc-bc)*0.941;pm10-pm25 -E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 -E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;1.3*oc;bc;0;0;pm25-oc-bc;pm10-pm25 -E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;1.3*oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.463;pm10-pm25 -E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;c3h6;c2h2+c3h6+ch3cooh;0;0;ch2o;0;0;hcl;1.44*oc;bc;0;0;pm25-oc-bc;pm10-pm25 -E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.02*1.3*pm25;0.01*pm25;0;0.15*pm25;0.82*pm25;pm10-pm25 -E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.35*1.8*pm25;0.18*pm25;0;0.02*pm25;0.45*pm25;pm10-pm25 
-E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.03*1.3*pm25;0.01*pm25;0;0.1*pm25;0.86*pm25;pm10-pm25 -E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;1*pm25;pm10-pm25 -E014;0;0;0;0;0;0;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0 -E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.58*1.3*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 -E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0.288*1.3*pm25;0.675*pm25;0;0.01*pm25;0.037*pm25;pm10-pm25 -E017;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0;0.2*voc17;voc17;0;0;0.58*1.3*pm25;0.21*pm25;0;0.01*pm25;0.21*pm25;pm10-pm25 -E018;0;0;0;0;0;0;0;0;voc13;0;0;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;0 -E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*1.3*pm25;0.06*pm25;0;0;0.74*pm25;pm10-pm25 
-E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.31*1.3*pm25;0.41*pm25;0;0.03*pm25;0.25*pm25;pm10-pm25 -E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0.31*1.3*pm25;0.2*pm25;0;0;0.49*pm25;pm10-pm25 -E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*1.8*pm25;0.15*pm25;0;0;0.37*pm25;pm10-pm25 diff --git a/data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv b/data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv new file mode 100644 index 0000000..1546735 --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv @@ -0,0 +1,28 @@ +ID;NO;NO2;HONO;CO;SO2;NH3;ALD2;ALDX;BENZENE;ETH;ETHA;ETOH;FORM;IOLE;ISOP;MEOH;OLE;PAR;SESQ;TERP;TOL;XYL;DMS;HCL;SULF;POC;PEC;PNO3;PSO4;PH2O;PCL;PNCOM;PCA;PSI;PMG;PMN;PNA;PNH4;PAL;PFE;PTI;PK;PMOTHR;PMC +units;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1 
+short_description;nitrogen_monoxide;nitrogen_dioxide;nitrous_acid;carbon_monoxide;sulfur_dioxide;ammonia;acetaldehyde;higher_aldehydes;benzene;ethene;ethane;ethanol;formaldehyde;internal_olefin_carbon_bond;isoprene;methanol;terminal_olefin_carbon_bond;paraffin_carbon_bond;sesquiterpenes;terpene;toluene;xylene;dimethyl_sulfide;hydrogen_chloride;sulfuric_acid;primary_organic_aerosol;primary_elemental_carbon;primary_nitrate_fine;primary_sulfate_fine;particle_bound_water;particulate_chloride;non-carbon_organic_matter_OM_OC;particulate_calcium;particulate_silica;particulate_magnesium;particulate_manganese;particulate_sodium;particulate_ammonium;particulate_aluminum;particulate_iron;particulate_titanium;particulate_potassium;primary_others_fine;pm_coarse +E001;0.72*nox_no;0.18*nox_no;0.1*nox_no;co;so2;nh3;c2h4o;0;c6h6;c2h4;c2h6;c2h5oh;0;0.5*hialkenes;c5h8;ch3oh;c8h16+c5h10+c3h6+c4h8+c6h12+0.5*hialkanes;4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8;0;terpenes;ch2o+c7h8;c8h10;c2h6s;0;0;oc;bc;0;0;0;(pm25-oc-bc)*0.09669;(pm25-oc-bc)*0.75259;(pm25-oc-bc)*0.00899;(pm25-oc-bc)*0.00424;(pm25-oc-bc)*0.00073;(pm25-oc-bc)*0.00003;(pm25-oc-bc)*0.01335;(pm25-oc-bc)*0.02048;(pm25-oc-bc)*0.00141;(pm25-oc-bc)*0.00101;(pm25-oc-bc)*0.00011;(pm25-oc-bc)*0.0685;(pm25-oc-bc)*0.03181;0 +E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.155;0;(pm25-oc-bc)*0.00391;(pm25-oc-bc)*0.01187;(pm25-oc-bc)*0.03542;(pm25-oc-bc)*0.08697;(pm25-oc-bc)*0.00234;(pm25-oc-bc)*0.00022;(pm25-oc-bc)*0.00066;(pm25-oc-bc)*0.01621;(pm25-oc-bc)*0.05185;(pm25-oc-bc)*0.02467;(pm25-oc-bc)*0.00321;(pm25-oc-bc)*0.00452;(pm25-oc-bc)*0.60309;pm10-pm25 
+E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.02646;0;(pm25-oc-bc)*0.03282;0;0;0;0;0;0;0;0;0;0;(pm25-oc-bc)*0.85171;pm10-pm25 +E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.047;0;(pm25-oc-bc)*0.00538;(pm25-oc-bc)*0.76284;(pm25-oc-bc)*0.01625;(pm25-oc-bc)*0.01;(pm25-oc-bc)*0.00418;(pm25-oc-bc)*0.00007;(pm25-oc-bc)*0.00158;(pm25-oc-bc)*0.02167;(pm25-oc-bc)*0.00946;(pm25-oc-bc)*0.01206;(pm25-oc-bc)*0.00055;(pm25-oc-bc)*0.02084;(pm25-oc-bc)*0.08805;pm10-pm25 +E005;0.823*nox_no2;0.16*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.059;0;(pm25-oc-bc)*0.00257;(pm25-oc-bc)*0.64241;(pm25-oc-bc)*0.00736;(pm25-oc-bc)*0.01712;(pm25-oc-bc)*0.00125;(pm25-oc-bc)*0.00007;(pm25-oc-bc)*0.00294;(pm25-oc-bc)*0.04996;(pm25-oc-bc)*0.00301;(pm25-oc-bc)*0.01009;(pm25-oc-bc)*0.0001;(pm25-oc-bc)*0.00048;(pm25-oc-bc)*0.20357;pm10-pm25 +E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 
+E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;oc;bc;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25-bc-oc;pm10-pm25 +E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.37066;0;(pm25-oc-bc)*0.04508;(pm25-oc-bc)*0.00259;0;(pm25-oc-bc)*0.00318;0;0;0;(pm25-oc-bc)*0.00773;(pm25-oc-bc)*0.00527;(pm25-oc-bc)*0.00005;0;(pm25-oc-bc)*0.02842;pm10-pm25 +E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;c3h6;c2h2+c3h6+ch3cooh;0;0;ch2o;0;0;hcl;0;oc;bc;0;0;0;(pm25-oc-bc)*0.1686;(pm25-oc-bc)*0.04105;(pm25-oc-bc)*0.02675;(pm25-oc-bc)*0.08393;0;(pm25-oc-bc)*0.00073;(pm25-oc-bc)*0.00988;(pm25-oc-bc)*0.10042;(pm25-oc-bc)*0.02431;(pm25-oc-bc)*0.02907;(pm25-oc-bc)*0.00293;(pm25-oc-bc)*0.02638;(pm25-oc-bc)*0.4859;pm10-pm25 +E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.02*pm25;0.01*pm25;0;0.15*pm25;0;pm25*0.00379;pm25*0.01152;pm25*0.03437;pm25*0.0844;pm25*0.00227;pm25*0.00021;pm25*0.00064;pm25*0.01573;pm25*0.05032;pm25*0.02394;pm25*0.00311;pm25*0.00438;pm25*0.58524;pm10-pm25 
+E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.35*pm25;0.18*pm25;0;0.02*pm25;0;pm25*0.00254;pm25*0.3602;pm25*0.00767;pm25*0.00472;pm25*0.00197;pm25*0.00003;pm25*0.00074;pm25*0.01023;pm25*0.00446;pm25*0.00569;pm25*0.00026;pm25*0.00984;pm25*0.04158;pm10-pm25 +E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.03*pm25;0.01*pm25;0;0.1*pm25;pm25*0.02498;0;pm25*0.03098;0;0;0;0;0;0;0;0;0;0;pm25*0.80402;pm10-pm25 +E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25;pm10-pm25 +E014;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0;pm25*0.00063;pm25*0.10898;pm25*0.00229;pm25*0.00377;pm25*0.00042;pm25*0.00003;pm25*0.00085;pm25*0.01328;pm25*0.00116;pm25*0.00321;pm25*0.00004;pm25*0.00014;pm25*0.06514;pm10-pm25 
+E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0;voc14+0.2*voc17;voc15+voc17;0;0;0;0.288*pm25;0.675*pm25;0;0.01*pm25;0;pm25*0.00006;pm25*0.02119;pm25*0.00013;pm25*0.00047;pm25*0.00002;0;pm25*0.00006;pm25*0.00116;pm25*0.00003;pm25*0.00018;0;0;pm25*0.00364;pm10-pm25 +E017;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0;0.2*voc17;voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0;pm25*0.00063;pm25*0.10898;pm25*0.00229;pm25*0.00377;pm25*0.00042;pm25*0.00003;pm25*0.00085;pm25*0.01328;pm25*0.00116;pm25*0.00321;pm25*0.00004;pm25*0.00014;pm25*0.06514;pm10-pm25 +E018;0;0;0;0;0;0;0;0;voc13;0;0;0;0;0.666*voc12;0;0;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0;voc14;voc15;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*pm25;0.06*pm25;0;0;pm25*0.01036;pm25*0.00617;pm25*0.15491;pm25*0.0074;pm25*0.05955;pm25*0.07439;pm25*0.00074;pm25*0.00049;pm25*0.0001;pm25*0.00118;pm25*0.07992;pm25*0.00261;pm25*0.00035;pm25*0.34144;pm10-pm25 +E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.31*pm25;0.41*pm25;0;0.03*pm25;0;0;pm25*0.07404;pm25*0.00018;pm25*0.00074;0;0;0;0;0;pm25*0.00006;0;0;pm25*0.17496;pm10-pm25 
+E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0;voc14+0.2*voc17;voc15+voc16+voc17;0;0;0;0.31*pm25;0.2*pm25;0;0;0;pm25*0.08261;pm25*0.02011;pm25*0.01311;pm25*0.04112;0;pm25*0.00035;pm25*0.00484;pm25*0.0492;pm25*0.01191;pm25*0.01424;pm25*0.00143;pm25*0.01293;pm25*0.23809;pm10-pm25 +E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*pm25;0.15*pm25;0;0;pm25*0.00427;pm25*0.04911;pm25*0.17393;pm25*0.00506;pm25*0.00919;pm25*0.00042;pm25*0.00009;pm25*0.00463;pm25*0.01719;pm25*0.0031;pm25*0.00277;pm25*0.00011;pm25*0.04219;pm25*0.05788;pm10-pm25 +E023;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25;pm10-pm25 +E024;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25-oc-bc;pm10-pm25 +E086;0;0;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 diff --git a/data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv b/data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv new file mode 100644 index 0000000..eb1b229 --- /dev/null +++ b/data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv @@ -0,0 +1,28 @@ +ID;NO;NO2;HONO;CO;SO2;NH3;ALD2;ALDX;BENZENE;ETH;ETHA;ETOH;FORM;IOLE;ISOP;MEOH;NAPH;OLE;PAR;SESQ;SOAALK;TERP;TOL;XYLMN;DMS;HCL;SULF;POC;PEC;PNO3;PSO4;PH2O;PCL;PNCOM;PCA;PSI;PMG;PMN;PNA;PNH4;PAL;PFE;PTI;PK;PMOTHR;PMC +units;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;mol.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1;g.s-1 
+short_description;nitrogen_monoxide;nitrogen_dioxide;nitrous_acid;carbon_monoxide;sulfur_dioxide;ammonia;acetaldehyde;higher_aldehydes;benzene;ethene;ethane;ethanol;formaldehyde;internal_olefin_carbon_bond;isoprene;methanol;naphthalene;terminal_olefin_carbon_bond;paraffin_carbon_bond;sesquiterpenes;alkanes that produce aerosol material;terpene;toluene;xylene without naphtalene;dimethyl_sulfide;hydrogen_chloride;sulfuric_acid;primary_organic_aerosol;primary_elemental_carbon;primary_nitrate_fine;primary_sulfate_fine;particle_bound_water;particulate_chloride;non-carbon_organic_matter_OM_OC;particulate_calcium;particulate_silica;particulate_magnesium;particulate_manganese;particulate_sodium;particulate_ammonium;particulate_aluminum;particulate_iron;particulate_titanium;particulate_potassium;primary_others_fine;pm_coarse +E001;0.72*nox_no;0.18*nox_no;0.1*nox_no;co;so2;nh3;c2h4o;0;c6h6;c2h4;c2h6;c2h5oh;0;0.5*hialkenes;c5h8;ch3oh;0.002*c8h10;c8h16+c5h10+c3h6+c4h8+c6h12+0.5*hialkanes;4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8;0;0.108*(4*c4h10+6*c6h14+5*hialkanes+6*c8h16+3*c5h10+c3h6+3*c3h6o+2*c4h8+7*c7h16+4*c6h12+hialkenes+5*c5h12+1.5*c3h8);terpenes;ch2o+c7h8;0.998*c8h10;c2h6s;0;0;oc;bc;0;0;0;(pm25-oc-bc)*0.09669;(pm25-oc-bc)*0.75259;(pm25-oc-bc)*0.00899;(pm25-oc-bc)*0.00424;(pm25-oc-bc)*0.00073;(pm25-oc-bc)*0.00003;(pm25-oc-bc)*0.01335;(pm25-oc-bc)*0.02048;(pm25-oc-bc)*0.00141;(pm25-oc-bc)*0.00101;(pm25-oc-bc)*0.00011;(pm25-oc-bc)*0.0685;(pm25-oc-bc)*0.03181;0 
+E002;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;oc;bc;0;(pm25-oc-bc)*0.155;0;(pm25-oc-bc)*0.00391;(pm25-oc-bc)*0.01187;(pm25-oc-bc)*0.03542;(pm25-oc-bc)*0.08697;(pm25-oc-bc)*0.00234;(pm25-oc-bc)*0.00022;(pm25-oc-bc)*0.00066;(pm25-oc-bc)*0.01621;(pm25-oc-bc)*0.05185;(pm25-oc-bc)*0.02467;(pm25-oc-bc)*0.00321;(pm25-oc-bc)*0.00452;(pm25-oc-bc)*0.60309;pm10-pm25 +E003;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;oc;bc;0;(pm25-oc-bc)*0.089;(pm25-oc-bc)*0.02646;0;(pm25-oc-bc)*0.03282;0;0;0;0;0;0;0;0;0;0;(pm25-oc-bc)*0.85171;pm10-pm25 
+E004;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;oc;bc;0;(pm25-oc-bc)*0.047;0;(pm25-oc-bc)*0.00538;(pm25-oc-bc)*0.76284;(pm25-oc-bc)*0.01625;(pm25-oc-bc)*0.01;(pm25-oc-bc)*0.00418;(pm25-oc-bc)*0.00007;(pm25-oc-bc)*0.00158;(pm25-oc-bc)*0.02167;(pm25-oc-bc)*0.00946;(pm25-oc-bc)*0.01206;(pm25-oc-bc)*0.00055;(pm25-oc-bc)*0.02084;(pm25-oc-bc)*0.08805;pm10-pm25 +E005;0.823*nox_no2;0.16*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;oc;bc;0;(pm25-oc-bc)*0.059;0;(pm25-oc-bc)*0.00257;(pm25-oc-bc)*0.64241;(pm25-oc-bc)*0.00736;(pm25-oc-bc)*0.01712;(pm25-oc-bc)*0.00125;(pm25-oc-bc)*0.00007;(pm25-oc-bc)*0.00294;(pm25-oc-bc)*0.04996;(pm25-oc-bc)*0.00301;(pm25-oc-bc)*0.01009;(pm25-oc-bc)*0.0001;(pm25-oc-bc)*0.00048;(pm25-oc-bc)*0.20357;pm10-pm25 +E006;0;0;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E007;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;0.002*(voc15+voc17);voc08+0.333*voc12;1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0.108*(1.5*voc03+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23);0;voc14+0.2*voc17;0.998*(voc15+voc17);0;0;0;oc;bc;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25-bc-oc;pm10-pm25 
+E008;0.9*nox_no2;0.1*nox_no2;0;co;so2;0;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;oc;bc;0;(pm25-oc-bc)*0.537;(pm25-oc-bc)*0.37066;0;(pm25-oc-bc)*0.04508;(pm25-oc-bc)*0.00259;0;(pm25-oc-bc)*0.00318;0;0;0;(pm25-oc-bc)*0.00773;(pm25-oc-bc)*0.00527;(pm25-oc-bc)*0.00005;0;(pm25-oc-bc)*0.02842;pm10-pm25 +E009;0.9*nox_no;0.1*nox_no;0;co;so2;nh3;0;0;c6h6;c2h4;0;0;0;0;0;ch3oh;0;c3h6;c2h2+c3h6+ch3cooh;0;0.108*(c2h2+c3h6+ch3cooh);0;ch2o;0;0;hcl;0;oc;bc;0;0;0;(pm25-oc-bc)*0.1686;(pm25-oc-bc)*0.04105;(pm25-oc-bc)*0.02675;(pm25-oc-bc)*0.08393;0;(pm25-oc-bc)*0.00073;(pm25-oc-bc)*0.00988;(pm25-oc-bc)*0.10042;(pm25-oc-bc)*0.02431;(pm25-oc-bc)*0.02907;(pm25-oc-bc)*0.00293;(pm25-oc-bc)*0.02638;(pm25-oc-bc)*0.4859;pm10-pm25 +E010;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;0.02*pm25;0.01*pm25;0;0.15*pm25;0;pm25*0.00379;pm25*0.01152;pm25*0.03437;pm25*0.0844;pm25*0.00227;pm25*0.00021;pm25*0.00064;pm25*0.01573;pm25*0.05032;pm25*0.02394;pm25*0.00311;pm25*0.00438;pm25*0.58524;pm10-pm25 
+E011;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;0.35*pm25;0.18*pm25;0;0.02*pm25;0;pm25*0.00254;pm25*0.3602;pm25*0.00767;pm25*0.00472;pm25*0.00197;pm25*0.00003;pm25*0.00074;pm25*0.01023;pm25*0.00446;pm25*0.00569;pm25*0.00026;pm25*0.00984;pm25*0.04158;pm10-pm25 +E012;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;0.03*pm25;0.01*pm25;0;0.1*pm25;pm25*0.02498;0;pm25*0.03098;0;0;0;0;0;0;0;0;0;0;pm25*0.80402;pm10-pm25 +E013;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;voc13;0;voc02;0;0;0.666*voc12;0;0;0.002*voc15;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13);0;voc14;0.998*voc15;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25;pm10-pm25 +E014;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0.01*voc18+0.3*voc19;0;0;0;0.5*voc01;0;0;0;0.5*voc01;0.002*(voc15+voc17);0;7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23;0;0.108*(7.5*voc06+2.2*voc17+4.11*voc18+4*voc19+4*voc23);0;voc14+0.2*voc17;0.998*(voc15+voc17);0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 
+E015;0.95*nox_no2;0.042*nox_no2;0.008*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0;pm25*0.00063;pm25*0.10898;pm25*0.00229;pm25*0.00377;pm25*0.00042;pm25*0.00003;pm25*0.00085;pm25*0.01328;pm25*0.00116;pm25*0.00321;pm25*0.00004;pm25*0.00014;pm25*0.06514;pm10-pm25 +E016;0.7*nox_no2;0.283*nox_no2;0.017*nox_no2;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0;voc21;0.666*voc12;0;0;0.002*(voc15+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+2.2*voc17+1.875*voc22+4*voc23);0;voc14+0.2*voc17;0.998*(voc15+voc17);0;0;0;0.288*pm25;0.675*pm25;0;0.01*pm25;0;pm25*0.00006;pm25*0.02119;pm25*0.00013;pm25*0.00047;pm25*0.00002;0;pm25*0.00006;pm25*0.00116;pm25*0.00003;pm25*0.00018;0;0;pm25*0.00364;pm10-pm25 +E017;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;0;voc07;voc02;0;voc21;0.666*voc12;0;0;0.002*voc17;voc08+0.333*voc12;1.5*voc03+voc08+2.2*voc17+1.875*voc22;0;0.108*(1.5*voc03+voc08+2.2*voc17+1.875*voc22);0;0.2*voc17;0.998*voc17;0;0;0;0.58*pm25;0.21*pm25;0;0.01*pm25;0;pm25*0.00063;pm25*0.10898;pm25*0.00229;pm25*0.00377;pm25*0.00042;pm25*0.00003;pm25*0.00085;pm25*0.01328;pm25*0.00116;pm25*0.00321;pm25*0.00004;pm25*0.00014;pm25*0.06514;pm10-pm25 +E018;0;0;0;0;0;0;0;0;voc13;0;0;0;0;0.666*voc12;0;0;0.002*voc15;0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc13);0;voc14;0.998*voc15;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 
+E019;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.2*pm25;0.06*pm25;0;0;pm25*0.01036;pm25*0.00617;pm25*0.15491;pm25*0.0074;pm25*0.05955;pm25*0.07439;pm25*0.00074;pm25*0.00049;pm25*0.0001;pm25*0.00118;pm25*0.07992;pm25*0.00261;pm25*0.00035;pm25*0.34144;pm10-pm25 +E020;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;0.31*pm25;0.41*pm25;0;0.03*pm25;0;0;pm25*0.07404;pm25*0.00018;pm25*0.00074;0;0;0;0;0;pm25*0.00006;0;0;pm25*0.17496;pm10-pm25 +E021;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0.625*voc22;0.01*voc18+0.3*voc19+0.375*voc22;voc13;voc07;voc02;0.5*voc01;voc21;0.666*voc12;0;0.5*voc01;0.002*(voc15+voc16+voc17);voc08+0.333*voc12;1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24;0;0.108*(1.5*voc03+4*voc04+5*voc05+7.5*voc06+voc08+voc09+voc13+voc16+2.2*voc17+4.11*voc18+4*voc19+1.875*voc22+4*voc23+voc24);0;voc14+0.2*voc17;0.998*(voc15+voc16+voc17);0;0;0;0.31*pm25;0.2*pm25;0;0;0;pm25*0.08261;pm25*0.02011;pm25*0.01311;pm25*0.04112;0;pm25*0.00035;pm25*0.00484;pm25*0.0492;pm25*0.01191;pm25*0.01424;pm25*0.00143;pm25*0.01293;pm25*0.23809;pm10-pm25 +E022;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.48*pm25;0.15*pm25;0;0;pm25*0.00427;pm25*0.04911;pm25*0.17393;pm25*0.00506;pm25*0.00919;pm25*0.00042;pm25*0.00009;pm25*0.00463;pm25*0.01719;pm25*0.0031;pm25*0.00277;pm25*0.00011;pm25*0.04219;pm25*0.05788;pm10-pm25 +E023;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25;pm10-pm25 
+E024;0.9*nox_no2;0.1*nox_no2;0;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;oc;bc;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm25-oc-bc;pm10-pm25 +E086;0;0;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 diff --git a/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv b/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv index a85e443..56926c3 100644 --- a/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv +++ b/data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv @@ -1,5 +1,5 @@ ID;E_NO;E_CO;E_SO2;E_NH3;E_ALD;E_CSL;E_ETH;E_HC3;E_HC5;E_HC8;E_HCHO;E_ISO;E_KET;E_OL2;E_OLI;E_OLT;E_ORA1;E_ORA2;E_TOL;E_XYL;E_PM_10;E_PM25J;E_PM25I;E_ECJ;E_ECI;E_ORGJ;E_ORGI;E_NO3J;E_NO3I;E_SO4J;E_SO4I -units;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2 +units;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;mol.h-1.km-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2;ug.s-1.m-2 
short_description;nitrogen_oxides;carbon_monoxide;sulfur_dioxide;ammonia;higher_aldehydes;phenols_cresols;ethane;propane;alkanes_0.5_1;alkanes_1_2;formaldehyde;isoprene;ketones;ethene;alkenes_internal;propene;formic_acid;organic_acids;toluene;xylene_and_higher_aromatics;unspeciated_primary_PM10;unspeciated_primary_PM2.5_accumulation_mode;unspeciated_primary_PM2.5_nuclei_mode;elemental_carbon_PM2.5_accumulation_mode;elemental_carbon_PM2.5_nuclei_mode;organic_carbon_PM2.5_accumulation_mode;organic_carbon_PM2.5_nuclei_mode;nitrate_PM2.5_accumulation_mode;nitrate_PM2.5_nuclei_mode;sulfate_PM2.5_accumulation_mode;sulfate_PM2.5_nuclei_mode E001;nox_no;co;so2;nh3;c2h4o;0;c2h6;c4h10+1.198*c2h5oh+0.402*ch3oh+0.519*c3h8;1.075*c2h6s+0.956*c6h14+0.43*hialkanes+0.956*c7h16+0.956*c5h12;0.57*hialkanes;ch2o;c5h8;0.253*c3h6o;c2h4;0.5*c5h10+hialkenes+terpenes;c8h16+0.5*c5h10+c3h6+c4h8+c6h12;0;0;c7h8+0.293*c6h6;c8h10;0;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 E002;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;pm10;(pm25-oc-bc)*0.676;(pm25-oc-bc)*0.169;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.124;(pm25-oc-bc)*0.031 @@ -14,7 +14,7 @@ E010;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc0 E011;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.36;pm25*0.09;pm25*0.144;pm25*0.036;pm25*0.28;pm25*0.07;0;0;pm25*0.016;pm25*0.004 E012;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.688;pm25*0.172;pm25*0.008;pm25*0.002;pm25*0.024;pm25*0.006;0;0;pm25*0.08;pm25*0.02 
E013;nox_no2;co;so2;nh3;0;0;voc02;voc03+voc04;voc05+0.43*voc06;0.57*voc06;0;0;0;0;voc12;0;0;0;0.293*voc13+voc14;voc15;pm10;pm25*0.8;pm25*0.2;0;0;0;0;0;0;0;0 -E014;0;0;0;0;0;0;0;0.95*voc01+0.69*voc18+voc20;0.05*voc01+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;0;0;voc23;0;0;0;0;0;voc14;voc15;0;0;0;0;0;0;0;0;0;0;0 +E014;nox_no2;co;so2;nh3;0;0;0;0.95*voc01+0.69*voc18+voc20;0.05*voc01+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;0;0;voc23;0;0;0;0;0;voc14;voc15;0;0;0;0;0;0;0;0;0;0;0 E015;nox_no2;co;so2;nh3;voc22;0;voc02;voc03+voc04+0.4*voc09;voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.168;pm25*0.042;pm25*0.168;pm25*0.042;pm25*0.464;pm25*0.116;0;0;pm25*0.008;pm25*0.002 E016;nox_no2;co;so2;nh3;voc22;0;voc02;voc03+voc04+0.4*voc09;voc05+0.43*voc06;0.57*voc06+voc17;voc21;0;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc15;pm10;pm25*0.0296;pm25*0.0074;pm25*0.54;pm25*0.135;pm25*0.2304;pm25*0.0576;0;0;pm25*0.008;pm25*0.002 E017;nox_no2;co;so2;nh3;voc22;0;voc02;voc03;0;voc17;voc21;0;0;voc07;voc12;voc08;0;0;0;0;pm10;pm25*0.168;pm25*0.042;pm25*0.168;pm25*0.042;pm25*0.464;pm25*0.116;0;0;pm25*0.008;pm25*0.002 @@ -24,5 +24,75 @@ E020;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc0 E021;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;0;voc23;voc07;voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc15+voc16;pm10;pm25*0.392;pm25*0.098;pm25*0.16;pm25*0.04;pm25*0.248;pm25*0.062;0;0;0;0 E022;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;pm25*0.296;pm25*0.074;pm25*0.12;pm25*0.03;pm25*0.384;pm25*0.096;0;0;0;0 E023;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;pm25*0.8;pm25*0.2;0;0;0;0;0;0;0;0 -E024;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 - 
+E024;nox_no2;co;so2;nh3;0;0;nmvoc*0.00998;nmvoc*0.00353;0;0;nmvoc*0.00666;0;0;0;0;0;0;0;0;0;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E025;nox_no2;co;so2;nh3;0.000008*nmvoc;0;0.004177*nmvoc;0.002461*nmvoc;0;0;0.018548*nmvoc;0;0.000086*nmvoc;0.000858*nmvoc;0;0.00104*nmvoc;0;0;0;0.000893*nmvoc;pm10;(pm25-oc-bc)*0.676;(pm25-oc-bc)*0.169;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.124;(pm25-oc-bc)*0.031 +E026;nox_no2;co;so2;nh3;0.000013*nmvoc;0;0.001662*nmvoc;0.006766*nmvoc;0;0;0.008038*nmvoc;0;0.000187*nmvoc;0.003224*nmvoc;0;0.000496*nmvoc;0;0;0;0.001726*nmvoc;pm10;(pm25-oc-bc)*0.7288;(pm25-oc-bc)*0.1822;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.0712;(pm25-oc-bc)*0.0178 +E027;nox_no2;co;so2;nh3;0.000592*nmvoc;0;0.001607*nmvoc;0.006087*nmvoc;0;0;0.000899*nmvoc;0;0.000058*nmvoc;0.002423*nmvoc;0;0.002589*nmvoc;0;0;0;0.003302*nmvoc;pm10;(pm25-oc-bc)*0.7528;(pm25-oc-bc)*0.1882;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.0472;(pm25-oc-bc)*0.0118 +E028;nox_no2;co;so2;nh3;0.000047*nmvoc;0;0.004187*nmvoc;0.007617*nmvoc;0;0;0.00186*nmvoc;0;0.000012*nmvoc;0.002914*nmvoc;0;0.002559*nmvoc;0;0;0;0.001755*nmvoc;pm10;(pm25-oc-bc)*0.7624;(pm25-oc-bc)*0.1906;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.0376;(pm25-oc-bc)*0.0094 +E029;nox_no2;co;so2;nh3;0;0;nmvoc*0.008553;nmvoc*0.006788;0;0;nmvoc*0.005349;nmvoc*0.000169;nmvoc*0.000013;nmvoc*0.001867;0;nmvoc*0.000435;0;0;0;nmvoc*0.000742;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E030;0;0;0;0;0;0;0.000148*nmvoc;0.012341*nmvoc;0;0;0.000004*nmvoc;0.000001*nmvoc;0.001382*nmvoc;0.000087*nmvoc;0;0.000161*nmvoc;0;0;0;0.001686*nmvoc;0;0;0;0;0;0;0;0;0;0;0 
+E031;nox_no2;co;so2;nh3;0.000972*nmvoc;0.000614*nmvoc;0.001894*nmvoc;0.002246*nmvoc;0.000215*nmvoc;0.000253*nmvoc;0.002737*nmvoc;0.000043*nmvoc;0.000524*nmvoc;0.002929*nmvoc;0.000309*nmvoc;0.001607*nmvoc;0.000281*nmvoc;0.000782*nmvoc;0.000083*nmvoc;0.000468*nmvoc;pm10;(pm25-oc-bc)*0.8;(pm25-oc-bc)*0.2;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E032;nox_no2;co;so2;0;0;0;0;0;0;0.002161*nmvoc;0;0;0;0.004456*nmvoc;0.001693*nmvoc;0.001111*nmvoc;0;0;0.001864*nmvoc;0.000762*nmvoc;pm10;(pm25-oc-bc)*0.3704;(pm25-oc-bc)*0.0926;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;(pm25-oc-bc)*0.4296;(pm25-oc-bc)*0.1074 +E033;nox_no2;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E034;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E035;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E036;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E037;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E038;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E039;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E040;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E041;nox_no2;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E042;nox_no2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E043;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E044;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E045;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E046;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E047;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E048;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E049;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E050;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E051;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E052;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; 
+E053;nox_no2;co;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E054;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E055;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E056;nox_no2;co;so2;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;pm10;;;;;;;;;; +E057;0;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;;;;;;;;;; +E058;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E059;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E060;0;0;0;0;0;0;voc02;voc03+voc04+0.69*voc18+voc20;voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;0;voc10;0;0;voc12;voc08;0;0;0.293*voc13+voc14;voc17;0;0;0;0;0;0;0;0;0;0;0 +E061;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E062;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E063;0;0;0;0;voc22;0;voc02;voc03+voc04+0.4*voc09+0.69*voc18+voc20;voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E064;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 
+E065;0;0;0;0;0;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;0;voc10;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E066;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E067;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;voc21;voc10;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E068;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;voc21;voc10;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E069;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+0.4*voc09;0.05*voc01+voc05+0.43*voc06;0.57*voc06+voc17;voc21;voc10;voc23;voc07;voc12;voc08;0;0;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E070;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E071;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E072;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 
+E073;0;0;0;0;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;0;0;0;0;0;0;0;0 +E074;nox_no2;co;so2;nh3;0.000008*nmvoc;0;0.004177*nmvoc;0.002461*nmvoc;0;0;0.018548*nmvoc;0;0.000086*nmvoc;0.000858*nmvoc;0;0.00104*nmvoc;0;0;0;0.000893*nmvoc;pm10;pm25*0.656;pm25*0.164;pm25*0.008;pm25*0.002;pm25*0.016;pm25*0.004;0;0;pm25*0.12;pm25*0.03 +E075;nox_no2;co;so2;nh3;0.000013*nmvoc;0;0.001662*nmvoc;0.006766*nmvoc;0;0;0.008038*nmvoc;0;0.000187*nmvoc;0.003224*nmvoc;0;0.000496*nmvoc;0;0;0;0.001726*nmvoc;pm10;pm25*0.688;pm25*0.172;pm25*0.008;pm25*0.002;pm25*0.024;pm25*0.006;0;0;pm25*0.08;pm25*0.02 +E076;nox_no2;co;so2;nh3;0.000047*nmvoc;0;0.004187*nmvoc;0.007617*nmvoc;0;0;0.00186*nmvoc;0;0.000012*nmvoc;0.002914*nmvoc;0;0.002559*nmvoc;0;0;0;0.001755*nmvoc;pm10;pm25*0.36;pm25*0.09;pm25*0.144;pm25*0.036;pm25*0.28;pm25*0.07;0;0;pm25*0.016;pm25*0.004 +E077;nox_no2;co;so2;nh3;0;0;0.005934*nmvoc;0.013251*nmvoc;0;0;0.000026*nmvoc;0.000001*nmvoc;0;0.000006*nmvoc;0;0.000373*nmvoc;0;0;0;0.000048*nmvoc;pm10;pm25*0.8;pm25*0.2;0;0;0;0;0;0;0;0 +E078;nox_no2;co;so2;nh3;0;0;0.000148*nmvoc;0.012341*nmvoc;0;0;0.000004*nmvoc;0.000001*nmvoc;0.001382*nmvoc;0.000087*nmvoc;0;0.000161*nmvoc;0;0;0;0.001686*nmvoc;0;0;0;0;0;0;0;0;0;0;0 +E079;nox_no2;co;so2;nh3;0.000592*nmvoc;0;0.001607*nmvoc;0.006087*nmvoc;0;0;0.000899*nmvoc;0;0.000058*nmvoc;0.002423*nmvoc;0;0.002589*nmvoc;0;0;0;0.003302*nmvoc;pm10;pm25*0.0296;pm25*0.0074;pm25*0.54;pm25*0.135;pm25*0.2304;pm25*0.0576;0;0;pm25*0.008;pm25*0.002 +E080;nox_no2;co;so2;nh3;0;0;0;0;0;0.002161*nmvoc;0;0;0;0.004456*nmvoc;0.001693*nmvoc;0.001111*nmvoc;0;0;0.001864*nmvoc;0.000762*nmvoc;pm10;pm25*0.2;pm25*0.05;pm25*0.328;pm25*0.082;pm25*0.248;pm25*0.062;0;0;pm25*0.024;pm25*0.006 
+E081;nox_no2;co;so2;nh3;0.001407*nmvoc;0.000026*nmvoc;0.000293*nmvoc;0.000571*nmvoc;0.000077*nmvoc;0.000265*nmvoc;0.004999*nmvoc;0;0.000107*nmvoc;0.00622*nmvoc;0.000447*nmvoc;0.002007*nmvoc;0;0;0.000414*nmvoc;0.000124*nmvoc;pm10;pm25*0.2;pm25*0.05;pm25*0.328;pm25*0.082;pm25*0.248;pm25*0.062;0;0;pm25*0.024;pm25*0.006 +E082;nox_no2;co;so2;nh3;0.000538*nmvoc;0;0.000166*nmvoc;0.00153*nmvoc;0.00201*nmvoc;0.001817*nmvoc;0.001965*nmvoc;0;0;0.003921*nmvoc;0.000885*nmvoc;0.00111*nmvoc;0;0;0.000343*nmvoc;0.000476*nmvoc;pm10;pm25*0.2;pm25*0.05;pm25*0.328;pm25*0.082;pm25*0.248;pm25*0.062;0;0;pm25*0.024;pm25*0.006 +E083;nox_no2;co;so2;nh3;0;0;nmvoc*0.008553;nmvoc*0.006788;0;0;nmvoc*0.005349;nmvoc*0.000169;nmvoc*0.000013;nmvoc*0.001867;0;nmvoc*0.000435;0;0;0;nmvoc*0.000742;pm10;pm25*0.392;pm25*0.098;pm25*0.16;pm25*0.04;pm25*0.248;pm25*0.062;0;0;0;0 +E084;nox_no2;co;so2;nh3;0.001624*nmvoc;0.000252*nmvoc;0;0.001519*nmvoc;0.009379*nmvoc;0.000267*nmvoc;0;0;0.000344*nmvoc;0;0.000031*nmvoc;0;0;0.000618*nmvoc;0.000128*nmvoc;0;pm10;pm25*0.296;pm25*0.074;pm25*0.12;pm25*0.03;pm25*0.384;pm25*0.096;0;0;0;0 +E085;nox_no2;co;so2;nh3;0.000972*nmvoc;0.000614*nmvoc;0.001894*nmvoc;0.002246*nmvoc;0.000215*nmvoc;0.000253*nmvoc;0.002737*nmvoc;0.000043*nmvoc;0.000524*nmvoc;0.002929*nmvoc;0.000309*nmvoc;0.001607*nmvoc;0.000281*nmvoc;0.000782*nmvoc;0.000083*nmvoc;0.000468*nmvoc;pm10;pm25*0.296;pm25*0.074;pm25*0.12;pm25*0.03;pm25*0.384;pm25*0.096;0;0;0;0 +E086;0;0;so2;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E087;nox_no2;0;0;nh3;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0 +E088;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 
+E089;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E090;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E091;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E092;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E093;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E094;nox_no2;co;so2;nh3;voc22;0;voc02;0.95*voc01+voc03+voc04+0.4*voc09+0.69*voc18+voc20;0.05*voc01+voc05+0.43*voc06+0.31*voc18;0.57*voc06+voc17+voc19;voc21;voc10;voc23;voc07;voc11+voc12;voc08;0.44*voc24;0.56*voc24;0.293*voc13+voc14;voc16+voc17;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 +E095;nox_no2;co;so2;nh3;0.001407*nmvoc;0.000026*nmvoc;0.000293*nmvoc;0.000571*nmvoc;0.000077*nmvoc;0.000265*nmvoc;0.004999*nmvoc;0;0.000107*nmvoc;0.00622*nmvoc;0.000447*nmvoc;0.002007*nmvoc;0;0;0.000414*nmvoc;0.000124*nmvoc;0;0;0;bc*0.8;bc*0.2;oc*0.8;oc*0.2;0;0;0;0 diff --git 
a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py index 97573b4..1446565 100644 --- a/hermesv3_gr/tools/sample_files.py +++ b/hermesv3_gr/tools/sample_files.py @@ -28,7 +28,7 @@ def make_conf_file_list(): file_list = [ {'conf': [ os.path.join(main_dir, 'conf', 'hermes.conf'), - os.path.join(main_dir, 'conf', 'EI_configuration.csv'), + os.path.join(main_dir, 'conf', 'EI_configuration_benchmark.csv'), ]}, ] diff --git a/setup.py b/setup.py index 688f5dd..a436b41 100644 --- a/setup.py +++ b/setup.py @@ -78,7 +78,7 @@ setup( }, data_files=[('.', ['LICENSE', 'CHANGELOG', ]), ('conf', ['conf/hermes.conf', - 'conf/EI_configuration.csv', ]), + 'conf/EI_configuration_benchmark.csv', ]), ('data', ['data/global_attributes.csv', ]), ('data/profiles', []), ('data/profiles/speciation', [ -- GitLab From 9babada25bda92997f02f94adc5583095c7ec1e6 Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 17:39:42 +0200 Subject: [PATCH 46/51] Change name EI_configuration file --- conf/{EI_configuration_benchmark.csv => EI_configuration.csv} | 0 conf/hermes.conf | 2 +- hermesv3_gr/tools/sample_files.py | 2 +- setup.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename conf/{EI_configuration_benchmark.csv => EI_configuration.csv} (100%) diff --git a/conf/EI_configuration_benchmark.csv b/conf/EI_configuration.csv similarity index 100% rename from conf/EI_configuration_benchmark.csv rename to conf/EI_configuration.csv diff --git a/conf/hermes.conf b/conf/hermes.conf index 9c2be49..14f844f 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -68,7 +68,7 @@ auxiliar_files_path = /data/auxiliar_files/_ [EMISSION_INVENTORY_CONFIGURATION] -cross_table = /conf/EI_configuration_benchmark.csv +cross_table = /conf/EI_configuration.csv [EMISSION_INVENTORY_PROFILES] diff --git a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py index 1446565..97573b4 100644 --- a/hermesv3_gr/tools/sample_files.py +++ b/hermesv3_gr/tools/sample_files.py @@ 
-28,7 +28,7 @@ def make_conf_file_list(): file_list = [ {'conf': [ os.path.join(main_dir, 'conf', 'hermes.conf'), - os.path.join(main_dir, 'conf', 'EI_configuration_benchmark.csv'), + os.path.join(main_dir, 'conf', 'EI_configuration.csv'), ]}, ] diff --git a/setup.py b/setup.py index a436b41..688f5dd 100644 --- a/setup.py +++ b/setup.py @@ -78,7 +78,7 @@ setup( }, data_files=[('.', ['LICENSE', 'CHANGELOG', ]), ('conf', ['conf/hermes.conf', - 'conf/EI_configuration_benchmark.csv', ]), + 'conf/EI_configuration.csv', ]), ('data', ['data/global_attributes.csv', ]), ('data/profiles', []), ('data/profiles/speciation', [ -- GitLab From a2eaea29b54e60a20dc09384cd3ebbd27d1cfba4 Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Fri, 21 Sep 2018 17:40:05 +0200 Subject: [PATCH 47/51] updated setup.py --- environment.yml | 3 +-- setup.py | 5 +---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/environment.yml b/environment.yml index 5900cd3..251f3a7 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ dependencies: - python = 2 - numpy - netcdf4 >= 1.3.1 - - python-cdo = 1.6.3 + - python-cdo >= 1.3.6 - geopandas - pyproj - configargparse @@ -22,6 +22,5 @@ dependencies: - pytest - pytest-cov - pycodestyle - - pip: - holidays diff --git a/setup.py b/setup.py index 9e3aa71..b220b64 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ setup( install_requires=[ 'numpy', 'netCDF4>=1.3.1', - 'cdo==1.6.3', + 'cdo>=1.3.3', 'pandas', 'geopandas', 'pyproj', @@ -71,9 +71,6 @@ setup( 'README.md', 'CHANGELOG', 'LICENSE', - 'data/*', - 'conf/*', - 'preproc/*' ] }, data_files=[('.', ['LICENSE', 'CHANGELOG', ]), -- GitLab From f63ee6b75ff74e93a4bfb2459effd2e022f9f089 Mon Sep 17 00:00:00 2001 From: mguevara Date: Fri, 21 Sep 2018 17:45:15 +0200 Subject: [PATCH 48/51] Modified setup.py sample_files.py --- conf/hermes.conf | 23 ++++++++++------------- hermesv3_gr/tools/sample_files.py | 11 +++++------ setup.py | 7 +++---- 3 files changed, 18 insertions(+), 23 
deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index 14f844f..5f79634 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -6,7 +6,7 @@ output_dir = /home/Earth/ctena/HERMES_out output_name = HERMESv3_.nc start_date = 2018/01/01 00:00:00 # ***** end_date = start_date [DEFAULT] ***** -# end_date = 2014/09/03 00:00:00 +# end_date = 2018/01/02 00:00:00 # ***** output_timestep_type = [hourly, daily, monthly, yearly] ***** output_timestep_type = hourly output_timestep_num = 24 @@ -41,8 +41,6 @@ auxiliar_files_path = /data/auxiliar_files/_ #inc_rlon = 0.1 # if domain_type == lcc: - - # EUROPE #lat_1 = 37 #lat_2 = 43 #lon_0 = -3 @@ -55,19 +53,18 @@ auxiliar_files_path = /data/auxiliar_files/_ #y_0 = -2073137.875 # if domain_type == mercator: - - # AFRICA - lat_ts = -1.5 - lon_0 = -18 - nx = 210 - ny = 236 - inc_x = 50000 - inc_y = 50000 - x_0 = -126017.5 - y_0 = -5407460 + #lat_ts = -1.5 + #lon_0 = -18 + #nx = 210 + #ny = 236 + #inc_x = 50000 + #inc_y = 50000 + #x_0 = -126017.5 + #y_0 = -5407460 [EMISSION_INVENTORY_CONFIGURATION] + cross_table = /conf/EI_configuration.csv [EMISSION_INVENTORY_PROFILES] diff --git a/hermesv3_gr/tools/sample_files.py b/hermesv3_gr/tools/sample_files.py index 97573b4..ebb2dd8 100644 --- a/hermesv3_gr/tools/sample_files.py +++ b/hermesv3_gr/tools/sample_files.py @@ -46,11 +46,13 @@ def make_profiles_file_list(): os.path.join(main_dir, 'data', 'profiles', 'speciation', 'MolecularWeights.csv'), os.path.join(main_dir, 'data', 'profiles', 'speciation', 'Speciation_profile_cb05_aero5_CMAQ.csv'), os.path.join(main_dir, 'data', 'profiles', 'speciation', - 'Speciation_profile_cb05_aero5_MONARCH_aerosols.csv'), + 'Speciation_profile_cb05_aero5_MONARCH.csv'), os.path.join(main_dir, 'data', 'profiles', 'speciation', - 'Speciation_profile_cb05_aero5_MONARCH_fullchem.csv'), + 'Speciation_profile_cb05_aero6_CMAQ.csv'), os.path.join(main_dir, 'data', 'profiles', 'speciation', 'Speciation_profile_radm2_madesorgam_WRF_CHEM.csv'), + 
os.path.join(main_dir, 'data', 'profiles', 'speciation', + 'Speciation_profile_cb05e51_aero6_CMAQ.csv'), ]}, {'temporal': [ os.path.join(main_dir, 'data', 'profiles', 'temporal', 'TemporalProfile_Daily.csv'), @@ -59,11 +61,8 @@ def make_profiles_file_list(): os.path.join(main_dir, 'data', 'profiles', 'temporal', 'tz_world_country_iso3166.csv'), ]}, {'vertical': [ - os.path.join(main_dir, 'data', 'profiles', 'vertical', 'Benchmark_15layers_vertical_description.csv'), - os.path.join(main_dir, 'data', 'profiles', 'vertical', - 'MONARCH_Global_48layers_vertical_description.csv'), os.path.join(main_dir, 'data', 'profiles', 'vertical', - 'MONARCH_regional_48layers_vertical_description.csv'), + 'Benchmark_15layers_vertical_description.csv'), os.path.join(main_dir, 'data', 'profiles', 'vertical', 'Vertical_profile.csv'), ]}, ]}, diff --git a/setup.py b/setup.py index d32113b..4466e16 100644 --- a/setup.py +++ b/setup.py @@ -81,8 +81,9 @@ setup( ('data/profiles/speciation', [ 'data/profiles/speciation/MolecularWeights.csv', 'data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv', - 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_aerosols.csv', - 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH_fullchem.csv', + 'data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv', + 'data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv', + 'data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv', 'data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv', ]), ('data/profiles/temporal', [ 'data/profiles/temporal/TemporalProfile_Daily.csv', @@ -91,8 +92,6 @@ setup( 'data/profiles/temporal/tz_world_country_iso3166.csv', ]), ('data/profiles/vertical', [ 'data/profiles/vertical/Benchmark_15layers_vertical_description.csv', - 'data/profiles/vertical/MONARCH_Global_48layers_vertical_description.csv', - 'data/profiles/vertical/MONARCH_regional_48layers_vertical_description.csv', 
'data/profiles/vertical/Vertical_profile.csv', ]), ('preproc', ['preproc/ceds_preproc.py', 'preproc/eclipsev5a_preproc.py', -- GitLab From 5701e7ca91874afb03766330b957f927995a9efc Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 25 Sep 2018 17:41:48 +0200 Subject: [PATCH 49/51] added download benchmark --- conf/EI_configuration.csv | 6 +- conf/hermes.conf | 6 +- .../gfas_emission_inventory.py | 2 +- hermesv3_gr/tools/download_benchmark.py | 104 ++++++++++++++++++ setup.py | 3 + 5 files changed, 114 insertions(+), 7 deletions(-) create mode 100644 hermesv3_gr/tools/download_benchmark.py diff --git a/conf/EI_configuration.csv b/conf/EI_configuration.csv index f24f2f7..8dba532 100644 --- a/conf/EI_configuration.csv +++ b/conf/EI_configuration.csv @@ -1,5 +1,5 @@ ei;sector;ref_year;active;factor_mask;regrid_mask;pollutants;path;frequency;source_type;p_vertical;p_month;p_day;p_hour;p_speciation;comment -GFASv12;;;1;;;co,nox_no,pm25,oc,bc,so2,ch3oh,c2h5oh,c3h8,c2h4,c3h6,c5h8,terpenes,hialkenes,hialkanes,ch2o,c2h4o,c3h6o,nh3,c2h6s,c2h6,c7h8,c6h6,c8h10,c4h8,c5h10,c6h12,c8h16,c4h10,c5h12,c6h14,c7h16;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E001; +GFASv12;;2015;0;;;co,nox_no,pm25,oc,bc,so2,ch3oh,c2h5oh,c3h8,c2h4,c3h6,c5h8,terpenes,hialkenes,hialkanes,ch2o,c2h4o,c3h6o,nh3,c2h6s,c2h6,c7h8,c6h6,c8h10,c4h8,c5h10,c6h12,c8h16,c4h10,c5h12,c6h14,c7h16;/ecmwf/gfas/daily_mean;daily;area;method=sovief,approach=uniform;;;H001;E001; HTAPv2;energy;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc21,voc22,voc23,voc24;/jrc/htapv2/monthly_mean;monthly;area;V001;;D002;H002;E002; HTAPv2;industry;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jrc/htapv2/monthly_mean;monthly;area;V002;;D003;H004;E003; 
HTAPv2;residential;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/jrc/htapv2/monthly_mean;monthly;area;;;D003;H003;E004; @@ -9,7 +9,7 @@ HTAPv2;air_lto;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc02,voc03,voc05,voc06,v HTAPv2;air_cds;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc02,voc03,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc17,voc21,voc22,voc23;/jrc/htapv2/yearly_mean;yearly;area;V004;M001;D001;H001;E007; HTAPv2;air_crs;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc02,voc03,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc17,voc21,voc22,voc23;/jrc/htapv2/yearly_mean;yearly;area;V005;M001;D001;H001;E007; HTAPv2;ships;2010;1;;;co,nox_no2,pm10,pm25,oc,bc,so2,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc21,voc22,voc23,voc24;/jrc/htapv2/yearly_mean;yearly;area;;M001;D001;H001;E008; -wiedinmyer;;2014;1;;;bc,c2h2,c2h4,c3h6,c6h6,ch2o,ch3cooh,ch3oh,co,hcl,nh3,nox_no,oc,pm10,pm25,so2;/ucar/wiedinmyer/yearly_mean;yearly;area;;M001;D001;H008;E009; +wiedinmyer;;2010;1;;;bc,c2h2,c2h4,c3h6,c6h6,ch2o,ch3cooh,ch3oh,co,hcl,nh3,nox_no,oc,pm10,pm25,so2;/ucar/wiedinmyer/yearly_mean;yearly;area;;M001;D001;H008;E009; TNO_MACC-III;snap1;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;V001;M002;D002;H002;E010; TNO_MACC-III;snap2;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;;M003;D003;H003;E011; 
TNO_MACC-III;snap34;2011;0;;;co,nox_no2,so2,nh3,pm10,pm25,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc19,voc21,voc22,voc23,voc24;/tno/tno_macc_iii/yearly_mean/;yearly;area;V002;M004;D003;H004;E012; @@ -86,7 +86,7 @@ EMEP;i_offroad;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emep EMEP;j_waste;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M001;D001;H001;E083; EMEP;k_agrilivestock;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M009;D001;H007;E084; EMEP;l_agriother;2015;0;;;co,nox_no2,pm10,pm25,so2,nmvoc,nh3;/ceip/emepv18/yearly_mean;yearly;area;;M009;D001;H007;E085; -carn;;2015;1;;;so2;/ceip/emepv18/yearly_mean;yearly;point;;M001;D001;H001;E086; +carn;;2015;1;;;so2;/mtu/carnetal/yearly_mean;yearly;point;;M001;D001;H001;E086; CEDS;agriculture;2014;0;;;nox_no2,nh3;/jgcri/ceds/monthly_mean;monthly;area;;;D001;H007;E087; CEDS;energy;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;V001;;D002;H002;E088; CEDS;industrial;2014;0;;;co,nox_no2,oc,bc,so2,nh3,voc01,voc02,voc03,voc04,voc05,voc06,voc07,voc08,voc09,voc12,voc13,voc14,voc15,voc16,voc17,voc18,voc19,voc20,voc21,voc22,voc23,voc24;/jgcri/ceds/monthly_mean;monthly;area;V002;;D003;H004;E089; diff --git a/conf/hermes.conf b/conf/hermes.conf index 5f79634..654279b 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -1,8 +1,8 @@ [GENERAL] log_level = 3 input_dir = /home/Earth/ctena/Models/hermesv3_gr -data_path = /esarchive/recon -output_dir = /home/Earth/ctena/HERMES_out +data_path = /scratch/Earth/HERMESv3_GR_Benchmark/EmissionInventories +output_dir = /scratch/Earth/HERMESv3/OUT output_name = HERMESv3_.nc start_date = 2018/01/01 00:00:00 # ***** end_date = start_date [DEFAULT] ***** @@ -65,7 +65,7 @@ auxiliar_files_path = 
/data/auxiliar_files/_ [EMISSION_INVENTORY_CONFIGURATION] -cross_table = /conf/EI_configuration.csv +cross_table = /conf/EI_configuration_gfas.csv [EMISSION_INVENTORY_PROFILES] diff --git a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py index 9801db8..a03cc27 100755 --- a/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py +++ b/hermesv3_gr/modules/emision_inventories/gfas_emission_inventory.py @@ -80,7 +80,7 @@ class GfasEmissionInventory(EmissionInventory): super(GfasEmissionInventory, self).__init__( options, grid, current_date, inventory_name, source_type, sector, pollutants, inputs_path, frequency, vertical_output_profile, - reference_year=reference_year, factors=factors, regrid_mask=regrid_mask, p_vertical='', + reference_year=reference_year, factors=factors, regrid_mask=regrid_mask, p_vertical=None, p_month=p_month, p_day=p_day, p_hour=p_hour, p_speciation=p_speciation) self.approach = self.get_approach(p_vertical) diff --git a/hermesv3_gr/tools/download_benchmark.py b/hermesv3_gr/tools/download_benchmark.py new file mode 100644 index 0000000..41101b7 --- /dev/null +++ b/hermesv3_gr/tools/download_benchmark.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python + +# Copyright 2018 Earth Sciences Department, BSC-CNS +# +# This file is part of HERMESv3_GR. +# +# HERMESv3_GR is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# HERMESv3_GR is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with HERMESv3_GR. If not, see . 
+ + +import sys +import os + + +def query_yes_no(question, default="yes"): + valid = {"yes": True, "y": True, "1": True, 1: True, + "no": False, "n": False, "0": False, 0: False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = raw_input().lower() + if default is not None and choice == '': + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") + + +def check_args(args, exe_str): + if len(args) == 0: + print("Missing destination path after '{0}'. e.g.:".format(exe_str) + + "\n\t{0} /home/user/HERMES".format(exe_str)) + sys.exit(1) + elif len(args) > 1: + print("Too much arguments through '{0}'. Only destination path is needed e.g.:".format(exe_str) + + "\n\t{0} /home/user/HERMES".format(exe_str)) + sys.exit(1) + else: + dir_path = args[0] + + if not os.path.exists(dir_path): + if query_yes_no("'{0}' does not exist. Do you want to create it? 
".format(dir_path)): + os.makedirs(dir_path) + else: + sys.exit(0) + + return dir_path + + +def download_files(parent_path): + from ftplib import FTP + + ftp = FTP('bscesftp.bsc.es') + ftp.login() + + dst_file = os.path.join(parent_path, 'HERMESv3_GR_Benchmark.zip') + + ftp.retrbinary('RETR HERMESv3_GR_Benchmark.zip', open(dst_file, 'wb').write) + + ftp.quit() + + return dst_file + + +def unzip_files(zippath, parent_path): + import zipfile + + zip_file = zipfile.ZipFile(zippath, 'r') + zip_file.extractall(parent_path) + zip_file.close() + + os.remove(zippath) + + +def download_benchmark(): + argv = sys.argv[1:] + + parent_dir = check_args(argv, 'hermesv3_gr_download_benchmark') + + zippath = download_files(parent_dir) + unzip_files(zippath, parent_dir) + + +if __name__ == '__main__': + download_benchmark() diff --git a/setup.py b/setup.py index 4466e16..1778db7 100644 --- a/setup.py +++ b/setup.py @@ -49,6 +49,8 @@ setup( 'netCDF4>=1.3.1', 'cdo>=1.3.3', 'pandas', + 'fiona', + 'Rtree', 'geopandas', 'pyproj', 'configargparse', @@ -112,6 +114,7 @@ setup( 'hermesv3_gr = hermesv3_gr.hermes:run', 'hermesv3_gr_copy_config_files = hermesv3_gr.tools.sample_files:copy_config_files', 'hermesv3_gr_copy_preproc_files = hermesv3_gr.tools.sample_files:copy_preproc_files', + 'hermesv3_gr_download_benchmark = hermesv3_gr.tools.download_benchmark:download_benchmark', ], }, ) -- GitLab From f04b7198ef9f623464895cf0455f8aadb5f4893b Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Tue, 25 Sep 2018 17:45:39 +0200 Subject: [PATCH 50/51] added download benchmark --- hermesv3_gr/tools/download_benchmark.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/hermesv3_gr/tools/download_benchmark.py b/hermesv3_gr/tools/download_benchmark.py index 41101b7..5d58bc8 100644 --- a/hermesv3_gr/tools/download_benchmark.py +++ b/hermesv3_gr/tools/download_benchmark.py @@ -71,7 +71,6 @@ def download_files(parent_path): ftp = FTP('bscesftp.bsc.es') ftp.login() - dst_file = os.path.join(parent_path, 
'HERMESv3_GR_Benchmark.zip') ftp.retrbinary('RETR HERMESv3_GR_Benchmark.zip', open(dst_file, 'wb').write) @@ -87,7 +86,6 @@ def unzip_files(zippath, parent_path): zip_file = zipfile.ZipFile(zippath, 'r') zip_file.extractall(parent_path) zip_file.close() - os.remove(zippath) -- GitLab From c192ffcc03bddb42f032393b38762cd9ce120e6f Mon Sep 17 00:00:00 2001 From: Carles Tena Date: Wed, 10 Oct 2018 10:56:56 +0200 Subject: [PATCH 51/51] Updating paths --- conf/hermes.conf | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/conf/hermes.conf b/conf/hermes.conf index 654279b..aa0e356 100644 --- a/conf/hermes.conf +++ b/conf/hermes.conf @@ -1,8 +1,8 @@ [GENERAL] log_level = 3 -input_dir = /home/Earth/ctena/Models/hermesv3_gr -data_path = /scratch/Earth/HERMESv3_GR_Benchmark/EmissionInventories -output_dir = /scratch/Earth/HERMESv3/OUT +input_dir = /home/user/HERMES/HERMES_IN +data_path = /home/user/HERMES/datasets +output_dir = /home/user/HERMES/HERMES_OUT output_name = HERMESv3_.nc start_date = 2018/01/01 00:00:00 # ***** end_date = start_date [DEFAULT] ***** @@ -65,7 +65,7 @@ auxiliar_files_path = /data/auxiliar_files/_ [EMISSION_INVENTORY_CONFIGURATION] -cross_table = /conf/EI_configuration_gfas.csv +cross_table = /conf/EI_configuration.csv [EMISSION_INVENTORY_PROFILES] @@ -74,10 +74,6 @@ p_month = /data/profiles/temporal/TemporalProfile_Monthly.csv p_day = /data/profiles/temporal/TemporalProfile_Daily.csv p_hour = /data/profiles/temporal/TemporalProfile_Hourly.csv p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero5_MONARCH.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero5_CMAQ.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_cb05_aero6_CMAQ.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_cb05e51_aero6_CMAQ.csv -# p_speciation = /data/profiles/speciation/Speciation_profile_radm2_madesorgam_WRF_CHEM.csv molecular_weights = 
/data/profiles/speciation/MolecularWeights.csv world_info = /data/profiles/temporal/tz_world_country_iso3166.csv -- GitLab