diff --git a/Jupyter_notebooks/1-introduction.ipynb b/Jupyter_notebooks/1-introduction.ipynb index 615860d3bed82415dcc5a0269940034654a10415..ad512f3f136e5818ab101c68208be42ac25abd51 100644 --- a/Jupyter_notebooks/1-introduction.ipynb +++ b/Jupyter_notebooks/1-introduction.ipynb @@ -32,8 +32,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 175 ms, sys: 159 ms, total: 335 ms\n", - "Wall time: 15.7 s\n" + "CPU times: user 156 ms, sys: 52.8 ms, total: 209 ms\n", + "Wall time: 15.2 s\n" ] } ], @@ -50,7 +50,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 3, @@ -341,8 +341,8 @@ "text": [ "Rank 000: Loading O3 var (1/1)\n", "Rank 000: Loaded O3 var ((109, 24, 361, 467))\n", - "CPU times: user 1.21 s, sys: 7.1 s, total: 8.32 s\n", - "Wall time: 42 s\n" + "CPU times: user 1.15 s, sys: 6.44 s, total: 7.6 s\n", + "Wall time: 39.5 s\n" ] } ], @@ -379,8 +379,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 723 ms, sys: 719 ms, total: 1.44 s\n", - "Wall time: 14.3 s\n" + "CPU times: user 708 ms, sys: 497 ms, total: 1.2 s\n", + "Wall time: 14.5 s\n" ] } ], @@ -405,8 +405,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 403 ms, sys: 206 ms, total: 609 ms\n", - "Wall time: 611 ms\n" + "CPU times: user 340 ms, sys: 482 ms, total: 822 ms\n", + "Wall time: 823 ms\n" ] } ], @@ -442,8 +442,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 49.2 ms, sys: 30.3 ms, total: 79.4 ms\n", - "Wall time: 837 ms\n" + "CPU times: user 39.4 ms, sys: 29.8 ms, total: 69.2 ms\n", + "Wall time: 715 ms\n" ] } ], diff --git a/Jupyter_notebooks/1.1-regular_grids.ipynb b/Jupyter_notebooks/1.1-regular_grids.ipynb index c6733d6d81818c4b45234704f8b4fd7b1a539eb5..3104f0c87c3a1d8d2b62ef96fb62991635fbe403 100644 --- a/Jupyter_notebooks/1.1-regular_grids.ipynb +++ b/Jupyter_notebooks/1.1-regular_grids.ipynb @@ -1050,16 +1050,16 @@ " Domain: Global\n", " Conventions: None\n", " history: MONARCHv1.0 netcdf file.\n", - " comment: Generated on marenostrum4
  [xarray HTML repr omitted; attributes (Domain: Global, Conventions: None, history: MONARCHv1.0 netcdf file., comment: Generated on marenostrum4) match the text/plain output]
  • " ], "text/plain": [ "\n", @@ -1271,7 +1271,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 5, @@ -1895,7 +1895,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -2281,14 +2281,14 @@ " Domain: Global\n", " Conventions: CF-1.7\n", " history: MONARCHv1.0 netcdf file.\n", - " comment: Generated on marenostrum4
  [xarray HTML repr omitted; data variables O3 (time, lev, lat, lon) float32 with grid_mapping crs, and crs (grid_mapping_name: latitude_longitude, semi_major_axis: 6371000.0, inverse_flattening: 0); attributes (Domain: Global, Conventions: CF-1.7, history: MONARCHv1.0 netcdf file., comment: Generated on marenostrum4) match the text/plain output]
  • " ], "text/plain": [ "\n", diff --git a/Jupyter_notebooks/1.2-rotated_grids.ipynb b/Jupyter_notebooks/1.2-rotated_grids.ipynb index 4cff1df6df2c0bf97ae9c90b55077140afc990b6..4116ba2633b211808256b4b66436fe10d02aa75a 100644 --- a/Jupyter_notebooks/1.2-rotated_grids.ipynb +++ b/Jupyter_notebooks/1.2-rotated_grids.ipynb @@ -415,14 +415,14 @@ " rotated_pole |S1 b''\n", "Attributes:\n", " Conventions: CF-1.7\n", - " comment: Generated on marenostrum4
  [xarray HTML repr omitted; data variables time_bnds (time, nv) spanning 2021-08-03 to 2021-08-07, O3_all (time, lev, rlat, rlon) float32 in kg/m3 with grid_mapping rotated_pole, and rotated_pole (grid_mapping_name: rotated_latitude_longitude, grid_north_pole_latitude: 39.0, grid_north_pole_longitude: -170.0); attributes (Conventions: CF-1.7, comment: Generated on marenostrum4) match the text/plain output]
  • " ], "text/plain": [ "\n", @@ -483,7 +483,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -810,7 +810,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 12, @@ -1198,14 +1198,14 @@ " rotated_pole |S1 b''\n", "Attributes:\n", " Conventions: CF-1.7\n", - " comment: Generated on marenostrum4
  [xarray HTML repr omitted; data variables time_bnds (time, time_nv) spanning 2021-08-03 to 2021-08-07, O3_all (time, lev, rlat, rlon) float32 in kg/m3 with grid_mapping rotated_pole, and rotated_pole (grid_mapping_name: rotated_latitude_longitude, grid_north_pole_latitude: 39.0, grid_north_pole_longitude: -170.0); attributes (Conventions: CF-1.7, comment: Generated on marenostrum4) match the text/plain output]
  • " ], "text/plain": [ "\n", diff --git a/Jupyter_notebooks/1.3-points_grids.ipynb b/Jupyter_notebooks/1.3-points_grids.ipynb index 966ab17efc2bc372b374a42703695727d852c786..2457c7e4f8cf9664ee58ab19b2cb599a203a052f 100644 --- a/Jupyter_notebooks/1.3-points_grids.ipynb +++ b/Jupyter_notebooks/1.3-points_grids.ipynb @@ -418,7 +418,7 @@ " station_end_date (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n", " station_rural_back (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n", " latitude (station) float32 46.81 47.48 ... 53.33 38.88\n", - " station_ozone_classification (station) |S75 b'rural' b'rural' ... b'nan'
  • " ], "text/plain": [ "\n", @@ -710,7 +710,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -805,8 +805,7 @@ " 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,\n", " 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,\n", " 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,\n", - " 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83]),\n", - " 'units': ''}" + " 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83])}" ] }, "execution_count": 7, @@ -1409,7 +1408,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -1789,26 +1788,27 @@ " fill: currentColor;\n", "}\n", "
  [xarray HTML repr diff omitted; it mirrors the text/plain diff below: a strlen: 75 dimension is added and the |S75 string variables become per-character (station, strlen) object arrays; attributes: Conventions: CF-1.7]
  • " ], "text/plain": [ "\n", - "Dimensions: (time: 31, station: 84)\n", + "Dimensions: (time: 31, station: 84, strlen: 75)\n", "Coordinates:\n", " * time (time) datetime64[ns] 2015-07-01 ... 2015-0...\n", " * station (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n", + "Dimensions without coordinates: strlen\n", "Data variables: (12/19)\n", - " station_start_date (station) |S75 ...\n", - " station_zone (station) |S75 ...\n", - " street_type (station) |S75 ...\n", - " country_code (station) |S75 ...\n", - " ccaa (station) |S75 ...\n", - " station_name (station) |S75 ...\n", - " ... ...\n", - " station_code (station) |S75 ...\n", - " station_end_date (station) |S75 ...\n", - " station_rural_back (station) |S75 ...\n", - " station_ozone_classification (station) |S75 ...\n", " lat (station) float64 ...\n", " lon (station) float64 ...\n", + " station_start_date (station, strlen) object ...\n", + " station_zone (station, strlen) object ...\n", + " street_type (station, strlen) object ...\n", + " country_code (station, strlen) object ...\n", + " ... ...\n", + " country (station, strlen) object ...\n", + " altitude (station) float32 ...\n", + " station_code (station, strlen) object ...\n", + " station_end_date (station, strlen) object ...\n", + " station_rural_back (station, strlen) object ...\n", + " station_ozone_classification (station, strlen) object ...\n", "Attributes:\n", " Conventions: CF-1.7" ] @@ -2501,7 +2411,7 @@ " source: Surface observations\n", " creator_name: Dene R. Bowdalo\n", " creator_email: dene.bowdalo@bsc.es\n", - " version: 1.4
  [xarray HTML repr omitted; attributes (title: Surface sulphate data in the EANET network in 2019-11., institution: Barcelona Supercomputing Center, source: Surface observations, creator_name: Dene R. Bowdalo, creator_email: dene.bowdalo@bsc.es, version: 1.4) match the text/plain output]
  • " ], "text/plain": [ "\n", @@ -2727,7 +2637,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 18, @@ -2817,7 +2727,7 @@ { "data": { "text/plain": [ - "{'data': array([0, 1, 2]), 'units': ''}" + "{'data': array([0, 1, 2])}" ] }, "execution_count": 21, @@ -3467,7 +3377,21 @@ "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km data (50/173)\n", "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km completed (50/173)\n", "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/173)\n", - "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/173)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_ghost.py:568: UserWarning: WARNING!!! GHOST datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km data (51/173)\n", "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km completed (51/173)\n", "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/173)\n", @@ -3978,18 +3902,373 @@ "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading station var (1/174)\n", + "Rank 000: Loaded station var ((3,))\n", + "Rank 000: Loading ASTER_v3_altitude var (2/174)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (3/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (4/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (5/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (6/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (7/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (8/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (9/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (10/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (11/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (12/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (13/174)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_Meybeck_landform_classification var (14/174)\n", + "Rank 000: Loaded 
ESDAC_Meybeck_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (15/174)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (16/174)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (17/174)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (18/174)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ETOPO1_altitude var (19/174)\n", + "Rank 000: Loaded ETOPO1_altitude var ((3,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (20/174)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((3,))\n", + "Rank 000: Loading GHOST_version var (21/174)\n", + "Rank 000: Loaded GHOST_version var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (22/174)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (23/174)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (24/174)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (25/174)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (26/174)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (27/174)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (28/174)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (29/174)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (30/174)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (31/174)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (32/174)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((3,))\n", + "Rank 000: Loading GHSL_population_density var (33/174)\n", + "Rank 000: Loaded GHSL_population_density var ((3,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (34/174)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (35/174)\n", + "Rank 000: Loaded GPW_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (36/174)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (37/174)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_5km var (38/174)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((3,))\n", + "Rank 000: 
Loading GPW_population_density var (39/174)\n", + "Rank 000: Loaded GPW_population_density var ((3,))\n", + "Rank 000: Loading GSFC_coastline_proximity var (40/174)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((3,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (41/174)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (42/174)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (43/174)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (44/174)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (45/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (46/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (47/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (48/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (49/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (50/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (51/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (52/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (53/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (54/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (55/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (56/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (57/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (58/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((3,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (59/174)\n", + "Rank 000: Loaded OMI_level3_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (60/174)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (61/174)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (62/174)\n", + "Rank 000: Loaded 
OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (63/174)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (64/174)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (65/174)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((3,))\n", + "Rank 000: Loading WMO_region var (66/174)\n", + "Rank 000: Loaded WMO_region var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (67/174)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biome var (68/174)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((3,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (69/174)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((3,))\n", + "Rank 000: Loading administrative_country_division_1 var (70/174)\n", + "Rank 000: Loaded administrative_country_division_1 var ((3,))\n", + "Rank 000: Loading administrative_country_division_2 var (71/174)\n", + "Rank 000: Loaded administrative_country_division_2 var ((3,))\n", + "Rank 000: Loading altitude var (72/174)\n", + "Rank 000: Loaded altitude var ((3,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (73/174)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading annual_native_representativity_percent var (74/174)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading area_classification var (75/174)\n", + "Rank 000: Loaded area_classification var ((3,))\n", + "Rank 000: Loading associated_networks var (76/174)\n", + "Rank 000: Loaded associated_networks var ((3,))\n", + "Rank 000: Loading city var (77/174)\n", + "Rank 000: Loaded city var ((3,))\n", + "Rank 000: Loading climatology var (78/174)\n", + "Rank 000: Loaded climatology var ((3,))\n", + "Rank 000: Loading contact_email_address var (79/174)\n", + "Rank 000: Loaded contact_email_address var ((3,))\n", + "Rank 000: Loading contact_institution var (80/174)\n", + "Rank 000: Loaded contact_institution var ((3,))\n", + "Rank 000: Loading contact_name var (81/174)\n", + "Rank 000: Loaded contact_name var ((3,))\n", + "Rank 000: Loading country var (82/174)\n", + "Rank 000: Loaded country var ((3,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (83/174)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading daily_native_representativity_percent var (84/174)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading daily_passing_vehicles var (85/174)\n", + "Rank 000: Loaded daily_passing_vehicles var ((3,))\n", + "Rank 000: Loading data_level var (86/174)\n", + "Rank 000: Loaded data_level var ((3,))\n", + "Rank 000: Loading data_licence var (87/174)\n", + "Rank 000: Loaded data_licence var ((3,))\n", + "Rank 000: Loading day_night_code var (88/174)\n", + "Rank 000: Loaded day_night_code var ((3, 30))\n", + "Rank 000: Loading daytime_traffic_speed var (89/174)\n", + "Rank 000: Loaded daytime_traffic_speed var ((3,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (90/174)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading distance_to_building var (91/174)\n", + "Rank 000: Loaded distance_to_building var ((3,))\n", 
+ "Rank 000: Loading distance_to_junction var (92/174)\n", + "Rank 000: Loaded distance_to_junction var ((3,))\n", + "Rank 000: Loading distance_to_kerb var (93/174)\n", + "Rank 000: Loaded distance_to_kerb var ((3,))\n", + "Rank 000: Loading distance_to_source var (94/174)\n", + "Rank 000: Loaded distance_to_source var ((3,))\n", + "Rank 000: Loading ellipsoid var (95/174)\n", + "Rank 000: Loaded ellipsoid var ((3,))\n", + "Rank 000: Loading horizontal_datum var (96/174)\n", + "Rank 000: Loaded horizontal_datum var ((3,))\n", + "Rank 000: Loading land_use var (97/174)\n", + "Rank 000: Loaded land_use var ((3,))\n", + "Rank 000: Loading main_emission_source var (98/174)\n", + "Rank 000: Loaded main_emission_source var ((3,))\n", + "Rank 000: Loading measurement_altitude var (99/174)\n", + "Rank 000: Loaded measurement_altitude var ((3,))\n", + "Rank 000: Loading measurement_methodology var (100/174)\n", + "Rank 000: Loaded measurement_methodology var ((3,))\n", + "Rank 000: Loading measurement_scale var (101/174)\n", + "Rank 000: Loaded measurement_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (102/174)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (103/174)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (104/174)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (105/174)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (106/174)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (107/174)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (108/174)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_span_drift var (109/174)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (110/174)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (111/174)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (112/174)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zonal_drift var (113/174)\n", + "Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_further_details var (114/174)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (115/174)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((3,))\n", + "Rank 000: Loading measuring_instrument_manual_name var (116/174)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((3,))\n", + "Rank 000: 
Loading measuring_instrument_name var (117/174)\n", + "Rank 000: Loaded measuring_instrument_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_process_details var (118/174)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (119/174)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (120/174)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (121/174)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (122/174)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (123/174)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (124/174)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (125/174)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (126/174)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (127/174)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (128/174)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (129/174)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (130/174)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_sampling_type var (131/174)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((3,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (132/174)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading monthly_native_representativity_percent var (133/174)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading network var (134/174)\n", + "Rank 000: Loaded network var ((3,))\n", + "Rank 000: Loading network_maintenance_details var (135/174)\n", + "Rank 000: Loaded network_maintenance_details var ((3,))\n", + "Rank 000: Loading network_miscellaneous_details var (136/174)\n", + "Rank 000: Loaded network_miscellaneous_details var ((3,))\n", + "Rank 000: Loading network_provided_volume_standard_pressure var (137/174)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((3,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (138/174)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((3,))\n", + "Rank 000: Loading network_qa_details var (139/174)\n", + "Rank 000: Loaded network_qa_details var ((3,))\n", + "Rank 000: Loading network_sampling_details var (140/174)\n", + "Rank 000: Loaded 
network_sampling_details var ((3,))\n", + "Rank 000: Loading network_uncertainty_details var (141/174)\n", + "Rank 000: Loaded network_uncertainty_details var ((3,))\n", + "Rank 000: Loading population var (142/174)\n", + "Rank 000: Loaded population var ((3,))\n", + "Rank 000: Loading primary_sampling_further_details var (143/174)\n", + "Rank 000: Loaded primary_sampling_further_details var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (144/174)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (145/174)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (146/174)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (147/174)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_process_details var (148/174)\n", + "Rank 000: Loaded primary_sampling_process_details var ((3,))\n", + "Rank 000: Loading primary_sampling_type var (149/174)\n", + "Rank 000: Loaded primary_sampling_type var ((3,))\n", + "Rank 000: Loading principal_investigator_email_address var (150/174)\n", + "Rank 000: Loaded principal_investigator_email_address var ((3,))\n", + "Rank 000: Loading principal_investigator_institution var (151/174)\n", + "Rank 000: Loaded principal_investigator_institution var ((3,))\n", + "Rank 000: Loading principal_investigator_name var (152/174)\n", + "Rank 000: Loaded principal_investigator_name var ((3,))\n", + "Rank 000: Loading process_warnings var (153/174)\n", + "Rank 000: Loaded process_warnings var ((3,))\n", + "Rank 000: Loading projection var (154/174)\n", + "Rank 000: Loaded projection var ((3,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (155/174)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading representative_radius var (156/174)\n", + "Rank 000: Loaded representative_radius var ((3,))\n", + "Rank 000: Loading retrieval_algorithm var (157/174)\n", + "Rank 000: Loaded retrieval_algorithm var ((3,))\n", + "Rank 000: Loading sample_preparation_further_details var (158/174)\n", + "Rank 000: Loaded sample_preparation_further_details var ((3,))\n", + "Rank 000: Loading sample_preparation_process_details var (159/174)\n", + "Rank 000: Loaded sample_preparation_process_details var ((3,))\n", + "Rank 000: Loading sample_preparation_techniques var (160/174)\n", + "Rank 000: Loaded sample_preparation_techniques var ((3,))\n", + "Rank 000: Loading sample_preparation_types var (161/174)\n", + "Rank 000: Loaded sample_preparation_types var ((3,))\n", + "Rank 000: Loading sampling_height var (162/174)\n", + "Rank 000: Loaded sampling_height var ((3,))\n", + "Rank 000: Loading sconcso4 var (163/174)\n", + "Rank 000: Loaded sconcso4 var ((3, 30))\n", + "Rank 000: Loading season_code var (164/174)\n", + "Rank 000: Loaded season_code var ((3, 30))\n", + "Rank 000: Loading station_classification var (165/174)\n", + "Rank 000: Loaded station_classification var ((3,))\n", + "Rank 000: Loading station_name var (166/174)\n", + "Rank 000: Loaded station_name var ((3,))\n", + "Rank 000: Loading station_reference var (167/174)\n", + "Rank 000: Loaded station_reference var ((3,))\n", + "Rank 000: Loading station_timezone var (168/174)\n", + "Rank 000: 
Loaded station_timezone var ((3,))\n", + "Rank 000: Loading street_type var (169/174)\n", + "Rank 000: Loaded street_type var ((3,))\n", + "Rank 000: Loading street_width var (170/174)\n", + "Rank 000: Loaded street_width var ((3,))\n", + "Rank 000: Loading terrain var (171/174)\n", + "Rank 000: Loaded terrain var ((3,))\n", + "Rank 000: Loading vertical_datum var (172/174)\n", + "Rank 000: Loaded vertical_datum var ((3,))\n", + "Rank 000: Loading weekday_weekend_code var (173/174)\n", + "Rank 000: Loaded weekday_weekend_code var ((3, 30))\n", + "Rank 000: Loading sconcso4_prefiltered_defaultqa var (174/174)\n", + "Rank 000: Loaded sconcso4_prefiltered_defaultqa var ((3, 30))\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } ], "source": [ "nessy_ghost_2 = open_netcdf('points_file_2.nc', info=True, parallel_method='X')\n", + "nessy_ghost_2.load()\n", "nessy_ghost_2" ] }, @@ -4366,8 +4645,8 @@ " * station (station) float64 ...\n", "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", "Data variables: (12/177)\n", - " flag (station, time, N_flag_codes) int64 ...\n", - " qa (station, time, N_qa_codes) int64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", " ASTER_v3_altitude (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", @@ -4377,8 +4656,8 @@ " vertical_datum (station) object ...\n", " weekday_weekend_code (station, time) uint8 ...\n", " sconcso4_prefiltered_defaultqa (station, time) float32 ...\n", - " lat (station) float64 ...\n", - " lon (station) float64 ...\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", "Attributes:\n", " title: Surface sulphate data in the EANET network in 2019-11.\n", " institution: Barcelona Supercomputing Center\n", @@ -4386,7 +4665,7 @@ " creator_name: Dene R. Bowdalo\n", " creator_email: dene.bowdalo@bsc.es\n", " version: 1.4\n", - " Conventions: CF-1.7
  [xarray HTML repr omitted; attributes (title: Surface sulphate data in the EANET network in 2019-11., institution: Barcelona Supercomputing Center, source: Surface observations, creator_name: Dene R. Bowdalo, creator_email: dene.bowdalo@bsc.es, version: 1.4, Conventions: CF-1.7) match the text/plain output]
  • " ], "text/plain": [ "\n", @@ -4567,8 +4846,8 @@ " * station (station) float64 ...\n", "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", "Data variables: (12/177)\n", - " flag (station, time, N_flag_codes) int64 ...\n", - " qa (station, time, N_qa_codes) int64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", " ASTER_v3_altitude (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", @@ -4578,8 +4857,8 @@ " vertical_datum (station) object ...\n", " weekday_weekend_code (station, time) uint8 ...\n", " sconcso4_prefiltered_defaultqa (station, time) float32 ...\n", - " lat (station) float64 ...\n", - " lon (station) float64 ...\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", "Attributes:\n", " title: Surface sulphate data in the EANET network in 2019-11.\n", " institution: Barcelona Supercomputing Center\n", @@ -4598,6 +4877,161 @@ "source": [ "xr.open_dataset('points_file_2.nc')" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Transform to points" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_3 = nessy_ghost_2.to_points()\n", + "nessy_ghost_3" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'station': {'data': masked_array(data=[0., 1., 2.],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'units': '',\n", + " 'axis': 'X',\n", + " 'long_name': '',\n", + " 'standard_name': 'station'},\n", + " 'sconcso4': {'data': masked_array(\n", + " data=[[ nan, nan, nan, nan, nan,\n", + " nan, nan, 2.31 , 2.31 , 1.12 ,\n", + " 1.12 , nan, nan, nan, nan,\n", + " 1.71 , 1.71 , nan, nan, nan,\n", + " nan, nan, nan, nan, nan,\n", + " nan, nan, 1.38 , 1.2841667, 1.28 ],\n", + " [ nan, nan, nan, 0.74 , 0.74 ,\n", + " nan, nan, nan, nan, 3.41 ,\n", + " 3.41 , nan, nan, nan, nan,\n", + " 0.74 , 0.74 , nan, nan, nan,\n", + " nan, 1.2 , 1.2 , nan, nan,\n", + " nan, nan, 1.76 , 1.76 , nan],\n", + " [ nan, nan, nan, 3.05 , 3.05 ,\n", + " nan, nan, nan, nan, 2.44 ,\n", + " 2.44 , nan, nan, nan, nan,\n", + " 2.24 , 2.24 , nan, nan, nan,\n", + " nan, 1.37 , 1.37 , nan, nan,\n", + " nan, nan, nan, nan, nan]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station', 'time'),\n", + " 'standard_name': 'sulphate',\n", + " 'long_name': 'sulphate',\n", + " 'units': 'ug m-3',\n", + " 'description': 'Measured value of surface sulphate for the stated temporal resolution.'},\n", + " 'sconcso4_prefiltered_defaultqa': {'data': masked_array(\n", + " data=[[ nan, nan, nan, nan, nan,\n", + " nan, nan, 2.31 , 2.31 , 1.12 ,\n", + " 1.12 , nan, nan, nan, nan,\n", + " 1.71 , 1.71 , nan, nan, nan,\n", + " nan, nan, nan, nan, nan,\n", + " nan, nan, 1.38 , 1.2841667, 1.28 ],\n", + " [ nan, nan, nan, 0.74 , 0.74 ,\n", + " nan, nan, nan, nan, 3.41 ,\n", + " 3.41 , nan, nan, nan, nan,\n", + " 0.74 , 0.74 , nan, nan, nan,\n", + " nan, 1.2 , 1.2 , nan, nan,\n", + " nan, nan, 1.76 , 1.76 , nan],\n", + " [ nan, nan, nan, 3.05 , 3.05 ,\n", + " nan, nan, nan, nan, 2.44 ,\n", + " 2.44 , nan, nan, nan, nan,\n", + " 2.24 , 2.24 , nan, 
nan, nan,\n", + " nan, 1.37 , 1.37 , nan, nan,\n", + " nan, nan, nan, nan, nan]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station', 'time'),\n", + " 'standard_name': 'sulphate',\n", + " 'long_name': 'sulphate',\n", + " 'units': 'ug m-3',\n", + " 'description': 'Measured value of surface sulphate for the stated temporal resolution. Prefiltered by default QA.'}}" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_3.variables" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_file_3.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station var (1/3)\n", + "**ERROR** an error has occurred while writing the 'station' variable\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "NetCDF: String match to name in use", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mnessy_ghost_3\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_netcdf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'points_file_3.nc'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36mto_netcdf\u001b[0;34m(self, path, compression_level, serial, info, chunking)\u001b[0m\n\u001b[1;32m 1786\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1787\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1788\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__to_netcdf_py\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1789\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1790\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mold_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36m__to_netcdf_py\u001b[0;34m(self, path, chunking)\u001b[0m\n\u001b[1;32m 1736\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[0;31m# Create variables\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1738\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_create_variables\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetcdf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1739\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1740\u001b[0m \u001b[0;31m# Create 
metadata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 447\u001b[0m \u001b[0;31m# print(\"**ERROR** an error has occurredred while writing the '{0}' variable\".format(var_name),\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 448\u001b[0m \u001b[0;31m# file=sys.stderr)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 449\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 450\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 451\u001b[0m \u001b[0mmsg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"WARNING!!! \"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 374\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 375\u001b[0m var = netcdf.createVariable(var_name, var_dtype, var_dims,\n\u001b[0;32m--> 376\u001b[0;31m zlib=self.zip_lvl > 0, complevel=self.zip_lvl)\n\u001b[0m\u001b[1;32m 377\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 378\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmaster\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Dataset.createVariable\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Variable.__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4._ensure_nc_success\u001b[0;34m()\u001b[0m\n", + "\u001b[0;31mRuntimeError\u001b[0m: NetCDF: String match to name in use" + ] + } + ], + "source": [ + "nessy_ghost_3.to_netcdf('points_file_3.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/Jupyter_notebooks/1.4-lcc_grids.ipynb b/Jupyter_notebooks/1.4-lcc_grids.ipynb index 91f50cec892013d13f02c4d288a812ad34b0dd4d..fe5fa76df0ce9ecf9cefb3d88ffc311295498064 100644 --- a/Jupyter_notebooks/1.4-lcc_grids.ipynb +++ b/Jupyter_notebooks/1.4-lcc_grids.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "nc_path_1 = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/pm10/pm10_2022062600.nc'" + "nc_path_1 = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/sconco3/sconco3_2021010100.nc'" ] }, { @@ -402,54 +402,54 @@ "
  [xarray HTML repr diff omitted; it mirrors the text/plain diff below: the time coordinate changes from 2022-06-26 ... 2022-06-27T23:00:00 to 2021-01-01 ... 2021-01-02T23:00:00 and the pm10 variable is replaced by sconco3]
    " + " sconco3 (time, lev, y, x) float32 ...\n", + " Lambert_conformal int32 -2147483647" ], "text/plain": [ "\n", "Dimensions: (time: 48, y: 398, x: 478, lev: 1)\n", "Coordinates:\n", - " * time (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n", " lat (y, x) float32 ...\n", " lon (y, x) float32 ...\n", " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", " * lev (lev) float32 0.0\n", "Data variables:\n", - " pm10 (time, lev, y, x) float32 ...\n", + " sconco3 (time, lev, y, x) float32 ...\n", " Lambert_conformal int32 ..." ] }, @@ -477,7 +477,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -498,54 +498,54 @@ { "data": { "text/plain": [ - "[datetime.datetime(2022, 6, 26, 0, 0),\n", - " datetime.datetime(2022, 6, 26, 1, 0),\n", - " datetime.datetime(2022, 6, 26, 2, 0),\n", - " datetime.datetime(2022, 6, 26, 3, 0),\n", - " datetime.datetime(2022, 6, 26, 4, 0),\n", - " datetime.datetime(2022, 6, 26, 5, 0),\n", - " datetime.datetime(2022, 6, 26, 6, 0),\n", - " datetime.datetime(2022, 6, 26, 7, 0),\n", - " datetime.datetime(2022, 6, 26, 8, 0),\n", - " datetime.datetime(2022, 6, 26, 9, 0),\n", - " datetime.datetime(2022, 6, 26, 10, 0),\n", - " datetime.datetime(2022, 6, 26, 11, 0),\n", - " datetime.datetime(2022, 6, 26, 12, 0),\n", - " datetime.datetime(2022, 6, 26, 13, 0),\n", - " datetime.datetime(2022, 6, 26, 14, 0),\n", - " datetime.datetime(2022, 6, 26, 15, 0),\n", - " datetime.datetime(2022, 6, 26, 16, 0),\n", - " datetime.datetime(2022, 6, 26, 17, 0),\n", - " datetime.datetime(2022, 6, 26, 18, 0),\n", - " datetime.datetime(2022, 6, 26, 19, 0),\n", - " datetime.datetime(2022, 6, 26, 20, 0),\n", - " datetime.datetime(2022, 6, 26, 21, 0),\n", - " datetime.datetime(2022, 6, 26, 22, 0),\n", - " datetime.datetime(2022, 6, 26, 23, 0),\n", - " datetime.datetime(2022, 6, 27, 0, 0),\n", - " datetime.datetime(2022, 6, 27, 1, 0),\n", - " datetime.datetime(2022, 6, 27, 2, 0),\n", - " datetime.datetime(2022, 6, 27, 3, 0),\n", - " datetime.datetime(2022, 6, 27, 4, 0),\n", - " datetime.datetime(2022, 6, 27, 5, 0),\n", - " datetime.datetime(2022, 6, 27, 6, 0),\n", - " datetime.datetime(2022, 6, 27, 7, 0),\n", - " datetime.datetime(2022, 6, 27, 8, 0),\n", - " datetime.datetime(2022, 6, 27, 9, 0),\n", - " datetime.datetime(2022, 6, 27, 10, 0),\n", - " datetime.datetime(2022, 6, 27, 11, 0),\n", - " datetime.datetime(2022, 6, 27, 12, 0),\n", - " datetime.datetime(2022, 6, 27, 13, 0),\n", - " datetime.datetime(2022, 6, 27, 14, 0),\n", - " datetime.datetime(2022, 6, 27, 15, 0),\n", - " datetime.datetime(2022, 6, 27, 16, 0),\n", - " datetime.datetime(2022, 6, 27, 17, 0),\n", - " datetime.datetime(2022, 6, 27, 18, 0),\n", - " datetime.datetime(2022, 6, 27, 19, 0),\n", - " datetime.datetime(2022, 6, 27, 20, 0),\n", - " datetime.datetime(2022, 6, 27, 21, 0),\n", - " datetime.datetime(2022, 6, 27, 22, 0),\n", - " datetime.datetime(2022, 6, 27, 23, 0)]" + "[datetime.datetime(2021, 1, 1, 0, 0),\n", + " datetime.datetime(2021, 1, 1, 1, 0),\n", + " datetime.datetime(2021, 1, 1, 2, 0),\n", + " datetime.datetime(2021, 1, 1, 3, 0),\n", + " datetime.datetime(2021, 1, 1, 4, 0),\n", + " datetime.datetime(2021, 1, 1, 5, 0),\n", + " datetime.datetime(2021, 1, 1, 6, 0),\n", + " datetime.datetime(2021, 1, 1, 7, 0),\n", + " datetime.datetime(2021, 1, 1, 8, 0),\n", + " datetime.datetime(2021, 1, 1, 9, 0),\n", + " datetime.datetime(2021, 1, 1, 
10, 0),\n", + " datetime.datetime(2021, 1, 1, 11, 0),\n", + " datetime.datetime(2021, 1, 1, 12, 0),\n", + " datetime.datetime(2021, 1, 1, 13, 0),\n", + " datetime.datetime(2021, 1, 1, 14, 0),\n", + " datetime.datetime(2021, 1, 1, 15, 0),\n", + " datetime.datetime(2021, 1, 1, 16, 0),\n", + " datetime.datetime(2021, 1, 1, 17, 0),\n", + " datetime.datetime(2021, 1, 1, 18, 0),\n", + " datetime.datetime(2021, 1, 1, 19, 0),\n", + " datetime.datetime(2021, 1, 1, 20, 0),\n", + " datetime.datetime(2021, 1, 1, 21, 0),\n", + " datetime.datetime(2021, 1, 1, 22, 0),\n", + " datetime.datetime(2021, 1, 1, 23, 0),\n", + " datetime.datetime(2021, 1, 2, 0, 0),\n", + " datetime.datetime(2021, 1, 2, 1, 0),\n", + " datetime.datetime(2021, 1, 2, 2, 0),\n", + " datetime.datetime(2021, 1, 2, 3, 0),\n", + " datetime.datetime(2021, 1, 2, 4, 0),\n", + " datetime.datetime(2021, 1, 2, 5, 0),\n", + " datetime.datetime(2021, 1, 2, 6, 0),\n", + " datetime.datetime(2021, 1, 2, 7, 0),\n", + " datetime.datetime(2021, 1, 2, 8, 0),\n", + " datetime.datetime(2021, 1, 2, 9, 0),\n", + " datetime.datetime(2021, 1, 2, 10, 0),\n", + " datetime.datetime(2021, 1, 2, 11, 0),\n", + " datetime.datetime(2021, 1, 2, 12, 0),\n", + " datetime.datetime(2021, 1, 2, 13, 0),\n", + " datetime.datetime(2021, 1, 2, 14, 0),\n", + " datetime.datetime(2021, 1, 2, 15, 0),\n", + " datetime.datetime(2021, 1, 2, 16, 0),\n", + " datetime.datetime(2021, 1, 2, 17, 0),\n", + " datetime.datetime(2021, 1, 2, 18, 0),\n", + " datetime.datetime(2021, 1, 2, 19, 0),\n", + " datetime.datetime(2021, 1, 2, 20, 0),\n", + " datetime.datetime(2021, 1, 2, 21, 0),\n", + " datetime.datetime(2021, 1, 2, 22, 0),\n", + " datetime.datetime(2021, 1, 2, 23, 0)]" ] }, "execution_count": 5, @@ -751,7 +751,7 @@ " mask=False,\n", " fill_value=1e+20),\n", " 'dimensions': ('x',),\n", - " 'units': 'm',\n", + " 'units': '1000 m',\n", " 'long_name': 'x coordinate of projection',\n", " 'standard_name': 'projection_x_coordinate'}" ] @@ -975,7 +975,7 @@ " mask=False,\n", " fill_value=1e+20),\n", " 'dimensions': ('y',),\n", - " 'units': 'm',\n", + " 'units': '1000 m',\n", " 'long_name': 'y coordinate of projection',\n", " 'standard_name': 'projection_y_coordinate'}" ] @@ -1080,8 +1080,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Rank 000: Loading pm10 var (1/1)\n", - "Rank 000: Loaded pm10 var ((48, 1, 398, 478))\n" + "Rank 000: Loading sconco3 var (1/1)\n", + "Rank 000: Loaded sconco3 var ((48, 1, 398, 478))\n" ] } ], @@ -1097,103 +1097,103 @@ { "data": { "text/plain": [ - "{'pm10': {'data': masked_array(\n", - " data=[[[[2.05903081e-08, 2.10736815e-08, 2.16505036e-08, ...,\n", - " 1.57139212e-07, 1.56582516e-07, 1.58654302e-07],\n", - " [2.02237249e-08, 2.07961541e-08, 2.16525038e-08, ...,\n", - " 1.56947792e-07, 1.52752250e-07, 1.51975840e-07],\n", - " [1.92542160e-08, 1.95532017e-08, 2.09430890e-08, ...,\n", - " 1.56698391e-07, 1.52042290e-07, 1.49590434e-07],\n", + "{'sconco3': {'data': masked_array(\n", + " data=[[[[0.03952214, 0.0395867 , 0.03965145, ..., 0.03198181,\n", + " 0.03125041, 0.03153064],\n", + " [0.03945881, 0.03952107, 0.03958255, ..., 0.03184386,\n", + " 0.03127047, 0.03173521],\n", + " [0.03940514, 0.03945867, 0.03951972, ..., 0.03147388,\n", + " 0.031453 , 0.03222592],\n", " ...,\n", - " [2.00155412e-08, 1.88844460e-08, 1.72373600e-08, ...,\n", - " 1.03697766e-10, 1.24570437e-10, 1.35568029e-10],\n", - " [1.88825418e-08, 1.78339921e-08, 1.65571699e-08, ...,\n", - " 1.29691299e-10, 1.19366197e-10, 1.26047242e-10],\n", - " [1.75334254e-08, 1.67024794e-08, 
1.57620299e-08, ...,\n", - " 5.90659299e-10, 5.99817251e-10, 6.93886892e-10]]],\n", + " [0.03757998, 0.03767523, 0.03775784, ..., 0.02404686,\n", + " 0.02617675, 0.02804444],\n", + " [0.03747029, 0.03767779, 0.03778836, ..., 0.02497317,\n", + " 0.02633872, 0.02821054],\n", + " [0.03744229, 0.03762932, 0.03774261, ..., 0.02464963,\n", + " 0.02615741, 0.02828379]]],\n", " \n", " \n", - " [[[2.04419166e-08, 2.10894218e-08, 2.14073363e-08, ...,\n", - " 1.50338764e-07, 1.46800986e-07, 1.45907649e-07],\n", - " [1.97089882e-08, 2.07061799e-08, 2.14063096e-08, ...,\n", - " 1.50352193e-07, 1.44729924e-07, 1.41249188e-07],\n", - " [1.88334628e-08, 1.97772980e-08, 2.10062652e-08, ...,\n", - " 1.50320858e-07, 1.45472683e-07, 1.40802484e-07],\n", + " [[[0.03965769, 0.03971414, 0.03978673, ..., 0.03103125,\n", + " 0.03015577, 0.03036901],\n", + " [0.03960922, 0.03966256, 0.03972849, ..., 0.03101462,\n", + " 0.03021824, 0.03060081],\n", + " [0.03957865, 0.03962931, 0.03968912, ..., 0.03126283,\n", + " 0.03049327, 0.0311233 ],\n", " ...,\n", - " [1.58354645e-08, 1.31688500e-08, 1.04182032e-08, ...,\n", - " 1.36057263e-10, 1.61256425e-10, 1.82640861e-10],\n", - " [1.58553863e-08, 1.35575196e-08, 1.12653220e-08, ...,\n", - " 4.73602046e-10, 5.22056454e-10, 5.99001682e-10],\n", - " [1.51028274e-08, 1.29572175e-08, 1.06422196e-08, ...,\n", - " 1.20180965e-09, 1.22420163e-09, 1.27862843e-09]]],\n", + " [0.03786875, 0.03777716, 0.0376072 , ..., 0.02531419,\n", + " 0.02676462, 0.02868378],\n", + " [0.03781448, 0.03783737, 0.03778312, ..., 0.02607518,\n", + " 0.02714914, 0.02904003],\n", + " [0.03779451, 0.03781895, 0.03778093, ..., 0.02577951,\n", + " 0.02662436, 0.02910396]]],\n", " \n", " \n", - " [[[2.06775415e-08, 2.09295870e-08, 2.10403801e-08, ...,\n", - " 1.43437418e-07, 1.36887849e-07, 1.33009308e-07],\n", - " [2.03318944e-08, 2.06964064e-08, 2.09895372e-08, ...,\n", - " 1.43851324e-07, 1.36785971e-07, 1.30620521e-07],\n", - " [1.96500309e-08, 1.97185361e-08, 2.00775236e-08, ...,\n", - " 1.43990789e-07, 1.38915652e-07, 1.32079862e-07],\n", + " [[[0.03982776, 0.03989794, 0.04000261, ..., 0.03058425,\n", + " 0.02924641, 0.02938778],\n", + " [0.03979794, 0.03985743, 0.03995337, ..., 0.03089899,\n", + " 0.02948623, 0.02973373],\n", + " [0.03978007, 0.03982415, 0.03991285, ..., 0.03140561,\n", + " 0.03000267, 0.0304553 ],\n", " ...,\n", - " [1.15522543e-08, 1.03884448e-08, 8.92967922e-09, ...,\n", - " 2.96768943e-10, 5.31864996e-10, 6.76325274e-10],\n", - " [1.30444580e-08, 1.24477344e-08, 1.19089290e-08, ...,\n", - " 9.38010669e-10, 1.02875852e-09, 1.09171505e-09],\n", - " [1.33038087e-08, 1.28954767e-08, 1.27622268e-08, ...,\n", - " 1.10067289e-09, 1.11675491e-09, 1.12590814e-09]]],\n", + " [0.03821166, 0.03814133, 0.0379876 , ..., 0.02608518,\n", + " 0.02720294, 0.02940145],\n", + " [0.03820223, 0.03822928, 0.03817524, ..., 0.02666686,\n", + " 0.02796096, 0.02975006],\n", + " [0.03819539, 0.03822353, 0.03819501, ..., 0.02689083,\n", + " 0.02732235, 0.02981647]]],\n", " \n", " \n", " ...,\n", " \n", " \n", - " [[[1.99427657e-08, 2.07774260e-08, 2.12375095e-08, ...,\n", - " 1.42070576e-07, 1.23106801e-07, 1.05545318e-07],\n", - " [1.94999394e-08, 2.03327897e-08, 2.09769428e-08, ...,\n", - " 1.40088332e-07, 1.22823366e-07, 1.06322936e-07],\n", - " [1.92740401e-08, 2.01404546e-08, 2.12538360e-08, ...,\n", - " 1.37738098e-07, 1.25881471e-07, 1.11278482e-07],\n", + " [[[0.04243098, 0.04246398, 0.0425217 , ..., 0.03174707,\n", + " 0.02986301, 0.02911444],\n", + " [0.04233237, 0.0423856 , 0.0424809 , ..., 
0.03209906,\n", + " 0.03045077, 0.02993134],\n", + " [0.04244579, 0.04248011, 0.04255648, ..., 0.03262969,\n", + " 0.03137314, 0.03106195],\n", " ...,\n", - " [1.62301383e-09, 3.29047856e-09, 4.25983115e-09, ...,\n", - " 5.10578968e-10, 7.78555886e-10, 7.58658358e-10],\n", - " [1.61382108e-09, 3.05706660e-09, 3.85214838e-09, ...,\n", - " 1.44986870e-10, 4.45782633e-10, 6.53098131e-10],\n", - " [1.51593449e-09, 2.81125856e-09, 3.49995122e-09, ...,\n", - " 3.83349671e-11, 5.17163673e-11, 1.41100784e-10]]],\n", + " [0.03979023, 0.03997482, 0.04005315, ..., 0.03456535,\n", + " 0.03498862, 0.0354754 ],\n", + " [0.03967334, 0.0398716 , 0.04000713, ..., 0.03471798,\n", + " 0.03485566, 0.03500457],\n", + " [0.03964297, 0.03981132, 0.03993002, ..., 0.03497454,\n", + " 0.03504148, 0.03509094]]],\n", " \n", " \n", - " [[[2.12318145e-08, 2.15663487e-08, 2.17239737e-08, ...,\n", - " 1.33982709e-07, 1.17447051e-07, 1.01946490e-07],\n", - " [2.09607443e-08, 2.13947366e-08, 2.17022080e-08, ...,\n", - " 1.30945295e-07, 1.15195341e-07, 1.00572883e-07],\n", - " [2.06122941e-08, 2.12153246e-08, 2.20146017e-08, ...,\n", - " 1.27737934e-07, 1.15350041e-07, 1.01470562e-07],\n", + " [[[0.04243221, 0.04248913, 0.04257801, ..., 0.03141348,\n", + " 0.02992571, 0.02943683],\n", + " [0.04236476, 0.0424497 , 0.04255884, ..., 0.03173555,\n", + " 0.03054 , 0.03031359],\n", + " [0.04247259, 0.04253475, 0.0426126 , ..., 0.03197961,\n", + " 0.03136487, 0.03140653],\n", " ...,\n", - " [1.74116932e-09, 3.57977159e-09, 4.66885952e-09, ...,\n", - " 1.35524800e-10, 3.73824971e-10, 5.27946020e-10],\n", - " [1.69331171e-09, 3.35175421e-09, 4.35682335e-09, ...,\n", - " 6.59049343e-11, 8.77121500e-11, 1.72610246e-10],\n", - " [1.66064651e-09, 3.09451931e-09, 3.84808585e-09, ...,\n", - " 3.81935802e-11, 3.20906392e-11, 2.95690208e-11]]],\n", + " [0.03982663, 0.04001996, 0.04009991, ..., 0.03445901,\n", + " 0.0349366 , 0.03539021],\n", + " [0.03969444, 0.03990471, 0.04005693, ..., 0.03466238,\n", + " 0.03480245, 0.03491453],\n", + " [0.0396612 , 0.03983796, 0.03996958, ..., 0.03486277,\n", + " 0.03491549, 0.03492822]]],\n", " \n", " \n", - " [[[2.20933263e-08, 2.23298162e-08, 2.22625207e-08, ...,\n", - " 1.25896420e-07, 1.11788722e-07, 9.83487993e-08],\n", - " [2.17691785e-08, 2.21585328e-08, 2.22608545e-08, ...,\n", - " 1.21806693e-07, 1.07571751e-07, 9.48248058e-08],\n", - " [2.10832365e-08, 2.17355911e-08, 2.23796377e-08, ...,\n", - " 1.17739212e-07, 1.04824068e-07, 9.16652283e-08],\n", + " [[[0.04249088, 0.04256602, 0.04264675, ..., 0.03092884,\n", + " 0.02980646, 0.02965403],\n", + " [0.042451 , 0.04252698, 0.04259988, ..., 0.03106567,\n", + " 0.03033506, 0.03051317],\n", + " [0.04252941, 0.04258011, 0.04264118, ..., 0.0312073 ,\n", + " 0.03103344, 0.03165624],\n", " ...,\n", - " [1.82784676e-09, 3.94770527e-09, 5.16965803e-09, ...,\n", - " 7.80837409e-11, 9.47864148e-11, 1.31354164e-10],\n", - " [1.82815707e-09, 3.68124264e-09, 4.70819206e-09, ...,\n", - " 5.68534525e-11, 4.92194792e-11, 5.04330119e-11],\n", - " [1.79193316e-09, 3.41961126e-09, 4.36335901e-09, ...,\n", - " 4.09052167e-11, 3.92018085e-11, 3.67623848e-11]]]],\n", + " [0.03986304, 0.04006038, 0.04014875, ..., 0.03444035,\n", + " 0.03486974, 0.03531818],\n", + " [0.03971234, 0.03994204, 0.04009009, ..., 0.03459954,\n", + " 0.0347504 , 0.0348453 ],\n", + " [0.03967606, 0.03986803, 0.03999919, ..., 0.03475965,\n", + " 0.03481083, 0.03478444]]]],\n", " mask=False,\n", " fill_value=1e+20,\n", " dtype=float32),\n", " 'dimensions': ('time', 'lev', 'y', 'x'),\n", - " 
'units': 'kgm-3',\n", + " 'units': 'ppm',\n", " 'coordinates': 'lat lon',\n", " 'grid_mapping': 'Lambert_conformal'}}" ] @@ -1219,11 +1219,11 @@ "Rank 000: Creating lcc_file_1.nc\n", "Rank 000: NetCDF ready to write\n", "Rank 000: Dimensions done\n", - "Rank 000: Writing pm10 var (1/1)\n", - "Rank 000: Var pm10 created (1/1)\n", - "Rank 000: Filling pm10)\n", - "Rank 000: Var pm10 data (1/1)\n", - "Rank 000: Var pm10 completed (1/1)\n" + "Rank 000: Writing sconco3 var (1/1)\n", + "Rank 000: Var sconco3 created (1/1)\n", + "Rank 000: Filling sconco3)\n", + "Rank 000: Var sconco3 data (1/1)\n", + "Rank 000: Var sconco3 completed (1/1)\n" ] } ], @@ -1246,7 +1246,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -1621,56 +1621,56 @@ "
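The hunks above swap the tracked variable from pm10 (kgm-3) to sconco3 (ppm) and correct the projection coordinate units metadata from 'm' to '1000 m', but the read → load → write cycle they exercise is unchanged. A minimal sketch of that cycle, assuming `open_netcdf` comes from the nes package as used throughout these notebooks, that `lcc_path` (hypothetical) points at the LCC-grid input file, and that loaded variables live in the `variables` dictionary suggested by the output above:

```python
from nes import open_netcdf  # assumed import path for the nes package

lcc_path = '...'  # hypothetical path to the LCC-grid NetCDF file
nessy = open_netcdf(path=lcc_path, info=True)

# Variables are read lazily; load() pulls the values into memory and emits the
# "Rank 000: Loading sconco3 var (1/1)" style messages shown above.
nessy.load()

# Variable metadata is a plain dictionary, matching the repr above.
print(nessy.variables['sconco3']['units'])  # 'ppm'

# Writing produces the "Rank 000: Creating lcc_file_1.nc ..." log shown above.
nessy.to_netcdf('lcc_file_1.nc', info=True)
```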
    <xarray.Dataset>\n",
            "Dimensions:            (time: 48, lev: 1, y: 398, x: 478)\n",
            "Coordinates:\n",
    -       "  * time               (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n",
    +       "  * time               (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n",
            "  * lev                (lev) float64 0.0\n",
            "    lat                (y, x) float64 ...\n",
            "    lon                (y, x) float64 ...\n",
            "  * y                  (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n",
            "  * x                  (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n",
            "Data variables:\n",
    -       "    pm10               (time, lev, y, x) float32 ...\n",
    +       "    sconco3            (time, lev, y, x) float32 ...\n",
            "    Lambert_conformal  |S1 b''\n",
            "Attributes:\n",
    -       "    Conventions:  CF-1.7
    " + " Conventions: CF-1.7" ], "text/plain": [ "\n", "Dimensions: (time: 48, lev: 1, y: 398, x: 478)\n", "Coordinates:\n", - " * time (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n", " * lev (lev) float64 0.0\n", " lat (y, x) float64 ...\n", " lon (y, x) float64 ...\n", " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", "Data variables:\n", - " pm10 (time, lev, y, x) float32 ...\n", + " sconco3 (time, lev, y, x) float32 ...\n", " Lambert_conformal |S1 ...\n", "Attributes:\n", " Conventions: CF-1.7" diff --git a/Jupyter_notebooks/1.5-mercator_grids.ipynb b/Jupyter_notebooks/1.5-mercator_grids.ipynb index ad8cc7fa57036fb76c1b25bebd93af76b950032f..910861203ed3abab444756094393a5513c7f18fd 100644 --- a/Jupyter_notebooks/1.5-mercator_grids.ipynb +++ b/Jupyter_notebooks/1.5-mercator_grids.ipynb @@ -419,7 +419,7 @@ "Data variables:\n", " var_aux (time, y, x) float32 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0\n", " mercator int32 -2147483647\n", - " cell_area (y, x) float32 1.316e+09 1.316e+09 ... 1.051e+09 1.051e+09
  • " ], "text/plain": [ "\n", @@ -501,7 +501,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 5, @@ -927,7 +927,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 15, @@ -1313,7 +1313,7 @@ " cell_area (time, lev, y, x) float32 1.316e+09 1.316e+09 ... 1.051e+09\n", " mercator |S1 b''\n", "Attributes:\n", - " Conventions: CF-1.7
  • " ], "text/plain": [ "\n", diff --git a/Jupyter_notebooks/2-create_nes.ipynb b/Jupyter_notebooks/2-create_nes.ipynb index e69ddcb5f9f1492aeac2196540200ca6cd3d85cc..bf915c09d29fe9616ac15462ccf12bfb28fa3a35 100644 --- a/Jupyter_notebooks/2-create_nes.ipynb +++ b/Jupyter_notebooks/2-create_nes.ipynb @@ -431,7 +431,7 @@ "Data variables:\n", " crs |S1 b''\n", "Attributes:\n", - " Conventions: CF-1.7" + " Conventions: CF-1.7" ], "text/plain": [ "\n", @@ -871,13 +871,13 @@ " lon (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n", " rotated_pole |S1 b''\n", "Attributes:\n", - " Conventions: CF-1.7
  • " ], "text/plain": [ "\n", @@ -1147,9 +1147,9 @@ "name": "stderr", "output_type": "stream", "text": [ - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:323: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", " warnings.warn(msg)\n", - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable area_classificationInput dtype=, data dtype=object\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:323: UserWarning: WARNING!!! Different data types for variable area_classificationInput dtype=, data dtype=object\n", " warnings.warn(msg)\n" ] } @@ -1523,12 +1523,12 @@ " * time (time) datetime64[ns] 1996-12-31\n", " * station (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n", "Data variables:\n", - " station_code (station) object 'ES0266A' 'ES0392A' ... 'ES9994A'\n", - " area_classification (station) object 'urban-centre' ... 'nan'\n", " lat (station) float64 41.38 41.73 41.57 ... 41.24 42.36\n", " lon (station) float64 2.086 1.839 2.015 ... 1.857 1.459\n", + " station_code (station) object 'ES0266A' 'ES0392A' ... 'ES9994A'\n", + " area_classification (station) object 'urban-centre' ... 'nan'\n", "Attributes:\n", - " Conventions: CF-1.7
  • " ], "text/plain": [ "\n", @@ -1643,10 +1643,10 @@ " * time (time) datetime64[ns] 1996-12-31\n", " * station (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n", "Data variables:\n", - " station_code (station) object ...\n", - " area_classification (station) object ...\n", " lat (station) float64 ...\n", " lon (station) float64 ...\n", + " station_code (station) object ...\n", + " area_classification (station) object ...\n", "Attributes:\n", " Conventions: CF-1.7" ] @@ -2213,11 +2213,11 @@ "name": "stderr", "output_type": "stream", "text": [ - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:323: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", " warnings.warn(msg)\n", - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:323: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", " warnings.warn(msg)\n", - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable pm10Input dtype=, data dtype=object\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:323: UserWarning: WARNING!!! Different data types for variable pm10Input dtype=, data dtype=object\n", " warnings.warn(msg)\n" ] } @@ -2586,115 +2586,69 @@ " fill: currentColor;\n", "}\n", "
    <xarray.Dataset>\n",
    -       "Dimensions:       (time: 365, station: 83)\n",
    +       "Dimensions:       (time: 365, station: 83, strlen: 75)\n",
            "Coordinates:\n",
            "  * time          (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n",
            "  * station       (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n",
    +       "Dimensions without coordinates: strlen\n",
            "Data variables:\n",
    -       "    station_name  (station) object 'Barcelona (Eixample)' ... 'Vic (Centre Cí...\n",
    -       "    station_code  (station) object 'ES1438A' 'ES1928A' ... 'ES9994A' 'ES1874A'\n",
    -       "    pm10          (station, time) float64 19.6 27.2 35.7 30.9 ... nan nan nan\n",
            "    lat           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
            "    lon           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
    +       "    station_name  (station, strlen) object 'B' 'a' 'r' 'c' 'e' ... '' '' '' ''\n",
    +       "    station_code  (station, strlen) object 'E' 'S' '1' '4' '3' ... '' '' '' ''\n",
    +       "    pm10          (station, time) float64 19.6 27.2 35.7 30.9 ... nan nan nan\n",
            "Attributes:\n",
    -       "    Conventions:  CF-1.7
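After the rewrite, station_name and station_code are stored as (station, strlen) character arrays rather than 1-D string variables, which is where the new `strlen: 75` dimension comes from. A short sketch of collapsing such a character array back into whole strings, assuming the written file is reopened with the netCDF4 library (the file name is hypothetical; the variable names follow the repr above):

```python
import netCDF4
import numpy as np

ds = netCDF4.Dataset('points_file_2.nc')    # hypothetical output file
chars = ds.variables['station_name'][:]     # shape (station, strlen)

# chartostring joins each row of single characters into one string per station.
names = netCDF4.chartostring(np.asarray(chars, dtype='S1'))
print(names[0])                             # e.g. 'Barcelona (Eixample)'
```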
  • " ], "text/plain": [ "\n", - "Dimensions: (time: 365, station: 83)\n", + "Dimensions: (time: 365, station: 83, strlen: 75)\n", "Coordinates:\n", " * time (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n", " * station (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n", + "Dimensions without coordinates: strlen\n", "Data variables:\n", - " station_name (station) object ...\n", - " station_code (station) object ...\n", - " pm10 (station, time) float64 ...\n", " lat (station) float64 ...\n", " lon (station) float64 ...\n", + " station_name (station, strlen) object ...\n", + " station_code (station, strlen) object ...\n", + " pm10 (station, time) float64 ...\n", "Attributes:\n", " Conventions: CF-1.7" ] @@ -3126,9 +3080,9 @@ " lon (y, x) float64 ...\n", " Lambert_conformal |S1 b''\n", "Attributes:\n", - " Conventions: CF-1.7" + " Conventions: CF-1.7" ], "text/plain": [ "\n", @@ -3571,8 +3525,8 @@ " lon (y, x) float64 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n", " mercator |S1 b''\n", "Attributes:\n", - " Conventions: CF-1.7
  • " ], "text/plain": [ "\n", @@ -3622,13 +3576,6 @@ "source": [ "xr.open_dataset('mercator_grid.nc')" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/Jupyter_notebooks/4-providentia.ipynb b/Jupyter_notebooks/4-providentia.ipynb index 5e0303360a2221c26721ce15c7d648399a6d5e71..dbdb11a4536696768f5fc44fe8bd9796e00fbfb8 100644 --- a/Jupyter_notebooks/4-providentia.ipynb +++ b/Jupyter_notebooks/4-providentia.ipynb @@ -20,7 +20,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -36,7 +36,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -422,10 +422,10 @@ " data_version: 1.3.3\n", " history: Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n", " NCO: 4.7.2\n", - " nco_openmp_thread_number: 1" ], "text/plain": [ @@ -6289,7 +6289,7 @@ " nco_openmp_thread_number: 1" ] }, - "execution_count": 21, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -6300,767 +6300,37 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 3, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nessy_1 = open_netcdf(path=obs_path, info=True, parallel_method='X')\n", - "nessy_1" + "obs_nes = open_netcdf(path=obs_path, info=True, parallel_method='X')\n", + "obs_nes" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[datetime.datetime(2018, 4, 1, 0, 0),\n", - " datetime.datetime(2018, 4, 1, 1, 0),\n", - " datetime.datetime(2018, 4, 1, 2, 0),\n", - " datetime.datetime(2018, 4, 1, 3, 0),\n", - " datetime.datetime(2018, 4, 1, 4, 0),\n", - " datetime.datetime(2018, 4, 1, 5, 0),\n", - " datetime.datetime(2018, 4, 1, 6, 0),\n", - " datetime.datetime(2018, 4, 1, 7, 0),\n", - " datetime.datetime(2018, 4, 1, 8, 0),\n", - " datetime.datetime(2018, 4, 1, 9, 0),\n", - " datetime.datetime(2018, 4, 1, 10, 0),\n", - " datetime.datetime(2018, 4, 1, 11, 0),\n", - " datetime.datetime(2018, 4, 1, 12, 0),\n", - " datetime.datetime(2018, 4, 1, 13, 0),\n", - " datetime.datetime(2018, 4, 1, 14, 0),\n", - " datetime.datetime(2018, 4, 1, 15, 0),\n", - " datetime.datetime(2018, 4, 1, 16, 0),\n", - " datetime.datetime(2018, 4, 1, 17, 0),\n", - " datetime.datetime(2018, 4, 1, 18, 0),\n", - " datetime.datetime(2018, 4, 1, 19, 0),\n", - " datetime.datetime(2018, 4, 1, 20, 0),\n", - " datetime.datetime(2018, 4, 1, 21, 0),\n", - " datetime.datetime(2018, 4, 1, 22, 0),\n", - " datetime.datetime(2018, 4, 1, 23, 0),\n", - " datetime.datetime(2018, 4, 2, 0, 0),\n", - " datetime.datetime(2018, 4, 2, 1, 0),\n", - " datetime.datetime(2018, 4, 2, 2, 0),\n", - " datetime.datetime(2018, 4, 2, 3, 0),\n", - " datetime.datetime(2018, 4, 2, 4, 0),\n", - " datetime.datetime(2018, 4, 2, 5, 0),\n", - " datetime.datetime(2018, 4, 2, 6, 0),\n", - " datetime.datetime(2018, 4, 2, 7, 0),\n", - " datetime.datetime(2018, 4, 2, 8, 0),\n", - " datetime.datetime(2018, 4, 2, 9, 0),\n", - " datetime.datetime(2018, 4, 2, 10, 0),\n", - " datetime.datetime(2018, 4, 2, 11, 0),\n", - " datetime.datetime(2018, 4, 2, 12, 0),\n", - " datetime.datetime(2018, 4, 2, 13, 0),\n", - " datetime.datetime(2018, 4, 2, 14, 0),\n", - " datetime.datetime(2018, 4, 2, 15, 0),\n", - " 
datetime.datetime(2018, 4, 2, 16, 0),\n", - " datetime.datetime(2018, 4, 2, 17, 0),\n", - " datetime.datetime(2018, 4, 2, 18, 0),\n", - " datetime.datetime(2018, 4, 2, 19, 0),\n", - " datetime.datetime(2018, 4, 2, 20, 0),\n", - " datetime.datetime(2018, 4, 2, 21, 0),\n", - " datetime.datetime(2018, 4, 2, 22, 0),\n", - " datetime.datetime(2018, 4, 2, 23, 0),\n", - " datetime.datetime(2018, 4, 3, 0, 0),\n", - " datetime.datetime(2018, 4, 3, 1, 0),\n", - " datetime.datetime(2018, 4, 3, 2, 0),\n", - " datetime.datetime(2018, 4, 3, 3, 0),\n", - " datetime.datetime(2018, 4, 3, 4, 0),\n", - " datetime.datetime(2018, 4, 3, 5, 0),\n", - " datetime.datetime(2018, 4, 3, 6, 0),\n", - " datetime.datetime(2018, 4, 3, 7, 0),\n", - " datetime.datetime(2018, 4, 3, 8, 0),\n", - " datetime.datetime(2018, 4, 3, 9, 0),\n", - " datetime.datetime(2018, 4, 3, 10, 0),\n", - " datetime.datetime(2018, 4, 3, 11, 0),\n", - " datetime.datetime(2018, 4, 3, 12, 0),\n", - " datetime.datetime(2018, 4, 3, 13, 0),\n", - " datetime.datetime(2018, 4, 3, 14, 0),\n", - " datetime.datetime(2018, 4, 3, 15, 0),\n", - " datetime.datetime(2018, 4, 3, 16, 0),\n", - " datetime.datetime(2018, 4, 3, 17, 0),\n", - " datetime.datetime(2018, 4, 3, 18, 0),\n", - " datetime.datetime(2018, 4, 3, 19, 0),\n", - " datetime.datetime(2018, 4, 3, 20, 0),\n", - " datetime.datetime(2018, 4, 3, 21, 0),\n", - " datetime.datetime(2018, 4, 3, 22, 0),\n", - " datetime.datetime(2018, 4, 3, 23, 0),\n", - " datetime.datetime(2018, 4, 4, 0, 0),\n", - " datetime.datetime(2018, 4, 4, 1, 0),\n", - " datetime.datetime(2018, 4, 4, 2, 0),\n", - " datetime.datetime(2018, 4, 4, 3, 0),\n", - " datetime.datetime(2018, 4, 4, 4, 0),\n", - " datetime.datetime(2018, 4, 4, 5, 0),\n", - " datetime.datetime(2018, 4, 4, 6, 0),\n", - " datetime.datetime(2018, 4, 4, 7, 0),\n", - " datetime.datetime(2018, 4, 4, 8, 0),\n", - " datetime.datetime(2018, 4, 4, 9, 0),\n", - " datetime.datetime(2018, 4, 4, 10, 0),\n", - " datetime.datetime(2018, 4, 4, 11, 0),\n", - " datetime.datetime(2018, 4, 4, 12, 0),\n", - " datetime.datetime(2018, 4, 4, 13, 0),\n", - " datetime.datetime(2018, 4, 4, 14, 0),\n", - " datetime.datetime(2018, 4, 4, 15, 0),\n", - " datetime.datetime(2018, 4, 4, 16, 0),\n", - " datetime.datetime(2018, 4, 4, 17, 0),\n", - " datetime.datetime(2018, 4, 4, 18, 0),\n", - " datetime.datetime(2018, 4, 4, 19, 0),\n", - " datetime.datetime(2018, 4, 4, 20, 0),\n", - " datetime.datetime(2018, 4, 4, 21, 0),\n", - " datetime.datetime(2018, 4, 4, 22, 0),\n", - " datetime.datetime(2018, 4, 4, 23, 0),\n", - " datetime.datetime(2018, 4, 5, 0, 0),\n", - " datetime.datetime(2018, 4, 5, 1, 0),\n", - " datetime.datetime(2018, 4, 5, 2, 0),\n", - " datetime.datetime(2018, 4, 5, 3, 0),\n", - " datetime.datetime(2018, 4, 5, 4, 0),\n", - " datetime.datetime(2018, 4, 5, 5, 0),\n", - " datetime.datetime(2018, 4, 5, 6, 0),\n", - " datetime.datetime(2018, 4, 5, 7, 0),\n", - " datetime.datetime(2018, 4, 5, 8, 0),\n", - " datetime.datetime(2018, 4, 5, 9, 0),\n", - " datetime.datetime(2018, 4, 5, 10, 0),\n", - " datetime.datetime(2018, 4, 5, 11, 0),\n", - " datetime.datetime(2018, 4, 5, 12, 0),\n", - " datetime.datetime(2018, 4, 5, 13, 0),\n", - " datetime.datetime(2018, 4, 5, 14, 0),\n", - " datetime.datetime(2018, 4, 5, 15, 0),\n", - " datetime.datetime(2018, 4, 5, 16, 0),\n", - " datetime.datetime(2018, 4, 5, 17, 0),\n", - " datetime.datetime(2018, 4, 5, 18, 0),\n", - " datetime.datetime(2018, 4, 5, 19, 0),\n", - " datetime.datetime(2018, 4, 5, 20, 0),\n", - " datetime.datetime(2018, 4, 5, 21, 0),\n", 
- " datetime.datetime(2018, 4, 5, 22, 0),\n", - " datetime.datetime(2018, 4, 5, 23, 0),\n", - " datetime.datetime(2018, 4, 6, 0, 0),\n", - " datetime.datetime(2018, 4, 6, 1, 0),\n", - " datetime.datetime(2018, 4, 6, 2, 0),\n", - " datetime.datetime(2018, 4, 6, 3, 0),\n", - " datetime.datetime(2018, 4, 6, 4, 0),\n", - " datetime.datetime(2018, 4, 6, 5, 0),\n", - " datetime.datetime(2018, 4, 6, 6, 0),\n", - " datetime.datetime(2018, 4, 6, 7, 0),\n", - " datetime.datetime(2018, 4, 6, 8, 0),\n", - " datetime.datetime(2018, 4, 6, 9, 0),\n", - " datetime.datetime(2018, 4, 6, 10, 0),\n", - " datetime.datetime(2018, 4, 6, 11, 0),\n", - " datetime.datetime(2018, 4, 6, 12, 0),\n", - " datetime.datetime(2018, 4, 6, 13, 0),\n", - " datetime.datetime(2018, 4, 6, 14, 0),\n", - " datetime.datetime(2018, 4, 6, 15, 0),\n", - " datetime.datetime(2018, 4, 6, 16, 0),\n", - " datetime.datetime(2018, 4, 6, 17, 0),\n", - " datetime.datetime(2018, 4, 6, 18, 0),\n", - " datetime.datetime(2018, 4, 6, 19, 0),\n", - " datetime.datetime(2018, 4, 6, 20, 0),\n", - " datetime.datetime(2018, 4, 6, 21, 0),\n", - " datetime.datetime(2018, 4, 6, 22, 0),\n", - " datetime.datetime(2018, 4, 6, 23, 0),\n", - " datetime.datetime(2018, 4, 7, 0, 0),\n", - " datetime.datetime(2018, 4, 7, 1, 0),\n", - " datetime.datetime(2018, 4, 7, 2, 0),\n", - " datetime.datetime(2018, 4, 7, 3, 0),\n", - " datetime.datetime(2018, 4, 7, 4, 0),\n", - " datetime.datetime(2018, 4, 7, 5, 0),\n", - " datetime.datetime(2018, 4, 7, 6, 0),\n", - " datetime.datetime(2018, 4, 7, 7, 0),\n", - " datetime.datetime(2018, 4, 7, 8, 0),\n", - " datetime.datetime(2018, 4, 7, 9, 0),\n", - " datetime.datetime(2018, 4, 7, 10, 0),\n", - " datetime.datetime(2018, 4, 7, 11, 0),\n", - " datetime.datetime(2018, 4, 7, 12, 0),\n", - " datetime.datetime(2018, 4, 7, 13, 0),\n", - " datetime.datetime(2018, 4, 7, 14, 0),\n", - " datetime.datetime(2018, 4, 7, 15, 0),\n", - " datetime.datetime(2018, 4, 7, 16, 0),\n", - " datetime.datetime(2018, 4, 7, 17, 0),\n", - " datetime.datetime(2018, 4, 7, 18, 0),\n", - " datetime.datetime(2018, 4, 7, 19, 0),\n", - " datetime.datetime(2018, 4, 7, 20, 0),\n", - " datetime.datetime(2018, 4, 7, 21, 0),\n", - " datetime.datetime(2018, 4, 7, 22, 0),\n", - " datetime.datetime(2018, 4, 7, 23, 0),\n", - " datetime.datetime(2018, 4, 8, 0, 0),\n", - " datetime.datetime(2018, 4, 8, 1, 0),\n", - " datetime.datetime(2018, 4, 8, 2, 0),\n", - " datetime.datetime(2018, 4, 8, 3, 0),\n", - " datetime.datetime(2018, 4, 8, 4, 0),\n", - " datetime.datetime(2018, 4, 8, 5, 0),\n", - " datetime.datetime(2018, 4, 8, 6, 0),\n", - " datetime.datetime(2018, 4, 8, 7, 0),\n", - " datetime.datetime(2018, 4, 8, 8, 0),\n", - " datetime.datetime(2018, 4, 8, 9, 0),\n", - " datetime.datetime(2018, 4, 8, 10, 0),\n", - " datetime.datetime(2018, 4, 8, 11, 0),\n", - " datetime.datetime(2018, 4, 8, 12, 0),\n", - " datetime.datetime(2018, 4, 8, 13, 0),\n", - " datetime.datetime(2018, 4, 8, 14, 0),\n", - " datetime.datetime(2018, 4, 8, 15, 0),\n", - " datetime.datetime(2018, 4, 8, 16, 0),\n", - " datetime.datetime(2018, 4, 8, 17, 0),\n", - " datetime.datetime(2018, 4, 8, 18, 0),\n", - " datetime.datetime(2018, 4, 8, 19, 0),\n", - " datetime.datetime(2018, 4, 8, 20, 0),\n", - " datetime.datetime(2018, 4, 8, 21, 0),\n", - " datetime.datetime(2018, 4, 8, 22, 0),\n", - " datetime.datetime(2018, 4, 8, 23, 0),\n", - " datetime.datetime(2018, 4, 9, 0, 0),\n", - " datetime.datetime(2018, 4, 9, 1, 0),\n", - " datetime.datetime(2018, 4, 9, 2, 0),\n", - " datetime.datetime(2018, 4, 9, 3, 0),\n", 
- " datetime.datetime(2018, 4, 9, 4, 0),\n", - " datetime.datetime(2018, 4, 9, 5, 0),\n", - " datetime.datetime(2018, 4, 9, 6, 0),\n", - " datetime.datetime(2018, 4, 9, 7, 0),\n", - " datetime.datetime(2018, 4, 9, 8, 0),\n", - " datetime.datetime(2018, 4, 9, 9, 0),\n", - " datetime.datetime(2018, 4, 9, 10, 0),\n", - " datetime.datetime(2018, 4, 9, 11, 0),\n", - " datetime.datetime(2018, 4, 9, 12, 0),\n", - " datetime.datetime(2018, 4, 9, 13, 0),\n", - " datetime.datetime(2018, 4, 9, 14, 0),\n", - " datetime.datetime(2018, 4, 9, 15, 0),\n", - " datetime.datetime(2018, 4, 9, 16, 0),\n", - " datetime.datetime(2018, 4, 9, 17, 0),\n", - " datetime.datetime(2018, 4, 9, 18, 0),\n", - " datetime.datetime(2018, 4, 9, 19, 0),\n", - " datetime.datetime(2018, 4, 9, 20, 0),\n", - " datetime.datetime(2018, 4, 9, 21, 0),\n", - " datetime.datetime(2018, 4, 9, 22, 0),\n", - " datetime.datetime(2018, 4, 9, 23, 0),\n", - " datetime.datetime(2018, 4, 10, 0, 0),\n", - " datetime.datetime(2018, 4, 10, 1, 0),\n", - " datetime.datetime(2018, 4, 10, 2, 0),\n", - " datetime.datetime(2018, 4, 10, 3, 0),\n", - " datetime.datetime(2018, 4, 10, 4, 0),\n", - " datetime.datetime(2018, 4, 10, 5, 0),\n", - " datetime.datetime(2018, 4, 10, 6, 0),\n", - " datetime.datetime(2018, 4, 10, 7, 0),\n", - " datetime.datetime(2018, 4, 10, 8, 0),\n", - " datetime.datetime(2018, 4, 10, 9, 0),\n", - " datetime.datetime(2018, 4, 10, 10, 0),\n", - " datetime.datetime(2018, 4, 10, 11, 0),\n", - " datetime.datetime(2018, 4, 10, 12, 0),\n", - " datetime.datetime(2018, 4, 10, 13, 0),\n", - " datetime.datetime(2018, 4, 10, 14, 0),\n", - " datetime.datetime(2018, 4, 10, 15, 0),\n", - " datetime.datetime(2018, 4, 10, 16, 0),\n", - " datetime.datetime(2018, 4, 10, 17, 0),\n", - " datetime.datetime(2018, 4, 10, 18, 0),\n", - " datetime.datetime(2018, 4, 10, 19, 0),\n", - " datetime.datetime(2018, 4, 10, 20, 0),\n", - " datetime.datetime(2018, 4, 10, 21, 0),\n", - " datetime.datetime(2018, 4, 10, 22, 0),\n", - " datetime.datetime(2018, 4, 10, 23, 0),\n", - " datetime.datetime(2018, 4, 11, 0, 0),\n", - " datetime.datetime(2018, 4, 11, 1, 0),\n", - " datetime.datetime(2018, 4, 11, 2, 0),\n", - " datetime.datetime(2018, 4, 11, 3, 0),\n", - " datetime.datetime(2018, 4, 11, 4, 0),\n", - " datetime.datetime(2018, 4, 11, 5, 0),\n", - " datetime.datetime(2018, 4, 11, 6, 0),\n", - " datetime.datetime(2018, 4, 11, 7, 0),\n", - " datetime.datetime(2018, 4, 11, 8, 0),\n", - " datetime.datetime(2018, 4, 11, 9, 0),\n", - " datetime.datetime(2018, 4, 11, 10, 0),\n", - " datetime.datetime(2018, 4, 11, 11, 0),\n", - " datetime.datetime(2018, 4, 11, 12, 0),\n", - " datetime.datetime(2018, 4, 11, 13, 0),\n", - " datetime.datetime(2018, 4, 11, 14, 0),\n", - " datetime.datetime(2018, 4, 11, 15, 0),\n", - " datetime.datetime(2018, 4, 11, 16, 0),\n", - " datetime.datetime(2018, 4, 11, 17, 0),\n", - " datetime.datetime(2018, 4, 11, 18, 0),\n", - " datetime.datetime(2018, 4, 11, 19, 0),\n", - " datetime.datetime(2018, 4, 11, 20, 0),\n", - " datetime.datetime(2018, 4, 11, 21, 0),\n", - " datetime.datetime(2018, 4, 11, 22, 0),\n", - " datetime.datetime(2018, 4, 11, 23, 0),\n", - " datetime.datetime(2018, 4, 12, 0, 0),\n", - " datetime.datetime(2018, 4, 12, 1, 0),\n", - " datetime.datetime(2018, 4, 12, 2, 0),\n", - " datetime.datetime(2018, 4, 12, 3, 0),\n", - " datetime.datetime(2018, 4, 12, 4, 0),\n", - " datetime.datetime(2018, 4, 12, 5, 0),\n", - " datetime.datetime(2018, 4, 12, 6, 0),\n", - " datetime.datetime(2018, 4, 12, 7, 0),\n", - " datetime.datetime(2018, 4, 12, 
8, 0),\n", - " datetime.datetime(2018, 4, 12, 9, 0),\n", - " datetime.datetime(2018, 4, 12, 10, 0),\n", - " datetime.datetime(2018, 4, 12, 11, 0),\n", - " datetime.datetime(2018, 4, 12, 12, 0),\n", - " datetime.datetime(2018, 4, 12, 13, 0),\n", - " datetime.datetime(2018, 4, 12, 14, 0),\n", - " datetime.datetime(2018, 4, 12, 15, 0),\n", - " datetime.datetime(2018, 4, 12, 16, 0),\n", - " datetime.datetime(2018, 4, 12, 17, 0),\n", - " datetime.datetime(2018, 4, 12, 18, 0),\n", - " datetime.datetime(2018, 4, 12, 19, 0),\n", - " datetime.datetime(2018, 4, 12, 20, 0),\n", - " datetime.datetime(2018, 4, 12, 21, 0),\n", - " datetime.datetime(2018, 4, 12, 22, 0),\n", - " datetime.datetime(2018, 4, 12, 23, 0),\n", - " datetime.datetime(2018, 4, 13, 0, 0),\n", - " datetime.datetime(2018, 4, 13, 1, 0),\n", - " datetime.datetime(2018, 4, 13, 2, 0),\n", - " datetime.datetime(2018, 4, 13, 3, 0),\n", - " datetime.datetime(2018, 4, 13, 4, 0),\n", - " datetime.datetime(2018, 4, 13, 5, 0),\n", - " datetime.datetime(2018, 4, 13, 6, 0),\n", - " datetime.datetime(2018, 4, 13, 7, 0),\n", - " datetime.datetime(2018, 4, 13, 8, 0),\n", - " datetime.datetime(2018, 4, 13, 9, 0),\n", - " datetime.datetime(2018, 4, 13, 10, 0),\n", - " datetime.datetime(2018, 4, 13, 11, 0),\n", - " datetime.datetime(2018, 4, 13, 12, 0),\n", - " datetime.datetime(2018, 4, 13, 13, 0),\n", - " datetime.datetime(2018, 4, 13, 14, 0),\n", - " datetime.datetime(2018, 4, 13, 15, 0),\n", - " datetime.datetime(2018, 4, 13, 16, 0),\n", - " datetime.datetime(2018, 4, 13, 17, 0),\n", - " datetime.datetime(2018, 4, 13, 18, 0),\n", - " datetime.datetime(2018, 4, 13, 19, 0),\n", - " datetime.datetime(2018, 4, 13, 20, 0),\n", - " datetime.datetime(2018, 4, 13, 21, 0),\n", - " datetime.datetime(2018, 4, 13, 22, 0),\n", - " datetime.datetime(2018, 4, 13, 23, 0),\n", - " datetime.datetime(2018, 4, 14, 0, 0),\n", - " datetime.datetime(2018, 4, 14, 1, 0),\n", - " datetime.datetime(2018, 4, 14, 2, 0),\n", - " datetime.datetime(2018, 4, 14, 3, 0),\n", - " datetime.datetime(2018, 4, 14, 4, 0),\n", - " datetime.datetime(2018, 4, 14, 5, 0),\n", - " datetime.datetime(2018, 4, 14, 6, 0),\n", - " datetime.datetime(2018, 4, 14, 7, 0),\n", - " datetime.datetime(2018, 4, 14, 8, 0),\n", - " datetime.datetime(2018, 4, 14, 9, 0),\n", - " datetime.datetime(2018, 4, 14, 10, 0),\n", - " datetime.datetime(2018, 4, 14, 11, 0),\n", - " datetime.datetime(2018, 4, 14, 12, 0),\n", - " datetime.datetime(2018, 4, 14, 13, 0),\n", - " datetime.datetime(2018, 4, 14, 14, 0),\n", - " datetime.datetime(2018, 4, 14, 15, 0),\n", - " datetime.datetime(2018, 4, 14, 16, 0),\n", - " datetime.datetime(2018, 4, 14, 17, 0),\n", - " datetime.datetime(2018, 4, 14, 18, 0),\n", - " datetime.datetime(2018, 4, 14, 19, 0),\n", - " datetime.datetime(2018, 4, 14, 20, 0),\n", - " datetime.datetime(2018, 4, 14, 21, 0),\n", - " datetime.datetime(2018, 4, 14, 22, 0),\n", - " datetime.datetime(2018, 4, 14, 23, 0),\n", - " datetime.datetime(2018, 4, 15, 0, 0),\n", - " datetime.datetime(2018, 4, 15, 1, 0),\n", - " datetime.datetime(2018, 4, 15, 2, 0),\n", - " datetime.datetime(2018, 4, 15, 3, 0),\n", - " datetime.datetime(2018, 4, 15, 4, 0),\n", - " datetime.datetime(2018, 4, 15, 5, 0),\n", - " datetime.datetime(2018, 4, 15, 6, 0),\n", - " datetime.datetime(2018, 4, 15, 7, 0),\n", - " datetime.datetime(2018, 4, 15, 8, 0),\n", - " datetime.datetime(2018, 4, 15, 9, 0),\n", - " datetime.datetime(2018, 4, 15, 10, 0),\n", - " datetime.datetime(2018, 4, 15, 11, 0),\n", - " datetime.datetime(2018, 4, 15, 12, 0),\n", - 
" datetime.datetime(2018, 4, 15, 13, 0),\n", - " datetime.datetime(2018, 4, 15, 14, 0),\n", - " datetime.datetime(2018, 4, 15, 15, 0),\n", - " datetime.datetime(2018, 4, 15, 16, 0),\n", - " datetime.datetime(2018, 4, 15, 17, 0),\n", - " datetime.datetime(2018, 4, 15, 18, 0),\n", - " datetime.datetime(2018, 4, 15, 19, 0),\n", - " datetime.datetime(2018, 4, 15, 20, 0),\n", - " datetime.datetime(2018, 4, 15, 21, 0),\n", - " datetime.datetime(2018, 4, 15, 22, 0),\n", - " datetime.datetime(2018, 4, 15, 23, 0),\n", - " datetime.datetime(2018, 4, 16, 0, 0),\n", - " datetime.datetime(2018, 4, 16, 1, 0),\n", - " datetime.datetime(2018, 4, 16, 2, 0),\n", - " datetime.datetime(2018, 4, 16, 3, 0),\n", - " datetime.datetime(2018, 4, 16, 4, 0),\n", - " datetime.datetime(2018, 4, 16, 5, 0),\n", - " datetime.datetime(2018, 4, 16, 6, 0),\n", - " datetime.datetime(2018, 4, 16, 7, 0),\n", - " datetime.datetime(2018, 4, 16, 8, 0),\n", - " datetime.datetime(2018, 4, 16, 9, 0),\n", - " datetime.datetime(2018, 4, 16, 10, 0),\n", - " datetime.datetime(2018, 4, 16, 11, 0),\n", - " datetime.datetime(2018, 4, 16, 12, 0),\n", - " datetime.datetime(2018, 4, 16, 13, 0),\n", - " datetime.datetime(2018, 4, 16, 14, 0),\n", - " datetime.datetime(2018, 4, 16, 15, 0),\n", - " datetime.datetime(2018, 4, 16, 16, 0),\n", - " datetime.datetime(2018, 4, 16, 17, 0),\n", - " datetime.datetime(2018, 4, 16, 18, 0),\n", - " datetime.datetime(2018, 4, 16, 19, 0),\n", - " datetime.datetime(2018, 4, 16, 20, 0),\n", - " datetime.datetime(2018, 4, 16, 21, 0),\n", - " datetime.datetime(2018, 4, 16, 22, 0),\n", - " datetime.datetime(2018, 4, 16, 23, 0),\n", - " datetime.datetime(2018, 4, 17, 0, 0),\n", - " datetime.datetime(2018, 4, 17, 1, 0),\n", - " datetime.datetime(2018, 4, 17, 2, 0),\n", - " datetime.datetime(2018, 4, 17, 3, 0),\n", - " datetime.datetime(2018, 4, 17, 4, 0),\n", - " datetime.datetime(2018, 4, 17, 5, 0),\n", - " datetime.datetime(2018, 4, 17, 6, 0),\n", - " datetime.datetime(2018, 4, 17, 7, 0),\n", - " datetime.datetime(2018, 4, 17, 8, 0),\n", - " datetime.datetime(2018, 4, 17, 9, 0),\n", - " datetime.datetime(2018, 4, 17, 10, 0),\n", - " datetime.datetime(2018, 4, 17, 11, 0),\n", - " datetime.datetime(2018, 4, 17, 12, 0),\n", - " datetime.datetime(2018, 4, 17, 13, 0),\n", - " datetime.datetime(2018, 4, 17, 14, 0),\n", - " datetime.datetime(2018, 4, 17, 15, 0),\n", - " datetime.datetime(2018, 4, 17, 16, 0),\n", - " datetime.datetime(2018, 4, 17, 17, 0),\n", - " datetime.datetime(2018, 4, 17, 18, 0),\n", - " datetime.datetime(2018, 4, 17, 19, 0),\n", - " datetime.datetime(2018, 4, 17, 20, 0),\n", - " datetime.datetime(2018, 4, 17, 21, 0),\n", - " datetime.datetime(2018, 4, 17, 22, 0),\n", - " datetime.datetime(2018, 4, 17, 23, 0),\n", - " datetime.datetime(2018, 4, 18, 0, 0),\n", - " datetime.datetime(2018, 4, 18, 1, 0),\n", - " datetime.datetime(2018, 4, 18, 2, 0),\n", - " datetime.datetime(2018, 4, 18, 3, 0),\n", - " datetime.datetime(2018, 4, 18, 4, 0),\n", - " datetime.datetime(2018, 4, 18, 5, 0),\n", - " datetime.datetime(2018, 4, 18, 6, 0),\n", - " datetime.datetime(2018, 4, 18, 7, 0),\n", - " datetime.datetime(2018, 4, 18, 8, 0),\n", - " datetime.datetime(2018, 4, 18, 9, 0),\n", - " datetime.datetime(2018, 4, 18, 10, 0),\n", - " datetime.datetime(2018, 4, 18, 11, 0),\n", - " datetime.datetime(2018, 4, 18, 12, 0),\n", - " datetime.datetime(2018, 4, 18, 13, 0),\n", - " datetime.datetime(2018, 4, 18, 14, 0),\n", - " datetime.datetime(2018, 4, 18, 15, 0),\n", - " datetime.datetime(2018, 4, 18, 16, 0),\n", - " 
datetime.datetime(2018, 4, 18, 17, 0),\n", - " datetime.datetime(2018, 4, 18, 18, 0),\n", - " datetime.datetime(2018, 4, 18, 19, 0),\n", - " datetime.datetime(2018, 4, 18, 20, 0),\n", - " datetime.datetime(2018, 4, 18, 21, 0),\n", - " datetime.datetime(2018, 4, 18, 22, 0),\n", - " datetime.datetime(2018, 4, 18, 23, 0),\n", - " datetime.datetime(2018, 4, 19, 0, 0),\n", - " datetime.datetime(2018, 4, 19, 1, 0),\n", - " datetime.datetime(2018, 4, 19, 2, 0),\n", - " datetime.datetime(2018, 4, 19, 3, 0),\n", - " datetime.datetime(2018, 4, 19, 4, 0),\n", - " datetime.datetime(2018, 4, 19, 5, 0),\n", - " datetime.datetime(2018, 4, 19, 6, 0),\n", - " datetime.datetime(2018, 4, 19, 7, 0),\n", - " datetime.datetime(2018, 4, 19, 8, 0),\n", - " datetime.datetime(2018, 4, 19, 9, 0),\n", - " datetime.datetime(2018, 4, 19, 10, 0),\n", - " datetime.datetime(2018, 4, 19, 11, 0),\n", - " datetime.datetime(2018, 4, 19, 12, 0),\n", - " datetime.datetime(2018, 4, 19, 13, 0),\n", - " datetime.datetime(2018, 4, 19, 14, 0),\n", - " datetime.datetime(2018, 4, 19, 15, 0),\n", - " datetime.datetime(2018, 4, 19, 16, 0),\n", - " datetime.datetime(2018, 4, 19, 17, 0),\n", - " datetime.datetime(2018, 4, 19, 18, 0),\n", - " datetime.datetime(2018, 4, 19, 19, 0),\n", - " datetime.datetime(2018, 4, 19, 20, 0),\n", - " datetime.datetime(2018, 4, 19, 21, 0),\n", - " datetime.datetime(2018, 4, 19, 22, 0),\n", - " datetime.datetime(2018, 4, 19, 23, 0),\n", - " datetime.datetime(2018, 4, 20, 0, 0),\n", - " datetime.datetime(2018, 4, 20, 1, 0),\n", - " datetime.datetime(2018, 4, 20, 2, 0),\n", - " datetime.datetime(2018, 4, 20, 3, 0),\n", - " datetime.datetime(2018, 4, 20, 4, 0),\n", - " datetime.datetime(2018, 4, 20, 5, 0),\n", - " datetime.datetime(2018, 4, 20, 6, 0),\n", - " datetime.datetime(2018, 4, 20, 7, 0),\n", - " datetime.datetime(2018, 4, 20, 8, 0),\n", - " datetime.datetime(2018, 4, 20, 9, 0),\n", - " datetime.datetime(2018, 4, 20, 10, 0),\n", - " datetime.datetime(2018, 4, 20, 11, 0),\n", - " datetime.datetime(2018, 4, 20, 12, 0),\n", - " datetime.datetime(2018, 4, 20, 13, 0),\n", - " datetime.datetime(2018, 4, 20, 14, 0),\n", - " datetime.datetime(2018, 4, 20, 15, 0),\n", - " datetime.datetime(2018, 4, 20, 16, 0),\n", - " datetime.datetime(2018, 4, 20, 17, 0),\n", - " datetime.datetime(2018, 4, 20, 18, 0),\n", - " datetime.datetime(2018, 4, 20, 19, 0),\n", - " datetime.datetime(2018, 4, 20, 20, 0),\n", - " datetime.datetime(2018, 4, 20, 21, 0),\n", - " datetime.datetime(2018, 4, 20, 22, 0),\n", - " datetime.datetime(2018, 4, 20, 23, 0),\n", - " datetime.datetime(2018, 4, 21, 0, 0),\n", - " datetime.datetime(2018, 4, 21, 1, 0),\n", - " datetime.datetime(2018, 4, 21, 2, 0),\n", - " datetime.datetime(2018, 4, 21, 3, 0),\n", - " datetime.datetime(2018, 4, 21, 4, 0),\n", - " datetime.datetime(2018, 4, 21, 5, 0),\n", - " datetime.datetime(2018, 4, 21, 6, 0),\n", - " datetime.datetime(2018, 4, 21, 7, 0),\n", - " datetime.datetime(2018, 4, 21, 8, 0),\n", - " datetime.datetime(2018, 4, 21, 9, 0),\n", - " datetime.datetime(2018, 4, 21, 10, 0),\n", - " datetime.datetime(2018, 4, 21, 11, 0),\n", - " datetime.datetime(2018, 4, 21, 12, 0),\n", - " datetime.datetime(2018, 4, 21, 13, 0),\n", - " datetime.datetime(2018, 4, 21, 14, 0),\n", - " datetime.datetime(2018, 4, 21, 15, 0),\n", - " datetime.datetime(2018, 4, 21, 16, 0),\n", - " datetime.datetime(2018, 4, 21, 17, 0),\n", - " datetime.datetime(2018, 4, 21, 18, 0),\n", - " datetime.datetime(2018, 4, 21, 19, 0),\n", - " datetime.datetime(2018, 4, 21, 20, 0),\n", - " 
datetime.datetime(2018, 4, 21, 21, 0),\n", - " datetime.datetime(2018, 4, 21, 22, 0),\n", - " datetime.datetime(2018, 4, 21, 23, 0),\n", - " datetime.datetime(2018, 4, 22, 0, 0),\n", - " datetime.datetime(2018, 4, 22, 1, 0),\n", - " datetime.datetime(2018, 4, 22, 2, 0),\n", - " datetime.datetime(2018, 4, 22, 3, 0),\n", - " datetime.datetime(2018, 4, 22, 4, 0),\n", - " datetime.datetime(2018, 4, 22, 5, 0),\n", - " datetime.datetime(2018, 4, 22, 6, 0),\n", - " datetime.datetime(2018, 4, 22, 7, 0),\n", - " datetime.datetime(2018, 4, 22, 8, 0),\n", - " datetime.datetime(2018, 4, 22, 9, 0),\n", - " datetime.datetime(2018, 4, 22, 10, 0),\n", - " datetime.datetime(2018, 4, 22, 11, 0),\n", - " datetime.datetime(2018, 4, 22, 12, 0),\n", - " datetime.datetime(2018, 4, 22, 13, 0),\n", - " datetime.datetime(2018, 4, 22, 14, 0),\n", - " datetime.datetime(2018, 4, 22, 15, 0),\n", - " datetime.datetime(2018, 4, 22, 16, 0),\n", - " datetime.datetime(2018, 4, 22, 17, 0),\n", - " datetime.datetime(2018, 4, 22, 18, 0),\n", - " datetime.datetime(2018, 4, 22, 19, 0),\n", - " datetime.datetime(2018, 4, 22, 20, 0),\n", - " datetime.datetime(2018, 4, 22, 21, 0),\n", - " datetime.datetime(2018, 4, 22, 22, 0),\n", - " datetime.datetime(2018, 4, 22, 23, 0),\n", - " datetime.datetime(2018, 4, 23, 0, 0),\n", - " datetime.datetime(2018, 4, 23, 1, 0),\n", - " datetime.datetime(2018, 4, 23, 2, 0),\n", - " datetime.datetime(2018, 4, 23, 3, 0),\n", - " datetime.datetime(2018, 4, 23, 4, 0),\n", - " datetime.datetime(2018, 4, 23, 5, 0),\n", - " datetime.datetime(2018, 4, 23, 6, 0),\n", - " datetime.datetime(2018, 4, 23, 7, 0),\n", - " datetime.datetime(2018, 4, 23, 8, 0),\n", - " datetime.datetime(2018, 4, 23, 9, 0),\n", - " datetime.datetime(2018, 4, 23, 10, 0),\n", - " datetime.datetime(2018, 4, 23, 11, 0),\n", - " datetime.datetime(2018, 4, 23, 12, 0),\n", - " datetime.datetime(2018, 4, 23, 13, 0),\n", - " datetime.datetime(2018, 4, 23, 14, 0),\n", - " datetime.datetime(2018, 4, 23, 15, 0),\n", - " datetime.datetime(2018, 4, 23, 16, 0),\n", - " datetime.datetime(2018, 4, 23, 17, 0),\n", - " datetime.datetime(2018, 4, 23, 18, 0),\n", - " datetime.datetime(2018, 4, 23, 19, 0),\n", - " datetime.datetime(2018, 4, 23, 20, 0),\n", - " datetime.datetime(2018, 4, 23, 21, 0),\n", - " datetime.datetime(2018, 4, 23, 22, 0),\n", - " datetime.datetime(2018, 4, 23, 23, 0),\n", - " datetime.datetime(2018, 4, 24, 0, 0),\n", - " datetime.datetime(2018, 4, 24, 1, 0),\n", - " datetime.datetime(2018, 4, 24, 2, 0),\n", - " datetime.datetime(2018, 4, 24, 3, 0),\n", - " datetime.datetime(2018, 4, 24, 4, 0),\n", - " datetime.datetime(2018, 4, 24, 5, 0),\n", - " datetime.datetime(2018, 4, 24, 6, 0),\n", - " datetime.datetime(2018, 4, 24, 7, 0),\n", - " datetime.datetime(2018, 4, 24, 8, 0),\n", - " datetime.datetime(2018, 4, 24, 9, 0),\n", - " datetime.datetime(2018, 4, 24, 10, 0),\n", - " datetime.datetime(2018, 4, 24, 11, 0),\n", - " datetime.datetime(2018, 4, 24, 12, 0),\n", - " datetime.datetime(2018, 4, 24, 13, 0),\n", - " datetime.datetime(2018, 4, 24, 14, 0),\n", - " datetime.datetime(2018, 4, 24, 15, 0),\n", - " datetime.datetime(2018, 4, 24, 16, 0),\n", - " datetime.datetime(2018, 4, 24, 17, 0),\n", - " datetime.datetime(2018, 4, 24, 18, 0),\n", - " datetime.datetime(2018, 4, 24, 19, 0),\n", - " datetime.datetime(2018, 4, 24, 20, 0),\n", - " datetime.datetime(2018, 4, 24, 21, 0),\n", - " datetime.datetime(2018, 4, 24, 22, 0),\n", - " datetime.datetime(2018, 4, 24, 23, 0),\n", - " datetime.datetime(2018, 4, 25, 0, 0),\n", - " 
datetime.datetime(2018, 4, 25, 1, 0),\n", - " datetime.datetime(2018, 4, 25, 2, 0),\n", - " datetime.datetime(2018, 4, 25, 3, 0),\n", - " datetime.datetime(2018, 4, 25, 4, 0),\n", - " datetime.datetime(2018, 4, 25, 5, 0),\n", - " datetime.datetime(2018, 4, 25, 6, 0),\n", - " datetime.datetime(2018, 4, 25, 7, 0),\n", - " datetime.datetime(2018, 4, 25, 8, 0),\n", - " datetime.datetime(2018, 4, 25, 9, 0),\n", - " datetime.datetime(2018, 4, 25, 10, 0),\n", - " datetime.datetime(2018, 4, 25, 11, 0),\n", - " datetime.datetime(2018, 4, 25, 12, 0),\n", - " datetime.datetime(2018, 4, 25, 13, 0),\n", - " datetime.datetime(2018, 4, 25, 14, 0),\n", - " datetime.datetime(2018, 4, 25, 15, 0),\n", - " datetime.datetime(2018, 4, 25, 16, 0),\n", - " datetime.datetime(2018, 4, 25, 17, 0),\n", - " datetime.datetime(2018, 4, 25, 18, 0),\n", - " datetime.datetime(2018, 4, 25, 19, 0),\n", - " datetime.datetime(2018, 4, 25, 20, 0),\n", - " datetime.datetime(2018, 4, 25, 21, 0),\n", - " datetime.datetime(2018, 4, 25, 22, 0),\n", - " datetime.datetime(2018, 4, 25, 23, 0),\n", - " datetime.datetime(2018, 4, 26, 0, 0),\n", - " datetime.datetime(2018, 4, 26, 1, 0),\n", - " datetime.datetime(2018, 4, 26, 2, 0),\n", - " datetime.datetime(2018, 4, 26, 3, 0),\n", - " datetime.datetime(2018, 4, 26, 4, 0),\n", - " datetime.datetime(2018, 4, 26, 5, 0),\n", - " datetime.datetime(2018, 4, 26, 6, 0),\n", - " datetime.datetime(2018, 4, 26, 7, 0),\n", - " datetime.datetime(2018, 4, 26, 8, 0),\n", - " datetime.datetime(2018, 4, 26, 9, 0),\n", - " datetime.datetime(2018, 4, 26, 10, 0),\n", - " datetime.datetime(2018, 4, 26, 11, 0),\n", - " datetime.datetime(2018, 4, 26, 12, 0),\n", - " datetime.datetime(2018, 4, 26, 13, 0),\n", - " datetime.datetime(2018, 4, 26, 14, 0),\n", - " datetime.datetime(2018, 4, 26, 15, 0),\n", - " datetime.datetime(2018, 4, 26, 16, 0),\n", - " datetime.datetime(2018, 4, 26, 17, 0),\n", - " datetime.datetime(2018, 4, 26, 18, 0),\n", - " datetime.datetime(2018, 4, 26, 19, 0),\n", - " datetime.datetime(2018, 4, 26, 20, 0),\n", - " datetime.datetime(2018, 4, 26, 21, 0),\n", - " datetime.datetime(2018, 4, 26, 22, 0),\n", - " datetime.datetime(2018, 4, 26, 23, 0),\n", - " datetime.datetime(2018, 4, 27, 0, 0),\n", - " datetime.datetime(2018, 4, 27, 1, 0),\n", - " datetime.datetime(2018, 4, 27, 2, 0),\n", - " datetime.datetime(2018, 4, 27, 3, 0),\n", - " datetime.datetime(2018, 4, 27, 4, 0),\n", - " datetime.datetime(2018, 4, 27, 5, 0),\n", - " datetime.datetime(2018, 4, 27, 6, 0),\n", - " datetime.datetime(2018, 4, 27, 7, 0),\n", - " datetime.datetime(2018, 4, 27, 8, 0),\n", - " datetime.datetime(2018, 4, 27, 9, 0),\n", - " datetime.datetime(2018, 4, 27, 10, 0),\n", - " datetime.datetime(2018, 4, 27, 11, 0),\n", - " datetime.datetime(2018, 4, 27, 12, 0),\n", - " datetime.datetime(2018, 4, 27, 13, 0),\n", - " datetime.datetime(2018, 4, 27, 14, 0),\n", - " datetime.datetime(2018, 4, 27, 15, 0),\n", - " datetime.datetime(2018, 4, 27, 16, 0),\n", - " datetime.datetime(2018, 4, 27, 17, 0),\n", - " datetime.datetime(2018, 4, 27, 18, 0),\n", - " datetime.datetime(2018, 4, 27, 19, 0),\n", - " datetime.datetime(2018, 4, 27, 20, 0),\n", - " datetime.datetime(2018, 4, 27, 21, 0),\n", - " datetime.datetime(2018, 4, 27, 22, 0),\n", - " datetime.datetime(2018, 4, 27, 23, 0),\n", - " datetime.datetime(2018, 4, 28, 0, 0),\n", - " datetime.datetime(2018, 4, 28, 1, 0),\n", - " datetime.datetime(2018, 4, 28, 2, 0),\n", - " datetime.datetime(2018, 4, 28, 3, 0),\n", - " datetime.datetime(2018, 4, 28, 4, 0),\n", - " 
datetime.datetime(2018, 4, 28, 5, 0),\n", - " datetime.datetime(2018, 4, 28, 6, 0),\n", - " datetime.datetime(2018, 4, 28, 7, 0),\n", - " datetime.datetime(2018, 4, 28, 8, 0),\n", - " datetime.datetime(2018, 4, 28, 9, 0),\n", - " datetime.datetime(2018, 4, 28, 10, 0),\n", - " datetime.datetime(2018, 4, 28, 11, 0),\n", - " datetime.datetime(2018, 4, 28, 12, 0),\n", - " datetime.datetime(2018, 4, 28, 13, 0),\n", - " datetime.datetime(2018, 4, 28, 14, 0),\n", - " datetime.datetime(2018, 4, 28, 15, 0),\n", - " datetime.datetime(2018, 4, 28, 16, 0),\n", - " datetime.datetime(2018, 4, 28, 17, 0),\n", - " datetime.datetime(2018, 4, 28, 18, 0),\n", - " datetime.datetime(2018, 4, 28, 19, 0),\n", - " datetime.datetime(2018, 4, 28, 20, 0),\n", - " datetime.datetime(2018, 4, 28, 21, 0),\n", - " datetime.datetime(2018, 4, 28, 22, 0),\n", - " datetime.datetime(2018, 4, 28, 23, 0),\n", - " datetime.datetime(2018, 4, 29, 0, 0),\n", - " datetime.datetime(2018, 4, 29, 1, 0),\n", - " datetime.datetime(2018, 4, 29, 2, 0),\n", - " datetime.datetime(2018, 4, 29, 3, 0),\n", - " datetime.datetime(2018, 4, 29, 4, 0),\n", - " datetime.datetime(2018, 4, 29, 5, 0),\n", - " datetime.datetime(2018, 4, 29, 6, 0),\n", - " datetime.datetime(2018, 4, 29, 7, 0),\n", - " datetime.datetime(2018, 4, 29, 8, 0),\n", - " datetime.datetime(2018, 4, 29, 9, 0),\n", - " datetime.datetime(2018, 4, 29, 10, 0),\n", - " datetime.datetime(2018, 4, 29, 11, 0),\n", - " datetime.datetime(2018, 4, 29, 12, 0),\n", - " datetime.datetime(2018, 4, 29, 13, 0),\n", - " datetime.datetime(2018, 4, 29, 14, 0),\n", - " datetime.datetime(2018, 4, 29, 15, 0),\n", - " datetime.datetime(2018, 4, 29, 16, 0),\n", - " datetime.datetime(2018, 4, 29, 17, 0),\n", - " datetime.datetime(2018, 4, 29, 18, 0),\n", - " datetime.datetime(2018, 4, 29, 19, 0),\n", - " datetime.datetime(2018, 4, 29, 20, 0),\n", - " datetime.datetime(2018, 4, 29, 21, 0),\n", - " datetime.datetime(2018, 4, 29, 22, 0),\n", - " datetime.datetime(2018, 4, 29, 23, 0),\n", - " datetime.datetime(2018, 4, 30, 0, 0),\n", - " datetime.datetime(2018, 4, 30, 1, 0),\n", - " datetime.datetime(2018, 4, 30, 2, 0),\n", - " datetime.datetime(2018, 4, 30, 3, 0),\n", - " datetime.datetime(2018, 4, 30, 4, 0),\n", - " datetime.datetime(2018, 4, 30, 5, 0),\n", - " datetime.datetime(2018, 4, 30, 6, 0),\n", - " datetime.datetime(2018, 4, 30, 7, 0),\n", - " datetime.datetime(2018, 4, 30, 8, 0),\n", - " datetime.datetime(2018, 4, 30, 9, 0),\n", - " datetime.datetime(2018, 4, 30, 10, 0),\n", - " datetime.datetime(2018, 4, 30, 11, 0),\n", - " datetime.datetime(2018, 4, 30, 12, 0),\n", - " datetime.datetime(2018, 4, 30, 13, 0),\n", - " datetime.datetime(2018, 4, 30, 14, 0),\n", - " datetime.datetime(2018, 4, 30, 15, 0),\n", - " datetime.datetime(2018, 4, 30, 16, 0),\n", - " datetime.datetime(2018, 4, 30, 17, 0),\n", - " datetime.datetime(2018, 4, 30, 18, 0),\n", - " datetime.datetime(2018, 4, 30, 19, 0),\n", - " datetime.datetime(2018, 4, 30, 20, 0),\n", - " datetime.datetime(2018, 4, 30, 21, 0),\n", - " datetime.datetime(2018, 4, 30, 22, 0),\n", - " datetime.datetime(2018, 4, 30, 23, 0)]" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "nessy_1.time" + "#obs_nes.time" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -7069,18 +6339,18 @@ "{'data': array([0]), 'units': ''}" ] }, - "execution_count": 5, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], 
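The 720-entry dump removed above is the motivation for the change: the new revision comments out the bare `obs_nes.time` call instead of printing every hourly timestamp. When a sanity check is still wanted, a lighter-weight inspection (assuming `obs_nes` was opened with `open_netcdf` as in the preceding cell) keeps the output readable:

```python
times = obs_nes.time                    # plain list of datetime.datetime objects
print(len(times), times[0], times[-1])  # 720 2018-04-01 00:00:00 2018-04-30 23:00:00
```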
"source": [ - "nessy_1.lev" + "obs_nes.lev" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -7138,18 +6408,18 @@ " 'axis': 'Y'}" ] }, - "execution_count": 6, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nessy_1.lat" + "obs_nes.lat" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -7221,18 +6491,18 @@ " 'axis': 'X'}" ] }, - "execution_count": 7, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nessy_1.lon" + "obs_nes.lon" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": {}, "outputs": [ { @@ -7593,7 +6863,7 @@ } ], "source": [ - "nessy_1.load()" + "obs_nes.load()" ] }, { @@ -7605,14 +6875,14 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Rank 000: Creating providentia_obs_file.nc\n", + "Rank 000: Creating prv_obs_file.nc\n", "Rank 000: NetCDF ready to write\n", "Rank 000: Dimensions done\n", "Rank 000: Writing ASTER_v3_altitude var (1/175)\n", @@ -7940,7 +7210,21 @@ "Rank 000: Var country data (81/175)\n", "Rank 000: Var country completed (81/175)\n", "Rank 000: Writing daily_native_max_gap_percent var (82/175)\n", - "Rank 000: Var daily_native_max_gap_percent created (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent created (82/175)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_ghost.py:570: UserWarning: WARNING!!! GHOST datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ "Rank 000: Var daily_native_max_gap_percent data (82/175)\n", "Rank 000: Var daily_native_max_gap_percent completed (82/175)\n", "Rank 000: Writing daily_native_representativity_percent var (83/175)\n", @@ -8319,35 +7603,12 @@ } ], "source": [ - "nessy_1.to_netcdf('providentia_obs_file.nc', info=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Experiments dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "exp_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Read" + "obs_nes.to_netcdf('prv_obs_file.nc', info=True)" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -8705,140 +7966,6619 @@ " fill: currentColor;\n", "}\n", "
    <xarray.Dataset>\n",
    -       "Dimensions:                 (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n",
    +       "Dimensions:                                                           (time: 720, station: 168, N_flag_codes: 186, N_qa_codes: 77)\n",
            "Coordinates:\n",
    -       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
    -       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
    -       "Data variables:\n",
    -       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
    -       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
    -       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    -       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    -       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
    -       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
    -       "    sconco3                 (station, time) float32 ...\n",
    -       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "  * time                                                              (time) datetime64[ns] ...\n",
    +       "  * station                                                           (station) float64 ...\n",
    +       "Dimensions without coordinates: N_flag_codes, N_qa_codes\n",
    +       "Data variables: (12/179)\n",
    +       "    latitude                                                          (station) float64 ...\n",
    +       "    longitude                                                         (station) float64 ...\n",
    +       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
    +       "    ...                                                                ...\n",
    +       "    street_width                                                      (station) float32 ...\n",
    +       "    terrain                                                           (station) object ...\n",
    +       "    vertical_datum                                                    (station) object ...\n",
    +       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
    +       "    flag                                                              (station, time, N_flag_codes) int64 ...\n",
    +       "    qa                                                                (station, time, N_qa_codes) int64 ...\n",
            "Attributes:\n",
    -       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
    -       "    institution:    Barcelona Supercomputing Center\n",
    -       "    source:         Experiment cams61_chimere_ph2\n",
    -       "    creator_name:   Dene R. Bowdalo\n",
    -       "    creator_email:  dene.bowdalo@bsc.es\n",
    -       "    conventions:    CF-1.7\n",
    -       "    data_version:   1.0\n",
    -       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    -       "    NCO:            4.7.2
title : Inverse distance weighting (4 neighbours) interpolated cams61_chimere_ph2 experiment data for the component sconco3 with reference to the measurement stations in the EBAS network in 2018-04.
institution : Barcelona Supercomputing Center
source : Experiment cams61_chimere_ph2
creator_name : Dene R. Bowdalo
creator_email : dene.bowdalo@bsc.es
conventions : CF-1.7
data_version : 1.0
history : Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc /gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc
NCO : 4.7.2
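Because the text/plain repr truncates the longer attributes, the full provenance above is easiest to recover programmatically. A small sketch, assuming xarray is imported as `xr` and `exp_path` is defined as earlier in the notebook:

```python
ds = xr.open_dataset(exp_path)
print(ds.attrs['title'])    # full interpolation description, untruncated
print(ds.attrs['history'])  # the ncks command recorded by NCO
```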
  • " ], "text/plain": [ - "\n", - "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", + "\n", + "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes = open_netcdf(path=exp_path, info=True, parallel_method='X')\n", + "exp_interp_nes" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "#exp_interp_nes.time" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, -22.10333333, -31.66861111,\n", + " 47.76666641, 46.677778 , 48.721111 , 47.529167 ,\n", + " 47.05407 , 46.693611 , 47.348056 , 47.973056 ,\n", + " 48.878611 , 48.106111 , 48.371111 , 48.334722 ,\n", + " 48.050833 , 47.838611 , 47.040277 , 47.06694444,\n", + " 49.877778 , 50.629421 , 50.503333 , 41.695833 ,\n", + " 32.27000046, 80.05000305, 46.5475 , 46.813056 ,\n", + " 47.479722 , 47.049722 , 47.0675 , 47.18961391,\n", + " -30.17254 , 16.86403 , 35.0381 , 49.73508444,\n", + " 49.573394 , 49.066667 , 54.925556 , 52.802222 ,\n", + " 47.914722 , 53.166667 , 50.65 , 54.4368 ,\n", + " 47.80149841, 47.4165 , -70.666 , 54.746495 ,\n", + " 81.6 , 55.693588 , 72.58000183, 56.290424 ,\n", + " 59.5 , 58.383333 , 39.54694 , 42.72056 ,\n", + " 39.87528 , 37.23722 , 43.43917 , 41.27417 ,\n", + " 42.31917 , 38.47278 , 39.08278 , 41.23889 ,\n", + " 41.39389 , 42.63472 , 37.05194 , 28.309 ,\n", + " 59.779167 , 60.53002 , 66.320278 , 67.97333333,\n", + " 48.5 , 49.9 , 47.266667 , 43.616667 ,\n", + " 47.3 , 46.65 , 45. 
, 45.8 ,\n", + " 48.633333 , 42.936667 , 48.70861111, 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , -75.62 , 51.149617 ,\n", + " 38.366667 , 35.316667 , 46.966667 , 46.91 ,\n", + " -0.20194 , 51.939722 , 53.32583 , 45.8 ,\n", + " 44.183333 , 37.571111 , 35.5182 , 42.805462 ,\n", + " -69.005 , 39.0319 , 24.2883 , 24.466941 ,\n", + " 36.53833389, 33.293917 , 55.37611111, 56.161944 ,\n", + " 57.135278 , 41.536111 , 36.0722 , 52.083333 ,\n", + " 53.333889 , 51.541111 , 52.3 , 51.974444 ,\n", + " 58.38853 , 65.833333 , 62.783333 , 78.90715 ,\n", + " 59. , 69.45 , 59.2 , 60.372386 ,\n", + " -72.0117 , 59.2 , -41.40819168, -77.83200073,\n", + " -45.0379982 , 51.814408 , 50.736444 , 54.753894 ,\n", + " 54.15 , 43.4 , 71.58616638, 63.85 ,\n", + " 67.883333 , 57.394 , 57.1645 , 57.9525 ,\n", + " 56.0429 , 60.0858 , 57.816667 , 64.25 ,\n", + " 59.728 , 45.566667 , 46.428611 , 46.299444 ,\n", + " 48.933333 , 49.15 , 49.05 , 47.96 ,\n", + " 71.32301331, 40.12498 , 19.53623009, -89.99694824,\n", + " 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'long_name': 'latitude',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, -6.56008333e+01,\n", + " -6.38819444e+01, 1.67666664e+01, 1.29722220e+01,\n", + " 1.59422220e+01, 9.92666700e+00, 1.29579400e+01,\n", + " 1.39150000e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 2.15888889e+00, 5.27897222e+00,\n", + " 2.61000833e+00, 2.96488600e+00, -3.20416700e+00,\n", + " 
-7.87000000e+00, -3.71305600e+00, -8.07500000e-01,\n", + " -4.77444400e+00, -3.03305600e+00, -3.20500000e+00,\n", + " -1.75333300e+00, 1.79444000e-01, 1.46305600e+00,\n", + " -4.69146200e+00, 2.92778000e-01, -3.24290000e+00,\n", + " 1.12194400e+00, 1.08223000e+00, -1.18531900e+00,\n", + " -2.61800000e+01, -1.43822800e+00, 2.30833330e+01,\n", + " 2.56666670e+01, 1.95833330e+01, 1.63200000e+01,\n", + " 1.00318100e+02, -1.02444440e+01, -9.89944000e+00,\n", + " 8.63333300e+00, 1.07000000e+01, 1.26597220e+01,\n", + " 1.26305000e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 2.06938900e+01,\n", + " 1.42184000e+01, 6.56666700e+00, 6.27722200e+00,\n", + " 5.85361100e+00, 4.50000000e+00, 4.92361100e+00,\n", + " 8.25200000e+00, 1.39166670e+01, 8.88333300e+00,\n", + " 1.18866800e+01, 1.15333330e+01, 3.00333330e+01,\n", + " 5.20000000e+00, 1.10781420e+01, 2.53510000e+00,\n", + " 9.51666700e+00, 1.74870804e+02, 1.66660004e+02,\n", + " 1.69684006e+02, 2.19724190e+01, 1.57395000e+01,\n", + " 1.75342640e+01, 2.20666670e+01, 2.19500000e+01,\n", + " 1.28918823e+02, 1.53333330e+01, 2.10666670e+01,\n", + " 1.19140000e+01, 1.47825000e+01, 1.24030000e+01,\n", + " 1.31480000e+01, 1.75052800e+01, 1.55666670e+01,\n", + " 1.97666670e+01, 1.54720000e+01, 1.48666670e+01,\n", + " 1.50033330e+01, 1.45386110e+01, 1.95833330e+01,\n", + " 2.02833330e+01, 2.22666670e+01, 1.78605560e+01,\n", + " -1.56611465e+02, -1.05236800e+02, -1.55576157e+02,\n", + " -2.47999992e+01, -1.24151001e+02, 1.03515700e+02,\n", + " 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'long_name': 'longitude',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading grid_edge_latitude var (1/6)\n", + "Rank 000: Loaded grid_edge_latitude var ((1125,))\n", + "Rank 000: Loading grid_edge_longitude var (2/6)\n", + "Rank 000: Loaded grid_edge_longitude var ((1125,))\n", + "Rank 000: Loading model_centre_latitude var (3/6)\n", + "Rank 000: Loaded model_centre_latitude var ((211, 351))\n", + "Rank 000: Loading model_centre_longitude var (4/6)\n", + "Rank 000: Loaded model_centre_longitude var ((211, 351))\n", + "Rank 000: Loading sconco3 var (5/6)\n", + "Rank 000: Loaded sconco3 var ((175, 720))\n", + "Rank 000: Loading station_reference var (6/6)\n", + "Rank 000: Loaded station_reference var ((175,))\n" + ] + } + ], + "source": [ + "exp_interp_nes.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating prv_exp_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing sconco3 var (1/2)\n", + "Rank 000: Var sconco3 created (1/2)\n", + "Rank 000: Var sconco3 data (1/2)\n", + "Rank 000: Var sconco3 completed (1/2)\n", + "Rank 
000: Writing station_reference var (2/2)\n", + "Rank 000: Var station_reference created (2/2)\n", + "Rank 000: Var station_reference data (2/2)\n", + "Rank 000: Var station_reference completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_providentia.py:587: UserWarning: WARNING!!! Providentia datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "exp_interp_nes.to_netcdf('prv_exp_file.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (time: 720, station: 175, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n",
            "Coordinates:\n",
            "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
    -       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
    +       "  * station                 (station) float64 0.0 1.0 2.0 ... 172.0 173.0 174.0\n",
    +       "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n",
            "Data variables:\n",
    -       "    grid_edge_latitude      (grid_edge) float64 ...\n",
    -       "    grid_edge_longitude     (grid_edge) float64 ...\n",
    -       "    latitude                (station) float64 ...\n",
    -       "    longitude               (station) float64 ...\n",
    -       "    model_centre_latitude   (model_latitude, model_longitude) float64 ...\n",
    -       "    model_centre_longitude  (model_latitude, model_longitude) float64 ...\n",
    +       "    lat                     (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    lon                     (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
    +       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
            "    sconco3                 (station, time) float32 ...\n",
    -       "    station_reference       (station) object ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
            "Attributes:\n",
            "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
            "    institution:    Barcelona Supercomputing Center\n",
    @@ -8890,1043 +15302,197 @@
            "    conventions:    CF-1.7\n",
            "    data_version:   1.0\n",
            "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    -       "    NCO:            4.7.2"
    -      ]
    -     },
    -     "execution_count": 18,
    -     "metadata": {},
    -     "output_type": "execute_result"
    -    }
    -   ],
    -   "source": [
    -    "xr.open_dataset(exp_path)"
    -   ]
    -  },
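The removed cells above and their replacements both follow the same pattern: the interpolated experiment file is opened once with plain xarray and once with NES. A minimal sketch of the two read paths, assuming `exp_path` points at the `sconco3_201804.nc` file named in the history attribute and that NES exposes `open_netcdf` at the package top level, as in these notebooks:

```python
import xarray as xr
from nes import open_netcdf  # assumption: top-level NES import, as in these notebooks

# Hypothetical path; the notebooks point exp_path at the EBAS-interpolated
# sconco3 file for April 2018 described in the repr above.
exp_path = 'sconco3_201804.nc'

# Plain xarray: lazy open, full Dataset repr (dims, coords, attributes).
ds = xr.open_dataset(exp_path)
print(ds.dims)

# NES: the same file through the Providentia points reader; variable data
# stays on disk until .load() is called.
nessy = open_netcdf(path=exp_path, info=True, parallel_method='X')
print(nessy.lat['data'].shape)  # one latitude per station
```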
    -  {
    -   "cell_type": "code",
    -   "execution_count": 11,
    -   "metadata": {},
    -   "outputs": [
    -    {
    -     "data": {
    -      "text/plain": [
    -       ""
    -      ]
    -     },
    -     "execution_count": 11,
    -     "metadata": {},
    -     "output_type": "execute_result"
    -    }
    -   ],
    -   "source": [
    -    "nessy_2 = open_netcdf(path=exp_path, info=True, parallel_method='X')\n",
    -    "nessy_2"
    -   ]
    -  },
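`parallel_method='X'` tells NES which axis to distribute when the script runs under MPI; the single `Rank 000` in the logs above shows these notebooks run on one process. A sketch of the same open launched in parallel, assuming the 'X'/'Y'/'T' axis options described in the NES tutorials:

```python
# open_parallel.py -- launch with, e.g.: mpirun -n 4 python open_parallel.py
# Assumption: parallel_method accepts 'X', 'Y' or 'T', as in the NES tutorials.
from nes import open_netcdf

exp_path = 'sconco3_201804.nc'  # hypothetical path
nessy = open_netcdf(path=exp_path, info=True, parallel_method='X')
nessy.load()  # each rank reads only its slice along the distributed axis
```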
    -  {
    -   "cell_type": "code",
    -   "execution_count": 12,
    -   "metadata": {},
    -   "outputs": [
    -    {
    -     "data": {
    -      "text/plain": [
    -       "[datetime.datetime(2018, 4, 1, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 1, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 2, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 3, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 4, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 5, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 6, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 7, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 8, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 9, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 10, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 11, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 12, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 13, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 14, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 15, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 16, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 17, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 18, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 19, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 20, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 21, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 22, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 23, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 24, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 25, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 26, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 27, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 28, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 29, 23, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 0, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 1, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 2, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 3, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 4, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 5, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 6, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 7, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 8, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 9, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 10, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 11, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 12, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 13, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 14, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 15, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 16, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 17, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 18, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 19, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 20, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 21, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 22, 0),\n",
    -       " datetime.datetime(2018, 4, 30, 23, 0)]"
    -      ]
    -     },
    -     "execution_count": 12,
    -     "metadata": {},
    -     "output_type": "execute_result"
    -    }
    -   ],
    -   "source": [
    -    "nessy_2.time"
    -   ]
    -  },
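The `time` accessor returns plain `datetime.datetime` objects, one per hour of April 2018 (720 in total, elided above). A standard-library sketch of how that axis can be rebuilt and checked against what NES returns:

```python
from datetime import datetime, timedelta

# Rebuild the expected hourly axis for April 2018: 30 days x 24 hours = 720 steps.
start = datetime(2018, 4, 1, 0, 0)
expected = [start + timedelta(hours=h) for h in range(30 * 24)]

assert len(expected) == 720
assert expected[0] == datetime(2018, 4, 1, 0, 0)
assert expected[-1] == datetime(2018, 4, 30, 23, 0)
# With the notebook's object in scope one could then assert:
# nessy_2.time == expected
```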
    -  {
    -   "cell_type": "code",
    -   "execution_count": 13,
    -   "metadata": {},
    -   "outputs": [
    -    {
    -     "data": {
    -      "text/plain": [
    -       "{'data': array([0]), 'units': ''}"
    -      ]
    -     },
    -     "execution_count": 13,
    -     "metadata": {},
    -     "output_type": "execute_result"
    -    }
    -   ],
    -   "source": [
    -    "nessy_2.lev"
    -   ]
    -  },
    -  {
    -   "cell_type": "code",
    -   "execution_count": 14,
    -   "metadata": {},
    -   "outputs": [
    -    {
    -     "data": {
    -      "text/plain": [
    -       "{'data': masked_array(data=[-64.24006   , -54.84846497, -22.10333333, -31.66861111,\n",
    -       "                     47.76666641,  46.677778  ,  48.721111  ,  47.529167  ,\n",
    -       "                     47.05407   ,  46.693611  ,  47.348056  ,  47.973056  ,\n",
    -       "                     48.878611  ,  48.106111  ,  48.371111  ,  48.334722  ,\n",
    -       "                     48.050833  ,  47.838611  ,  47.040277  ,  47.06694444,\n",
    -       "                     49.877778  ,  50.629421  ,  50.503333  ,  41.695833  ,\n",
    -       "                     32.27000046,  80.05000305,  46.5475    ,  46.813056  ,\n",
    -       "                     47.479722  ,  47.049722  ,  47.0675    ,  47.18961391,\n",
    -       "                    -30.17254   ,  16.86403   ,  35.0381    ,  49.73508444,\n",
    -       "                     49.573394  ,  49.066667  ,  54.925556  ,  52.802222  ,\n",
    -       "                     47.914722  ,  53.166667  ,  50.65      ,  54.4368    ,\n",
    -       "                     47.80149841,  47.4165    , -70.666     ,  54.746495  ,\n",
    -       "                     81.6       ,  55.693588  ,  72.58000183,  56.290424  ,\n",
    -       "                     59.5       ,  58.383333  ,  39.54694   ,  42.72056   ,\n",
    -       "                     39.87528   ,  37.23722   ,  43.43917   ,  41.27417   ,\n",
    -       "                     42.31917   ,  38.47278   ,  39.08278   ,  41.23889   ,\n",
    -       "                     41.39389   ,  42.63472   ,  37.05194   ,  28.309     ,\n",
    -       "                     59.779167  ,  60.53002   ,  66.320278  ,  67.97333333,\n",
    -       "                     48.5       ,  49.9       ,  47.266667  ,  43.616667  ,\n",
    -       "                     47.3       ,  46.65      ,  45.        ,  45.8       ,\n",
    -       "                     48.633333  ,  42.936667  ,  48.70861111,  44.56944444,\n",
    -       "                     46.81472778,  45.772223  ,  55.313056  ,  54.443056  ,\n",
    -       "                     50.596389  ,  54.334444  ,  57.734444  ,  52.503889  ,\n",
    -       "                     55.858611  ,  53.398889  ,  50.792778  ,  52.293889  ,\n",
    -       "                     51.781784  ,  52.298333  ,  55.79216   ,  52.950556  ,\n",
    -       "                     51.778056  ,  60.13922   , -75.62      ,  51.149617  ,\n",
    -       "                     38.366667  ,  35.316667  ,  46.966667  ,  46.91      ,\n",
    -       "                     -0.20194   ,  51.939722  ,  53.32583   ,  45.8       ,\n",
    -       "                     44.183333  ,  37.571111  ,  35.5182    ,  42.805462  ,\n",
    -       "                    -69.005     ,  39.0319    ,  24.2883    ,  24.466941  ,\n",
    -       "                     36.53833389,  33.293917  ,  55.37611111,  56.161944  ,\n",
    -       "                     57.135278  ,  41.536111  ,  36.0722    ,  52.083333  ,\n",
    -       "                     53.333889  ,  51.541111  ,  52.3       ,  51.974444  ,\n",
    -       "                     58.38853   ,  65.833333  ,  62.783333  ,  78.90715   ,\n",
    -       "                     59.        ,  69.45      ,  59.2       ,  60.372386  ,\n",
    -       "                    -72.0117    ,  59.2       , -41.40819168, -77.83200073,\n",
    -       "                    -45.0379982 ,  51.814408  ,  50.736444  ,  54.753894  ,\n",
    -       "                     54.15      ,  43.4       ,  71.58616638,  63.85      ,\n",
    -       "                     67.883333  ,  57.394     ,  57.1645    ,  57.9525    ,\n",
    -       "                     56.0429    ,  60.0858    ,  57.816667  ,  64.25      ,\n",
    -       "                     59.728     ,  45.566667  ,  46.428611  ,  46.299444  ,\n",
    -       "                     48.933333  ,  49.15      ,  49.05      ,  47.96      ,\n",
    -       "                     71.32301331,  40.12498   ,  19.53623009, -89.99694824,\n",
    -       "                     41.05410004,  21.5731    , -34.35348   ],\n",
    -       "              mask=False,\n",
    -       "        fill_value=1e+20),\n",
    -       " 'dimensions': ('station',),\n",
    -       " 'standard_name': 'latitude',\n",
    -       " 'units': 'decimal degrees North',\n",
    -       " 'long_name': 'latitude',\n",
    -       " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n",
    -       " 'axis': 'Y'}"
    -      ]
    -     },
    -     "execution_count": 14,
    -     "metadata": {},
    -     "output_type": "execute_result"
    -    }
    -   ],
    -   "source": [
    -    "nessy_2.lat"
    -   ]
    -  },
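The pair of deleted cells here (and the matching `nessy_2.lon` cell just below) dump the full coordinate dictionaries. As a compact reference, here is a minimal sketch, not part of the original notebook, of how those dictionaries are typically consumed; it assumes `nessy_2` is the points NES object opened earlier in this notebook:

import numpy as np

lat = nessy_2.lat['data']    # numpy masked array of station latitudes
lon = nessy_2.lon['data']    # numpy masked array of station longitudes
# The remaining keys hold the CF-style metadata shown in the dump above
print(nessy_2.lat['units'])  # 'decimal degrees North'
print(lat.shape, np.ma.is_masked(lat))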
    -  {
    -   "cell_type": "code",
    -   "execution_count": 15,
    -   "metadata": {},
    -   "outputs": [
    -    {
    -     "data": {
    +       "    NCO:            4.7.2\n",
    +       "    Conventions:    CF-1.7
    " + ], "text/plain": [ - "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, -6.56008333e+01,\n", - " -6.38819444e+01, 1.67666664e+01, 1.29722220e+01,\n", - " 1.59422220e+01, 9.92666700e+00, 1.29579400e+01,\n", - " 1.39150000e+01, 1.58822220e+01, 1.30161110e+01,\n", - " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", - " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", - " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", - " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", - " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", - " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", - " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", - " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", - " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", - " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", - " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", - " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", - " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", - " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", - " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", - " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", - " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", - " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", - " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", - " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", - " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", - " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", - " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", - " 1.41944000e-01, 2.15888889e+00, 5.27897222e+00,\n", - " 2.61000833e+00, 2.96488600e+00, -3.20416700e+00,\n", - " -7.87000000e+00, -3.71305600e+00, -8.07500000e-01,\n", - " -4.77444400e+00, -3.03305600e+00, -3.20500000e+00,\n", - " -1.75333300e+00, 1.79444000e-01, 1.46305600e+00,\n", - " -4.69146200e+00, 2.92778000e-01, -3.24290000e+00,\n", - " 1.12194400e+00, 1.08223000e+00, -1.18531900e+00,\n", - " -2.61800000e+01, -1.43822800e+00, 2.30833330e+01,\n", - " 2.56666670e+01, 1.95833330e+01, 1.63200000e+01,\n", - " 1.00318100e+02, -1.02444440e+01, -9.89944000e+00,\n", - " 8.63333300e+00, 1.07000000e+01, 1.26597220e+01,\n", - " 1.26305000e+01, 1.25656450e+01, 3.95905556e+01,\n", - " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", - " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", - " 2.11730560e+01, 2.59055560e+01, 2.06938900e+01,\n", - " 1.42184000e+01, 6.56666700e+00, 6.27722200e+00,\n", - " 5.85361100e+00, 4.50000000e+00, 4.92361100e+00,\n", - " 8.25200000e+00, 1.39166670e+01, 8.88333300e+00,\n", - " 1.18866800e+01, 1.15333330e+01, 3.00333330e+01,\n", - " 5.20000000e+00, 1.10781420e+01, 2.53510000e+00,\n", - " 9.51666700e+00, 1.74870804e+02, 1.66660004e+02,\n", - " 1.69684006e+02, 2.19724190e+01, 1.57395000e+01,\n", - " 1.75342640e+01, 2.20666670e+01, 2.19500000e+01,\n", - " 1.28918823e+02, 1.53333330e+01, 2.10666670e+01,\n", - " 1.19140000e+01, 1.47825000e+01, 1.24030000e+01,\n", - " 1.31480000e+01, 1.75052800e+01, 1.55666670e+01,\n", - " 1.97666670e+01, 1.54720000e+01, 1.48666670e+01,\n", - " 1.50033330e+01, 1.45386110e+01, 1.95833330e+01,\n", - " 2.02833330e+01, 2.22666670e+01, 1.78605560e+01,\n", - " -1.56611465e+02, -1.05236800e+02, -1.55576157e+02,\n", - " -2.47999992e+01, -1.24151001e+02, 1.03515700e+02,\n", - " 1.84896800e+01],\n", - " mask=False,\n", - " fill_value=1e+20),\n", - " 'dimensions': ('station',),\n", - " 'standard_name': 'longitude',\n", - " 'units': 'decimal degrees East',\n", - " 'long_name': 
'longitude',\n", - " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", - " 'axis': 'X'}" + "\n", + "Dimensions: (time: 720, station: 175, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + " * station (station) float64 0.0 1.0 2.0 ... 172.0 173.0 174.0\n", + "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2\n", + " Conventions: CF-1.7" ] }, - "execution_count": 15, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nessy_2.lon" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Loading grid_edge_latitude var (1/6)\n", - "Rank 000: Loaded grid_edge_latitude var ((1125,))\n", - "Rank 000: Loading grid_edge_longitude var (2/6)\n", - "Rank 000: Loaded grid_edge_longitude var ((1125,))\n", - "Rank 000: Loading model_centre_latitude var (3/6)\n", - "Rank 000: Loaded model_centre_latitude var ((211, 351))\n", - "Rank 000: Loading model_centre_longitude var (4/6)\n", - "Rank 000: Loaded model_centre_longitude var ((211, 351))\n", - "Rank 000: Loading sconco3 var (5/6)\n", - "Rank 000: Loaded sconco3 var ((175, 720))\n", - "Rank 000: Loading station_reference var (6/6)\n", - "Rank 000: Loaded station_reference var ((175,))\n" - ] - } - ], - "source": [ - "nessy_2.load()" + "xr.open_dataset('prv_exp_file.nc')" ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Write" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating providentia_exp_file.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n", - "Rank 000: Writing grid_edge_latitude var (1/6)\n", - "**ERROR** an error has occurred while writing the 'grid_edge_latitude' variable\n" - ] - }, - { - "ename": "ValueError", - "evalue": "cannot find dimension grid_edge in this group or parent groups", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 47\u001b[0m 
\u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0mdim\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdimensions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mdimname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'dimensions'", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 52\u001b[0;31m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'parent'", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mnessy_2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_netcdf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'providentia_exp_file.nc'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36mto_netcdf\u001b[0;34m(self, path, compression_level, serial, info, chunking)\u001b[0m\n\u001b[1;32m 1431\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1432\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1433\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__to_netcdf_py\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1434\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1435\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_info\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mold_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36m__to_netcdf_py\u001b[0;34m(self, path, chunking)\u001b[0m\n\u001b[1;32m 1381\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1382\u001b[0m \u001b[0;31m# Create variables\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1383\u001b[0;31m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_create_variables\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetcdf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1384\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1385\u001b[0m \u001b[0;31m# Create metadata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 439\u001b[0m \u001b[0;31m# print(\"**ERROR** an error hase occurred while writing the '{0}' variable\".format(var_name),\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 440\u001b[0m \u001b[0;31m# file=sys.stderr)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 442\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 443\u001b[0m \u001b[0mmsg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'WARNING!!! '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 379\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 380\u001b[0m var = netcdf.createVariable(var_name, var_dtype, var_dims,\n\u001b[0;32m--> 381\u001b[0;31m zlib=self.zip_lvl > 0, complevel=self.zip_lvl)\n\u001b[0m\u001b[1;32m 382\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmaster\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Dataset.createVariable\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Variable.__init__\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"cannot find dimension %s in this group or parent groups\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mdimname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;32mreturn\u001b[0m 
\u001b[0mdim\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mValueError\u001b[0m: cannot find dimension grid_edge in this group or parent groups" - ] - } - ], - "source": [ - "nessy_2.to_netcdf('providentia_exp_file.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/Jupyter_notebooks/4.1-to_providentia_latlon.ipynb b/Jupyter_notebooks/4.1-to_providentia_latlon.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..143d3eb02bb44a1323b3955112ee347176faf0c1 --- /dev/null +++ b/Jupyter_notebooks/4.1-to_providentia_latlon.ipynb @@ -0,0 +1,2009 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before \"to_providentia\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:            (station: 175, time: 720)\n",
    +       "Coordinates:\n",
    +       "  * time               (time) datetime64[ns] 2018-04-01 ... 2018-04-30T23:00:00\n",
    +       "Dimensions without coordinates: station\n",
    +       "Data variables:\n",
    +       "    latitude           (station) float64 -64.24 -54.85 -22.1 ... 21.57 -34.35\n",
    +       "    longitude          (station) float64 -56.62 -68.31 -65.6 ... 103.5 18.49\n",
    +       "    sconco3            (station, time) float32 ...\n",
    +       "    station_reference  (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment cams61_chimere_ph2\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            4.7.2
    " + ], + "text/plain": [ + "\n", + "Dimensions: (station: 175, time: 720)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T23:00:00\n", + "Dimensions without coordinates: station\n", + "Data variables:\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'\n", + "exp_ds = xr.open_dataset(exp_path).drop(['grid_edge_latitude', 'grid_edge_longitude', \n", + " 'model_centre_latitude', 'model_centre_longitude'])\n", + "exp_ds.to_netcdf('exp_path_interp_points.nc')\n", + "exp_ds" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## After \"to_providentia\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
    +       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
    +       "Data variables:\n",
    +       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
    +       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment cams61_chimere_ph2\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            4.7.2
    " + ], + "text/plain": [ + "\n", + "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Procedure" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Experiment before interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_before_path = \"/esarchive/recon/ecmwf/cams61/cams61_chimere_ph2/eu/hourly/sconco3/sconco3_201804.nc\"\n", + "var_ds_before = open_netcdf(var_ds_before_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_before.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_before" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Experiment after interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/load_nes.py:69: UserWarning: Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\n", + " warnings.warn(\"Parallel method cannot be 'Y' to create points NES. 
Setting it to 'X'\")\n" + ] + } + ], + "source": [ + "var_ds_after_path = 'exp_path_interp_points.nc'\n", + "var_ds_after = open_netcdf(var_ds_after_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Convert to Providentia format" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "model_centre_lat, model_centre_lon = var_ds_before.create_providentia_exp_centre_coordinates()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "grid_edge_lat, grid_edge_lon = var_ds_before.create_providentia_exp_grid_edge_coordinates()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after = var_ds_after.to_providentia(model_centre_lon, model_centre_lat, grid_edge_lon, grid_edge_lat)\n", + "var_ds_after" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_after.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[30. , 30. , 30. , ..., 30. , 30. , 30. ],\n", + " [30.2, 30.2, 30.2, ..., 30.2, 30.2, 30.2],\n", + " [30.4, 30.4, 30.4, ..., 30.4, 30.4, 30.4],\n", + " ...,\n", + " [71.6, 71.6, 71.6, ..., 71.6, 71.6, 71.6],\n", + " [71.8, 71.8, 71.8, ..., 71.8, 71.8, 71.8],\n", + " [72. , 72. , 72. , ..., 72. , 72. , 72. ]],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.model_centre_lat" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-25. , -24.8, -24.6, ..., 44.6, 44.8, 45. ],\n", + " [-25. , -24.8, -24.6, ..., 44.6, 44.8, 45. ],\n", + " [-25. , -24.8, -24.6, ..., 44.6, 44.8, 45. ],\n", + " ...,\n", + " [-25. , -24.8, -24.6, ..., 44.6, 44.8, 45. ],\n", + " [-25. , -24.8, -24.6, ..., 44.6, 44.8, 45. ],\n", + " [-25. , -24.8, -24.6, ..., 44.6, 44.8, 45. 
]],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.model_centre_lon" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[29.9, 30.1, 30.3, ..., 29.9, 29.9, 29.9],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.grid_edge_lat" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-25.1, -25.1, -25.1, ..., -24.7, -24.9, -25.1],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.grid_edge_lon" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating test_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing sconco3 var (1/2)\n", + "Rank 000: Var sconco3 created (1/2)\n", + "Rank 000: Var sconco3 data (1/2)\n", + "Rank 000: Var sconco3 completed (1/2)\n", + "Rank 000: Writing station_reference var (2/2)\n", + "Rank 000: Var station_reference created (2/2)\n", + "Rank 000: Var station_reference data (2/2)\n", + "Rank 000: Var station_reference completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_providentia.py:585: UserWarning: WARNING!!! Providentia datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "var_ds_after.to_netcdf('test_1.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (time: 720, station: 175, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
    +       "  * station                 (station) float64 0.0 1.0 2.0 ... 172.0 173.0 174.0\n",
    +       "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n",
    +       "Data variables:\n",
    +       "    lat                     (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    lon                     (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
    +       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 720, station: 175, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + " * station (station) float64 0.0 1.0 2.0 ... 172.0 173.0 174.0\n", + "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('test_1.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Difference between grid model edges from NES and Providentia IT" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([-3.81469725e-07, 3.81469729e-07, -7.62939450e-07, ...,\n", + " -3.81469725e-07, -3.81469725e-07, -3.81469725e-07])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path).grid_edge_latitude.values - var_ds_after.grid_edge_lat['data'].data" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/4.2-to_providentia_rotated.ipynb b/Jupyter_notebooks/4.2-to_providentia_rotated.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..bec4f6c93a8092bdeb8c73a2615ab5b518804157 --- /dev/null +++ b/Jupyter_notebooks/4.2-to_providentia_rotated.ipynb @@ -0,0 +1,2473 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before \"to_providentia\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:            (station: 173, time: 744)\n",
    +       "Coordinates:\n",
    +       "  * time               (time) datetime64[ns] 2018-01-01 ... 2018-01-31T23:00:00\n",
    +       "Dimensions without coordinates: station\n",
    +       "Data variables:\n",
    +       "    latitude           (station) float64 -64.24 -54.85 -22.1 ... 21.57 -34.35\n",
    +       "    longitude          (station) float64 -56.62 -68.31 -65.6 ... 103.5 18.49\n",
    +       "    sconco3            (station, time) float32 ...\n",
    +       "    station_reference  (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated a2...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment a2yb\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Mon Mar  1 13:45:41 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            netCDF Operators version 4.9.2 (Homepage = http://nco.sf....
    " + ], + "text/plain": [ + "\n", + "Dimensions: (station: 173, time: 744)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-01-01 ... 2018-01-31T23:00:00\n", + "Dimensions without coordinates: station\n", + "Data variables:\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated a2...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment a2yb\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Mon Mar 1 13:45:41 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: netCDF Operators version 4.9.2 (Homepage = http://nco.sf...." + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Experiment after interpolating with SNES (ideal)\n", + "exp_path = '/esarchive/recon/prov_interp/1.3.3/a2yb-regional-000/hourly/sconco3/EBAS/sconco3_201801.nc'\n", + "exp_ds = xr.open_dataset(exp_path).drop(['grid_edge_latitude', 'grid_edge_longitude', \n", + " 'model_centre_latitude', 'model_centre_longitude'])\n", + "exp_ds.to_netcdf('exp_path_interp_points_rotated.nc')\n", + "exp_ds" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## After \"to_providentia\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (grid_edge: 1245, station: 173, model_latitude: 271, model_longitude: 351, time: 744)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2018-01-01 ... 2018-01-31T2...\n",
    +       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
    +       "Data variables:\n",
    +       "    grid_edge_latitude      (grid_edge) float64 16.22 16.4 16.57 ... 16.3 16.22\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -22.21 -22.31 ... -22.05 -22.21\n",
    +       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 16.35 ....\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -22.18 ...\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated a2...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment a2yb\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Mon Mar  1 13:45:41 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            netCDF Operators version 4.9.2 (Homepage = http://nco.sf....
    " + ], + "text/plain": [ + "\n", + "Dimensions: (grid_edge: 1245, station: 173, model_latitude: 271, model_longitude: 351, time: 744)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-01-01 ... 2018-01-31T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated a2...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment a2yb\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Mon Mar 1 13:45:41 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: netCDF Operators version 4.9.2 (Homepage = http://nco.sf...." + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Experiment after applying to_providentia\n", + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Procedure" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Experiment before interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# Only one timestep\n", + "var_ds_before_path = '/esarchive/exp/monarch/a2yb/regional/hourly/sconco3/sconco3-000_2018010100.nc'\n", + "var_ds_before = open_netcdf(var_ds_before_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_before.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_before" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:       (time: 25, rlat: 271, rlon: 351, lev: 1)\n",
    +       "Coordinates:\n",
    +       "  * time          (time) datetime64[ns] 2018-01-01 ... 2018-01-02\n",
    +       "    lat           (rlat, rlon) float32 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
    +       "    lon           (rlat, rlon) float32 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
    +       "  * rlat          (rlat) float32 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
    +       "  * rlon          (rlon) float32 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
    +       "  * lev           (lev) float32 0.0\n",
    +       "Data variables:\n",
    +       "    sconco3       (time, lev, rlat, rlon) float32 ...\n",
    +       "    rotated_pole  |S1 b''
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 25, rlat: 271, rlon: 351, lev: 1)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-01-01 ... 2018-01-02\n", + " lat (rlat, rlon) float32 ...\n", + " lon (rlat, rlon) float32 ...\n", + " * rlat (rlat) float32 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float32 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + " * lev (lev) float32 0.0\n", + "Data variables:\n", + " sconco3 (time, lev, rlat, rlon) float32 ...\n", + " rotated_pole |S1 ..." + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(var_ds_before_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Experiment after interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/load_nes.py:69: UserWarning: Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\n", + " warnings.warn(\"Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\")\n" + ] + } + ], + "source": [ + "var_ds_after_path = 'exp_path_interp_points_rotated.nc'\n", + "var_ds_after = open_netcdf(var_ds_after_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Convert to Providentia format" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "model_centre_lat, model_centre_lon = var_ds_before.create_providentia_exp_centre_coordinates()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "grid_edge_lat, grid_edge_lon = var_ds_before.create_providentia_exp_grid_edge_coordinates()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after = var_ds_after.to_providentia(model_centre_lon, model_centre_lat, grid_edge_lon, grid_edge_lat)\n", + "var_ds_after" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_after.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[16.350338, 16.43293 , 16.515146, ..., 16.515146, 16.43293 ,\n", + " 16.350338],\n", + " [16.527426, 16.610239, 16.692677, ..., 16.692677, 16.610243,\n", + " 16.527426],\n", + " [16.704472, 16.787508, 16.870167, ..., 16.870167, 16.78751 ,\n", + " 16.704472],\n", + " ...,\n", + " [58.32095 , 58.472683, 58.62431 , ..., 58.62431 , 58.472683,\n", + " 58.32095 ],\n", + " [58.426285, 58.5782 , 58.730026, ..., 58.730026, 58.5782 ,\n", + " 58.426285],\n", + " [58.530792, 58.6829 , 58.83492 , ..., 58.83492 , 58.682903,\n", + " 58.530792]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", 
+ " 'standard_name': 'latitude'}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.model_centre_lat" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-22.181265, -22.016672, -21.851799, ..., 41.851795, 42.016666,\n", + " 42.18126 ],\n", + " [-22.27818 , -22.113186, -21.947905, ..., 41.9479 , 42.113174,\n", + " 42.27817 ],\n", + " [-22.375267, -22.209873, -22.04419 , ..., 42.044186, 42.209873,\n", + " 42.375263],\n", + " ...,\n", + " [-67.57767 , -67.397064, -67.21535 , ..., 87.21534 , 87.39706 ,\n", + " 87.57766 ],\n", + " [-67.90188 , -67.72247 , -67.54194 , ..., 87.54194 , 87.72246 ,\n", + " 87.90187 ],\n", + " [-68.228035, -68.04982 , -67.870514, ..., 87.87051 , 88.04982 ,\n", + " 88.228035]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.model_centre_lon" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[16.220402, 16.397398, 16.574354, ..., 16.38536 ,\n", + " 16.30307 , 16.220402],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32)}" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.grid_edge_lat" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-22.214966, -22.312012, -22.40918 , ..., -21.88617 ,\n", + " -22.05072 , -22.214966],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32)}" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.grid_edge_lon" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating test_2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing sconco3 var (1/2)\n", + "Rank 000: Var sconco3 created (1/2)\n", + "Rank 000: Var sconco3 data (1/2)\n", + "Rank 000: Var sconco3 completed (1/2)\n", + "Rank 000: Writing station_reference var (2/2)\n", + "Rank 000: Var station_reference created (2/2)\n", + "Rank 000: Var station_reference data (2/2)\n", + "Rank 000: Var station_reference completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_providentia.py:585: UserWarning: WARNING!!! Providentia datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "var_ds_after.to_netcdf('test_2.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (time: 744, station: 173, model_latitude: 271, model_longitude: 351, grid_edge: 1245)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2018-01-01 ... 2018-01-31T2...\n",
    +       "  * station                 (station) float64 0.0 1.0 2.0 ... 170.0 171.0 172.0\n",
    +       "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n",
    +       "Data variables:\n",
    +       "    lat                     (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    lon                     (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -22.18 ...\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 16.35 ....\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -22.21 -22.31 ... -22.05 -22.21\n",
    +       "    grid_edge_latitude      (grid_edge) float64 16.22 16.4 16.57 ... 16.3 16.22\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 744, station: 173, model_latitude: 271, model_longitude: 351, grid_edge: 1245)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-01-01 ... 2018-01-31T2...\n", + " * station (station) float64 0.0 1.0 2.0 ... 170.0 171.0 172.0\n", + "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('test_2.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Difference between grid model edges from NES and Providentia IT" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([-3.56718087e-06, -1.55645110e-06, -2.37911228e-06, ...,\n", + " -7.87491363e-07, -1.22642964e-06, -3.56718087e-06])" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path).grid_edge_latitude.values - var_ds_after.grid_edge_lat['data'].data" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/4.3-to_providentia_lcc.ipynb b/Jupyter_notebooks/4.3-to_providentia_lcc.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..c70f84783aec0ab537b7fda98f15735a917ac839 --- /dev/null +++ b/Jupyter_notebooks/4.3-to_providentia_lcc.ipynb @@ -0,0 +1,2098 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before \"to_providentia\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:            (station: 8643, time: 744)\n",
    +       "Coordinates:\n",
    +       "  * time               (time) datetime64[ns] 2021-01-01 ... 2021-01-31T23:00:00\n",
    +       "Dimensions without coordinates: station\n",
    +       "Data variables:\n",
    +       "    latitude           (station) float64 42.51 42.52 42.53 ... 40.95 28.09 28.48\n",
    +       "    longitude          (station) float64 1.539 1.565 1.717 ... -17.12 -16.26\n",
    +       "    sconco3            (station, time) float32 ...\n",
    +       "    station_reference  (station) object 'escaldes-engordany' ... 'palmetum'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated b0...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment b075\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Mon Feb 22 18:52:51 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            netCDF Operators version 4.9.2 (Homepage = http://nco.sf....
    " + ], + "text/plain": [ + "\n", + "Dimensions: (station: 8643, time: 744)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-31T23:00:00\n", + "Dimensions without coordinates: station\n", + "Data variables:\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated b0...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment b075\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Mon Feb 22 18:52:51 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: netCDF Operators version 4.9.2 (Homepage = http://nco.sf...." + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Experiment after interpolating with SNES (ideal)\n", + "exp_path = '/esarchive/recon/prov_interp/1.3.3/b075-eu-000/hourly/sconco3/*EEA/sconco3_202101.nc'\n", + "exp_ds = xr.open_dataset(exp_path).drop(['grid_edge_latitude', 'grid_edge_longitude', \n", + " 'model_centre_latitude', 'model_centre_longitude'])\n", + "exp_ds.to_netcdf('exp_path_interp_points_lcc.nc')\n", + "exp_ds" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## After \"to_providentia\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (grid_edge: 1753, station: 8643, model_latitude: 398, model_longitude: 478, time: 744)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2021-01-01 ... 2021-01-31T2...\n",
    +       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
    +       "Data variables:\n",
    +       "    grid_edge_latitude      (grid_edge) float64 19.59 19.69 ... 19.61 19.59\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -22.33 -22.35 ... -22.23 -22.33\n",
    +       "    latitude                (station) float64 42.51 42.52 42.53 ... 28.09 28.48\n",
    +       "    longitude               (station) float64 1.539 1.565 ... -17.12 -16.26\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 ...\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 ...\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'escaldes-engordany' ... 'palmetum'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated b0...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment b075\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Mon Feb 22 18:52:51 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            netCDF Operators version 4.9.2 (Homepage = http://nco.sf....
    " + ], + "text/plain": [ + "\n", + "Dimensions: (grid_edge: 1753, station: 8643, model_latitude: 398, model_longitude: 478, time: 744)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-31T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated b0...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment b075\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Mon Feb 22 18:52:51 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: netCDF Operators version 4.9.2 (Homepage = http://nco.sf...." + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Experiment after applying to_providentia\n", + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Procedure" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Experiment before interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# Only one timestep\n", + "var_ds_before_path = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/sconco3/sconco3_2021010100.nc'\n", + "var_ds_before = open_netcdf(var_ds_before_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_before.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_before" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:            (time: 48, y: 398, x: 478, lev: 1)\n",
    +       "Coordinates:\n",
    +       "  * time               (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n",
    +       "    lat                (y, x) float32 ...\n",
    +       "    lon                (y, x) float32 ...\n",
    +       "  * x                  (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n",
    +       "  * y                  (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n",
    +       "  * lev                (lev) float32 0.0\n",
    +       "Data variables:\n",
    +       "    sconco3            (time, lev, y, x) float32 ...\n",
    +       "    Lambert_conformal  int32 -2147483647
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 48, y: 398, x: 478, lev: 1)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n", + " lat (y, x) float32 ...\n", + " lon (y, x) float32 ...\n", + " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", + " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", + " * lev (lev) float32 0.0\n", + "Data variables:\n", + " sconco3 (time, lev, y, x) float32 ...\n", + " Lambert_conformal int32 ..." + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(var_ds_before_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Experiment after interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/load_nes.py:69: UserWarning: Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\n", + " warnings.warn(\"Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\")\n" + ] + } + ], + "source": [ + "var_ds_after_path = 'exp_path_interp_points_lcc.nc'\n", + "var_ds_after = open_netcdf(var_ds_after_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Convert to Providentia format" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "model_centre_lat, model_centre_lon = var_ds_before.create_providentia_exp_centre_coordinates()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "grid_edge_lat, grid_edge_lon = var_ds_before.create_providentia_exp_grid_edge_coordinates()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after = var_ds_after.to_providentia(model_centre_lon, model_centre_lat, grid_edge_lon, grid_edge_lat)\n", + "var_ds_after" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "var_ds_after.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[19.706436, 19.728317, 19.750084, ..., 16.264694, 16.22998 ,\n", + " 16.19516 ],\n", + " [19.805984, 19.827904, 19.849716, ..., 16.358276, 16.323502,\n", + " 16.288643],\n", + " [19.905594, 19.927544, 19.949394, ..., 16.45192 , 16.417084,\n", + " 16.382141],\n", + " ...,\n", + " [59.66961 , 59.70968 , 59.74953 , ..., 53.457195, 53.39534 ,\n", + " 53.33333 ],\n", + " [59.76223 , 59.802353, 59.842262, ..., 53.541912, 53.47999 ,\n", + " 53.417908],\n", + " [59.854744, 59.89492 , 59.934883, ..., 53.62653 , 53.56453 ,\n", + " 53.502373]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", + " 'standard_name': 
'latitude'}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.model_centre_lat" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-22.32898 , -22.223236, -22.117432, ..., 28.619202, 28.716614,\n", + " 28.813965],\n", + " [-22.352325, -22.24646 , -22.140533, ..., 28.655334, 28.752869,\n", + " 28.850311],\n", + " [-22.375702, -22.269714, -22.163696, ..., 28.69159 , 28.789185,\n", + " 28.886719],\n", + " ...,\n", + " [-39.438995, -39.25531 , -39.07132 , ..., 53.106964, 53.249176,\n", + " 53.391052],\n", + " [-39.518707, -39.334717, -39.15039 , ..., 53.210876, 53.35321 ,\n", + " 53.49518 ],\n", + " [-39.598724, -39.41443 , -39.229828, ..., 53.315125, 53.45755 ,\n", + " 53.59964 ]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.model_centre_lon" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[19.60175375, 19.701436 , 19.80117422, ...,\n", + " 19.64562332, 19.62374577, 19.60175375],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.grid_edge_lat" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-22.40084818, -22.4242827 , -22.44777427, ...,\n", + " -22.18916008, -22.29503477, -22.40084818],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.grid_edge_lon" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(8643, 744)" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "var_ds_after.variables['sconco3']['data'].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_providentia.py:585: UserWarning: WARNING!!! Providentia datasets cannot be written in parallel yet. 
Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating test_3.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing sconco3 var (1/2)\n", + "Rank 000: Var sconco3 created (1/2)\n", + "Rank 000: Var sconco3 data (1/2)\n", + "Rank 000: Var sconco3 completed (1/2)\n", + "Rank 000: Writing station_reference var (2/2)\n", + "Rank 000: Var station_reference created (2/2)\n", + "Rank 000: Var station_reference data (2/2)\n", + "Rank 000: Var station_reference completed (2/2)\n" + ] + } + ], + "source": [ + "var_ds_after.to_netcdf('test_3.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (time: 744, station: 8643, model_latitude: 398, model_longitude: 478, grid_edge: 1753)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2021-01-01 ... 2021-01-31T2...\n",
    +       "  * station                 (station) float64 0.0 1.0 ... 8.641e+03 8.642e+03\n",
    +       "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n",
    +       "Data variables:\n",
    +       "    lat                     (station) float64 42.51 42.52 42.53 ... 28.09 28.48\n",
    +       "    lon                     (station) float64 1.539 1.565 ... -17.12 -16.26\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 ...\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 ...\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -22.4 -22.42 ... -22.3 -22.4\n",
    +       "    grid_edge_latitude      (grid_edge) float64 19.6 19.7 19.8 ... 19.62 19.6\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'escaldes-engordany' ... 'palmetum'\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 744, station: 8643, model_latitude: 398, model_longitude: 478, grid_edge: 1753)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-31T2...\n", + " * station (station) float64 0.0 1.0 ... 8.641e+03 8.642e+03\n", + "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('test_3.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Difference between grid model edges from NES and Providentia IT" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([-0.0111262 , -0.01086253, -0.01059908, ..., -0.01117357,\n", + " -0.01114973, -0.0111262 ])" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path).grid_edge_latitude.values - var_ds_after.grid_edge_lat['data'].data" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd b/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd index 20819e939a150aab82341bed9e2f6ca797fa45d6..e0dd738c9111e89b23df78b9391b9bf9a031efdd 100644 --- a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd +++ b/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd @@ -32,8 +32,9 @@ module load cfunits/1.8-foss-2019b-Python-3.7.4 module load filelock/3.7.1-foss-2019b-Python-3.7.4 module load pyproj/2.5.0-foss-2019b-Python-3.7.4 module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 +module load NES/0.9.0-foss-2019b-Python-3.7.4 -export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} +#export PYTHONPATH=/esarchive/scratch/avilanova/software/NES:${PYTHONPATH} # DON'T USE ADDRESS BELOW. # DO USE TOKEN BELOW diff --git a/nes/create_nes.py b/nes/create_nes.py index 6c321a36a074179925118e9df300f39147db19de..41c42884f08a54b551a790b637ae58af0b55da1d 100644 --- a/nes/create_nes.py +++ b/nes/create_nes.py @@ -7,8 +7,35 @@ from .nc_projections import * def create_nes(comm=None, info=False, projection=None, parallel_method='Y', balanced=False, - strlen=75, times=None, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, **kwargs): - + strlen=75, times=None, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, + **kwargs): + """ + Create a Nes class from scratch. + + Parameters + ---------- + comm: MPI.Communicator + MPI Communicator. + info: bool + Indicates if you want to get reading/writing info. 
+    parallel_method : str
+        Indicates the parallelization method that you want. Default: 'Y'.
+        Accepted values: ['X', 'Y', 'T'].
+    balanced : bool
+        Indicates if you want a balanced parallelization or not.
+        Balanced dataset cannot be written in chunking mode.
+    avoid_first_hours : int
+        Number of hours to remove from first time steps.
+    avoid_last_hours : int
+        Number of hours to remove from last time steps.
+    first_level : int
+        Index of the first level to use.
+    last_level : int, None
+        Index of the last level to use. None if it is the last.
+    kwargs :
+        Projection dependent parameters to create it from scratch.
+    """
+
     if comm is None:
         comm = MPI.COMM_WORLD
     else:
diff --git a/nes/interpolation/horizontal_interpolation.py b/nes/interpolation/horizontal_interpolation.py
index e2aff54c1589fa1d7e1fedae54ca68dba7bb11cf..906ba5a085d881730bd496501632e69c3a7c8357 100644
--- a/nes/interpolation/horizontal_interpolation.py
+++ b/nes/interpolation/horizontal_interpolation.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 import sys
+import warnings
 import numpy as np
 import os
 import nes
@@ -12,25 +13,26 @@ from warnings import warn
 
 
 def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='NearestNeighbour', n_neighbours=4,
-                           info=False):
+                           info=False, to_providentia=False):
     """
     Horizontal interpolation from one grid to another one.
 
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     weight_matrix_path : str, None
-        Path to the weight matrix to read/create
+        Path to the weight matrix to read/create.
     kind : str
-        Kind of horizontal interpolation. choices = ['NearestNeighbour']
+        Kind of horizontal interpolation. Accepted values: ['NearestNeighbour'].
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.
     info: bool
         Indicates if you want to print extra info during the interpolation process.
+    to_providentia : bool
+        Indicates if you want the interpolated output in Providentia format. Default: False.
     """
+
     # Obtain weight matrix
     if self.parallel_method == 'T':
         weights, idx = get_weights_idx_t_axis(self, dst_grid, weight_matrix_path, kind, n_neighbours)
@@ -79,15 +81,26 @@ def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='Neares
             final_dst.variables[var_name]['data'] = final_dst.variables[var_name]['data'].reshape(
                 (src_shape[0], idx.shape[-1]))
         if isinstance(dst_grid, nes.PointsNesGHOST):
-            final_dst.erase_flags()
-            print('pre final shape:', final_dst.variables[var_name]['data'].shape)
-            final_dst.variables[var_name]['data'] = final_dst.variables[var_name]['data'].T
-            # final_dst.variables[var_name]['dtype'] = final_dst.variables[var_name]['data'].dtype
-            final_dst.variables[var_name]['dimensions'] = ('station', 'time')
-            print('final shape:', final_dst.variables[var_name]['data'].shape)
-
+            final_dst = final_dst.to_points()
+    final_dst.global_attrs = self.global_attrs
+
+    if to_providentia:
+        # self = experiment to interpolate (regular, rotated, etc.)
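+        # A minimal usage sketch of this branch, seen from the caller's side
+        # (the variable names and paths below are hypothetical, not part of NES):
+        #     exp = open_netcdf('experiment_lcc.nc')   # regular, rotated, LCC, ...
+        #     obs = open_netcdf('stations.nc')         # points destination grid
+        #     prov = exp.interpolate_horizontal(obs, to_providentia=True)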
+        # final_dst = interpolated experiment (points)
+        if isinstance(final_dst, nes.PointsNes):
+            model_centre_lat, model_centre_lon = self.create_providentia_exp_centre_coordinates()
+            grid_edge_lat, grid_edge_lon = self.create_providentia_exp_grid_edge_coordinates()
+            final_dst.to_providentia(model_centre_lon=model_centre_lon,
+                                     model_centre_lat=model_centre_lat,
+                                     grid_edge_lon=grid_edge_lon,
+                                     grid_edge_lat=grid_edge_lat,
+                                     )
+            final_dst.load()
+        else:
+            msg = "The final projection must be points to interpolate an experiment and get it in Providentia format."
+            warnings.warn(msg)
+
     return final_dst
 
 
@@ -97,20 +110,20 @@ def get_src_data(comm, var_data, idx, parallel_method):
     Parameters
     ----------
     comm : MPI.Communicator
-
+        MPI communicator.
     var_data : np.array
         Rank source data.
     idx : np.array
-        Index of the needed data in a 2D flatten way
+        Index of the needed data in a 2D flattened way.
     parallel_method: str
-        Source parallel method
+        Source parallel method.
 
     Returns
     -------
     np.array
-        Flatten source needed data
+        Flattened source data needed by the rank.
     """
+
     if parallel_method == 'T':
         var_data = var_data.flatten()
     else:
@@ -128,6 +141,7 @@ def get_src_data(comm, var_data, idx, parallel_method):
 
         var_data = comm.bcast(var_data)
     var_data = np.take(var_data, idx)
+
     return var_data
 
 
@@ -139,22 +153,22 @@ def get_weights_idx_t_axis(self, dst_grid, weight_matrix_path, kind, n_neighbour
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     weight_matrix_path : str, None
-        Path to the weight matrix to read/create
+        Path to the weight matrix to read/create.
     kind : str
-        Kind of horizontal interpolation. choices = ['NearestNeighbour']
+        Kind of horizontal interpolation. Accepted values: ['NearestNeighbour'].
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.
 
     Returns
     -------
     tuple
-        weights and source data index
-
+        Weights and source data index.
     """
+
     if weight_matrix_path is not None:
         with FileLock(weight_matrix_path.replace('.nc', '.lock')):
             if self.master:
@@ -209,22 +223,22 @@ def get_weights_idx_xy_axis(self, dst_grid, weight_matrix_path, kind, n_neighbou
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     weight_matrix_path : str, None
-        Path to the weight matrix to read/create
+        Path to the weight matrix to read/create.
     kind : str
-        Kind of horizontal interpolation. choices = ['NearestNeighbour']
+        Kind of horizontal interpolation. Accepted values: ['NearestNeighbour'].
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.
 
     Returns
     -------
     tuple
-        weights and source data index
-
+        Weights and source data index.
     """
+
     if isinstance(dst_grid, nes.PointsNes) and weight_matrix_path is not None:
         if self.master:
             warn("To point weight matrix cannot be saved.")
@@ -290,21 +304,23 @@ def get_weights_idx_xy_axis(self, dst_grid, weight_matrix_path, kind, n_neighbou
 
 def read_weight_matrix(weight_matrix_path, comm=None, parallel_method='T'):
     """
+    Read weight matrix.
 
     Parameters
     ----------
     weight_matrix_path : str
-        Path of the weight matrix
+        Path of the weight matrix.
     comm : MPI.Communicator
-        Communicator to read the weight matrix
+        Communicator to read the weight matrix.
     parallel_method : str
         Nes parallel method to read the weight matrix.
 
     Returns
     -------
     nes.Nes
-        Weight matrix
+        Weight matrix.
     """
+
     weight_matrix = nes.open_netcdf(path=weight_matrix_path, comm=comm, parallel_method=parallel_method,
                                     balanced=True)
     weight_matrix.load()
@@ -318,20 +334,20 @@ def create_nn_weight_matrix(self, dst_grid, n_neighbours=4, info=False):
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.
     info: bool
         Indicates if you want to print extra info during the interpolation process.
 
     Returns
     -------
     nes.Nes
-        Weight Matrix
-
+        Weight matrix.
     """
+
     if info and self.master:
         print("\tCreating Nearest Neighbour Weight Matrix with {0} neighbours".format(n_neighbours))
         sys.stdout.flush()
@@ -392,22 +408,24 @@ def create_nn_weight_matrix(self, dst_grid, n_neighbours=4, info=False):
     weight_matrix.variables['idx'] = {'data': idx_transf, 'units': ''}
     weight_matrix.lev = {'data': np.arange(inverse_dists_transf.shape[1]), 'units': ''}
     weight_matrix._lev = {'data': np.arange(inverse_dists_transf.shape[1]), 'units': ''}
+
     return weight_matrix
 
 
 def lon_lat_to_cartesian(lon, lat, radius=1.0):
     """
-    To calculate lon, lat coordinates of a point on a sphere
+    Calculate the Cartesian coordinates of a point on a sphere from its lon, lat coordinates.
 
     Parameters
     ----------
     lon : np.array
-        Longitude values
+        Longitude values.
     lat : np.array
-        Latitude values
+        Latitude values.
     radius : float
         Radius of the sphere to get the distances.
     """
+
     lon_r = np.radians(lon)
     lat_r = np.radians(lat)
 
diff --git a/nes/interpolation/vertical_interpolation.py b/nes/interpolation/vertical_interpolation.py
index 30eff96dd75ecd92ba2a2f6279a668ff04a72b8c..ebcb8f7b3a48d7c0709321530a08817a88d8df15 100644
--- a/nes/interpolation/vertical_interpolation.py
+++ b/nes/interpolation/vertical_interpolation.py
@@ -14,11 +14,12 @@ def add_4d_vertical_info(self, info_to_add):
     Parameters
     ----------
     self : nes.Nes
-        Source Nes object
+        Source Nes object.
     info_to_add : nes.Nes, str
         Nes object with the vertical information as variable or
         str with the path to the NetCDF file that contains the vertical data.
     """
+
     vertical_var = list(self.concatenate(info_to_add))
     self.vertical_var_name = vertical_var[0]
 
@@ -27,24 +28,25 @@ def add_4d_vertical_info(self, info_to_add):
 
 def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', extrapolate=None, info=None):
     """
-    Vertical interpolation method
+    Vertical interpolation method.
 
     Parameters
     ----------
     self : Nes
-        Source Nes object
+        Source Nes object.
     new_levels : list
-        List of new vertical levels
+        List of new vertical levels.
     new_src_vertical
+        Nes object or path to a NetCDF file with the vertical information to use as source.
     kind : str
        Vertical interpolation type.
     extrapolate : None, tuple, str
-        Extrapolate method (for non linear operations)
+        Extrapolate method (for non linear operations).
     info: None, bool
-        Indicates if you want to print extra information
+        Indicates if you want to print extra information.
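+
+    Example
+    -------
+    An illustrative call; the path and the level values are hypothetical::
+
+        nessy.add_4d_vertical_info('/path/to/vertical_info.nc')
+        nessy.interpolate_vertical(new_levels=[0, 50, 100, 250, 500, 1000],
+                                   kind='linear', extrapolate=None)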
""" + if info is None: - info = self.print_info + info = self.info if new_src_vertical is not None: self.add_4d_vertical_info(new_src_vertical) @@ -126,7 +128,7 @@ def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', for t in range(nt): # if info and self.rank == self.size - 1: - if self.print_info and self.master: + if self.info and self.master: print('\t\t{3} time step {0} ({1}/{2}))'.format(self.time[t], t+1, nt, var_name)) sys.stdout.flush() for j in range(ny): diff --git a/nes/load_nes.py b/nes/load_nes.py index 6d43b4d0841581d3d602a52274d86d37cd636c10..525e7267f45428bef4563acea7031071da9c0197 100644 --- a/nes/load_nes.py +++ b/nes/load_nes.py @@ -11,37 +11,38 @@ from .nc_projections import * def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, balanced=False): """ - Open a netCDF file + Open a netCDF file. Parameters ---------- path : str - Path to the NetCDF file to read + Path to the NetCDF file to read. comm : MPI.COMM - MPI communicator to use in that netCDF. Default: MPI.COMM_WORLD + MPI communicator to use in that netCDF. Default: MPI.COMM_WORLD. xarray : bool - (Not working) Indicates if you want to use xarray. Default: False + (Not working) Indicates if you want to use xarray. Default: False. info : bool - Indicates if you want to print (stdout) the reading/writing steps + Indicates if you want to print (stdout) the reading/writing steps. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default: 'Y' (over Y axis) - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T'] balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode. first_level : int - Index of the first level to use + Index of the first level to use. last_level : int, None Index of the last level to use. None if it is the last. Returns ------- Nes - Nes object. Variables read in lazy mode (only metadata) + Nes object. Variables read in lazy mode (only metadata). 
""" + if comm is None: comm = MPI.COMM_WORLD else: @@ -57,6 +58,7 @@ def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y', dataset = Dataset(path, format="NETCDF4", mode='r', parallel=False) else: dataset = Dataset(path, format="NETCDF4", mode='r', parallel=True, comm=comm, info=MPI.Info()) + if __is_rotated(dataset): # Rotated grids nessy = RotatedNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, @@ -72,6 +74,13 @@ def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y', parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + elif __is_points_providentia(dataset): + # Points - Providentia + nessy = PointsNesProvidentia(comm=comm, dataset=dataset, xarray=xarray, info=info, + parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, + balanced=balanced,) else: # Points - non-GHOST nessy = PointsNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, @@ -103,13 +112,14 @@ def __is_rotated(dataset): Parameters ---------- dataset : Dataset - netcdf4-python opened dataset object + netcdf4-python opened dataset object. Returns ------- value : bool - Indicated if the netCDF is a rotated one + Indicated if the netCDF is a rotated one. """ + if 'rotated_pole' in dataset.variables.keys(): return True else: @@ -123,13 +133,14 @@ def __is_points(dataset): Parameters ---------- dataset : Dataset - netcdf4-python opened dataset object + netcdf4-python opened dataset object. Returns ------- value : bool - Indicated if the netCDF is a points non-GHOST one + Indicated if the netCDF is a points non-GHOST one. """ + if 'station' in dataset.dimensions: return True else: @@ -143,19 +154,42 @@ def __is_points_ghost(dataset): Parameters ---------- dataset : Dataset - netcdf4-python opened dataset object + netcdf4-python opened dataset object. Returns ------- value : bool - Indicated if the netCDF is a points GHOST one + Indicated if the netCDF is a points GHOST one. """ + if 'N_flag_codes' in dataset.dimensions and 'N_qa_codes' in dataset.dimensions: return True else: return False +def __is_points_providentia(dataset): + """ + Check if the netCDF is a points dataset in Providentia format or not. + + Parameters + ---------- + dataset : Dataset + netcdf4-python opened dataset object. + + Returns + ------- + value : bool + Indicated if the netCDF is a points Providentia one. + """ + + if (('grid_edge' in dataset.dimensions) and ('model_latitude' in dataset.dimensions) + and ('model_longitude' in dataset.dimensions)): + return True + else: + return False + + def __is_lcc(dataset): """ Check if the netCDF is in Lambert Conformal Conic (LCC) projection or not. @@ -163,13 +197,14 @@ def __is_lcc(dataset): Parameters ---------- dataset : Dataset - netcdf4-python opened dataset object + netcdf4-python opened dataset object. Returns ------- value : bool - Indicated if the netCDF is a LCC one + Indicated if the netCDF is a LCC one. """ + if 'Lambert_conformal' in dataset.variables.keys(): return True else: @@ -183,13 +218,14 @@ def __is_mercator(dataset): Parameters ---------- dataset : Dataset - netcdf4-python opened dataset object + netcdf4-python opened dataset object. 
 
     Returns
     -------
     value : bool
-        Indicated if the netCDF is a Mercator one
+        Indicates if the netCDF is a Mercator one.
     """
+
     if 'mercator' in dataset.variables.keys():
         return True
     else:
         return False
@@ -199,33 +235,19 @@ def concatenate_netcdfs(nessy_list, comm=None, info=False, parallel_method='Y',
                         avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, balanced=False):
     """
-    Concatenate variables form different sources
+    Concatenate variables from different sources.
 
     Parameters
     ----------
     nessy_list : list
-        List of Nes objects or list of paths to concatenate
+        List of Nes objects or list of paths to concatenate.
     comm : MPI.Communicator
-
-    info : bool
-        Indicates if you want to print (stdout) the reading/writing steps
-    avoid_first_hours : int
-        Number of hours to remove from first time steps.
-    avoid_last_hours : int
-        Number of hours to remove from last time steps.
-    parallel_method : str
-        Indicates the parallelization method that you want. Default: 'Y' (over Y axis)
-        accepted values: ['X', 'Y', 'T']
-    balanced : bool
-        Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode
-    first_level : int
-        Index of the first level to use
-    last_level : int, None
-        Index of the last level to use. None if it is the last.
+        MPI Communicator.
 
     Returns
     -------
     Nes
-        Nes object with all the variables
+        Nes object with all the variables.
     """
     if not isinstance(nessy_list, list):
         raise AttributeError("You must pass a list of NES objects or paths.")
@@ -256,4 +278,5 @@ def concatenate_netcdfs(nessy_list, comm=None, info=False, parallel_method='Y',
             balanced=balanced
         )
         nessy_first.concatenate(aux_nessy)
+
     return nessy_first
diff --git a/nes/nc_projections/__init__.py b/nes/nc_projections/__init__.py
index 384265791dea5ec6fb4dcada082558f858e5628c..fc6bc15a527b2aa0489f7eaa6294b6f175b0ddb5 100644
--- a/nes/nc_projections/__init__.py
+++ b/nes/nc_projections/__init__.py
@@ -3,5 +3,6 @@ from .latlon_nes import LatLonNes
 from .rotated_nes import RotatedNes
 from .points_nes import PointsNes
 from .points_nes_ghost import PointsNesGHOST
+from .points_nes_providentia import PointsNesProvidentia
 from .lcc_nes import LCCNes
 from .mercator_nes import MercatorNes
diff --git a/nes/nc_projections/default_nes.py b/nes/nc_projections/default_nes.py
index 25280237464d9ede43933dddff8f7dcb5a01599e..81c47db19497d34fcbb0422a4f24697b0a8cef4b 100644
--- a/nes/nc_projections/default_nes.py
+++ b/nes/nc_projections/default_nes.py
@@ -21,28 +21,27 @@ class Nes(object):
 
     Attributes
     ----------
     comm : MPI.Communicator
+        MPI communicator.
     rank : int
-        MPI rank
+        MPI rank.
     master : bool
-        True when rank == 0
+        True when rank == 0.
     size : int
-        Size of the communicator
-
+        Size of the communicator.
-    print_info : bool
-        Indicates if you want to print reading/writing info
+    info : bool
+        Indicates if you want to print reading/writing info.
     is_xarray : bool
-        (Not working) Indicates if you want to use xarray as default
+        (Not working) Indicates if you want to use xarray as default.
     __ini_path : str
-        Path to the original file to read when open_netcdf is called
+        Path to the original file to read when open_netcdf is called.
     hours_start : int
-        Number of hours to avoid from the first original values
+        Number of hours to avoid from the first original values.
     hours_end : int
-        Number of hours to avoid from the last original values
+        Number of hours to avoid from the last original values.
     dataset : xr.Dataset
-        (not working) xArray Dataset
+        (not working) xArray Dataset.
     netcdf : Dataset
-        netcdf4-python Dataset
+        netcdf4-python Dataset.
     variables : dict
         Variables information.
         The variables are stored in a dictionary with the var_name as key and another dictionary with the information.
@@ -58,13 +57,13 @@ class Nes(object):
         Longitudes dictionary with the complete 'data' key for all the values and the rest of the attributes.
     parallel_method : str
         Parallel method to read/write.
-        Can be chosen any of the following axis to parallelize: 'T', 'Y' or 'X'
+        Any of the following axes can be chosen to parallelize: 'T', 'Y' or 'X'.
     read_axis_limits : dict
         Dictionary with the 4D limits of the rank data to read.
-        t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+        t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
     write_axis_limits : dict
         Dictionary with the 4D limits of the rank data to write.
-        t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+        t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
     time : list
         List of time steps of the rank data.
     lev : dict
@@ -76,11 +75,11 @@ class Nes(object):
     global_attrs : dict
         Global attributes with the attribute name as key and data as values.
     _var_dim : None, tuple
-        Tuple with the name of the Y and X dimensions for the variables
+        Tuple with the name of the Y and X dimensions for the variables.
     _lat_dim : None, tuple
-        Tuple with the name of the dimensions of the Latitude values
+        Tuple with the name of the dimensions of the Latitude values.
     _lon_dim : None, tuple
-        Tuple with the name of the dimensions of the Longitude values
+        Tuple with the name of the dimensions of the Longitude values.
     """
     def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y',
                  avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False,
@@ -91,30 +90,31 @@ class Nes(object):
 
         Parameters
         ----------
        comm: MPI.COMM
-            MPI Communicator
+            MPI Communicator.
        path: str
-            Path to the NetCDF to initialize the object
+            Path to the NetCDF to initialize the object.
        info: bool
-            Indicates if you want to get reading/writing info
+            Indicates if you want to get reading/writing info.
        dataset: Dataset
-            NetCDF4-python Dataset to initialize the class
+            NetCDF4-python Dataset to initialize the class.
        xarray: bool:
-            (Not working) Indicates if you want to use xarray as default
+            (Not working) Indicates if you want to use xarray as default.
        parallel_method : str
            Indicates the parallelization method that you want. Default over Y axis
-            accepted values: ['X', 'Y', 'T']
+            Accepted values: ['X', 'Y', 'T'].
        balanced : bool
-            Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode
+            Indicates if you want a balanced parallelization or not.
+            Balanced dataset cannot be written in chunking mode.
        avoid_first_hours : int
            Number of hours to remove from first time steps.
        avoid_last_hours : int
            Number of hours to remove from last time steps.
        first_level : int
-            Index of the first level to use
+            Index of the first level to use.
        last_level : int, None
            Index of the last level to use. None if it is the last.
        create_nes : bool
-            Indicates if ypu want to create the object from scratch (True) or trough an existen file.
+            Indicates if you want to create the object from scratch (True) or through an existing file.
        times : list, None
            List of times to substitute the current ones while creation.
        kwargs :
@@ -131,7 +131,7 @@ class Nes(object):
         self.size = self.comm.Get_size()
 
         # General info
-        self.print_info = info
+        self.info = info
         self.is_xarray = xarray
         self.__ini_path = path
 
@@ -160,7 +160,7 @@ class Nes(object):
 
-            self._lat, self._lon = self._create_centroids(**kwargs)
+            self._lat, self._lon = self._create_centre_coordinates(**kwargs)
 
             # Set axis limits for parallel reading
             self.read_axis_limits = self.get_read_axis_limits()
@@ -230,45 +230,49 @@ class Nes(object):
     def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False,
             parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None):
         """
-        Initialize the Nes class
+        Initialize the Nes class.
 
         Parameters
         ----------
        comm: MPI.COMM
-            MPI Communicator
+            MPI Communicator.
        path: str
-            Path to the NetCDF to initialize the object
+            Path to the NetCDF to initialize the object.
        info: bool
-            Indicates if you want to get reading/writing info
+            Indicates if you want to get reading/writing info.
        dataset: Dataset
-            NetCDF4-python Dataset to initialize the class
+            NetCDF4-python Dataset to initialize the class.
        xarray: bool:
-            (Not working) Indicates if you want to use xarray as default
+            (Not working) Indicates if you want to use xarray as default.
        avoid_first_hours : int
            Number of hours to remove from first time steps.
        avoid_last_hours : int
            Number of hours to remove from last time steps.
        parallel_method : str
            Indicates the parallelization method that you want. Default over Y axis
-            accepted values: ['X', 'Y', 'T']
+            Accepted values: ['X', 'Y', 'T'].
        balanced : bool
-            Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode
+            Indicates if you want a balanced parallelization or not.
+            Balanced dataset cannot be written in chunking mode.
        first_level : int
-            Index of the first level to use
+            Index of the first level to use.
        last_level : int, None
            Index of the last level to use. None if it is the last.
        create_nes : bool
-            Indicates if ypu want to create the object from scratch (True) or trough an existen file.
+            Indicates if you want to create the object from scratch (True) or through an existing file.
         """
+
         new = Nes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method,
                   avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level,
                   last_level=last_level, create=create_nes, balanced=balanced)
+
         return new
 
     def __del__(self):
         """
-        To delete the Nes object and close all the opened Datasets
+        To delete the Nes object and close all the opened Datasets.
         """
+
         self.close()
         for var_name, var_info in self.variables.items():
             del var_info['data']
@@ -298,8 +302,9 @@ class Nes(object):
         Returns
         -------
         state : dict
-            Dictionary with the class parameters
+            Dictionary with the class parameters.
         """
+
         d = self.__dict__
         state = {k: d[k] for k in d if k not in ['comm', 'variables', 'netcdf']}
 
@@ -307,13 +312,14 @@ class Nes(object):
 
     def __setstate__(self, state):
         """
-        Set the state of the class
+        Set the state of the class.
 
         Parameters
         ----------
        state: dict
-            Dictionary with the class parameters
+            Dictionary with the class parameters.
         """
+
         self.__dict__ = state
 
         return None
@@ -321,19 +327,19 @@ class Nes(object):
     def copy(self, copy_vars=False):
         """
         Copy the Nes object.
-
-        The copy will avoid to copy the communicator, dataset and variables by default
+        By default the copy does not include the communicator, the dataset nor the variables.
 
         Parameters
         ----------
        copy_vars: bool
-            Indicates if you want to copy the variables (in lazy mode)
+            Indicates if you want to copy the variables (in lazy mode).
 
         Returns
         -------
         nessy : Nes
-            Copy of the Nes object
+            Copy of the Nes object.
         """
+
         nessy = deepcopy(self)
         nessy.netcdf = None
         if copy_vars:
@@ -353,6 +359,7 @@ class Nes(object):
         """
         Erase the communicator and the parallelization indexes.
         """
+
         self.comm = None
         self.rank = 0
         self.master = 0
@@ -362,13 +369,14 @@ class Nes(object):
 
     def set_communicator(self, comm):
         """
-        Set a new communicator and the correspondent parallelization indexes
+        Set a new communicator and the corresponding parallelization indexes.
 
         Parameters
         ----------
        comm: MPI.COMM
-            Communicator to be set
+            Communicator to be set.
         """
+
         self.comm = comm
         self.rank = self.comm.Get_rank()
         self.master = self.rank == 0
@@ -385,8 +393,9 @@ class Nes(object):
         Parameters
         ----------
         levels : dict
-            Dictionary with the new level information to be set
+            Dictionary with the new level information to be set.
         """
+
         self._lev = deepcopy(levels)
         self.lev = deepcopy(levels)
 
@@ -399,8 +408,9 @@ class Nes(object):
         Parameters
         ----------
         time_bnds : list
-            List with the new time bounds information to be set
+            List with the new time bounds information to be set.
         """
+
         correct_format = True
         for time_bnd in np.array(time_bnds).flatten():
             if not isinstance(time_bnd, datetime.datetime):
@@ -423,6 +433,49 @@ class Nes(object):
 
         return None
 
+    def create_bounds(self, coordinates, inc, spatial_nv=2, inverse=False):
+        """
+        Calculate the boundary coordinates (vertices) for each centre value.
+
+        Parameters
+        ----------
+        coordinates : np.array
+            Coordinates in degrees (latitude or longitude).
+        inc : float
+            Increment between centre values.
+        spatial_nv : int
+            Optional parameter indicating the number of vertices that the
+            boundaries must have. Default: 2.
+        inverse : bool
+            Indicates if the corner order must be reversed (needed for some grid latitudes).
+
+        Returns
+        -------
+        bounds : np.array
+            Array with as many elements as vertices for each value of coords.
+        """
+
+        # Create new arrays moving the centres half increment less and more.
+        coords_left = coordinates - inc / 2
+        coords_right = coordinates + inc / 2
+
+        # Defining the number of corners needed. 2 for regular grids and 4 for irregular ones.
+        if spatial_nv == 2:
+            # Create an array of N arrays of 2 elements to store the floor and the ceil values for each cell
+            bounds = np.dstack((coords_left, coords_right))
+            bounds = bounds.reshape((len(coordinates), spatial_nv))
+        elif spatial_nv == 4:
+            # Create an array of N arrays of 4 elements to store the corner values for each cell
+            # It can be stored clockwise starting from the top-left element, or in inverse mode.
+            if inverse:
+                bounds = np.dstack((coords_left, coords_left, coords_right, coords_right))
+            else:
+                bounds = np.dstack((coords_left, coords_right, coords_right, coords_left))
+        else:
+            raise ValueError('The number of vertices of the boundaries must be 2 or 4.')
+
+        return bounds
+
     def free_vars(self, var_list):
         """
         Erase the selected variables from the variables information.
@@ -430,8 +483,9 @@ class Nes(object):
         Parameters
         ----------
         var_list : list, str
-            List (or single string) of the variables to be loaded
+            List (or single string) of the variables to be loaded.
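+
+        Example
+        -------
+        Drop the time bounds information once it has been read, as done
+        internally after reading 'time_bnds'::
+
+            self.free_vars('time_bnds')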
""" + if isinstance(var_list, str): var_list = [var_list] @@ -453,8 +507,9 @@ class Nes(object): Parameters ---------- var_list : list, str - List (or single string) of the variables to be loaded + List (or single string) of the variables to be loaded. """ + if isinstance(var_list, str): var_list = [var_list] @@ -466,13 +521,14 @@ class Nes(object): def get_time_interval(self): """ - Calculate the interrval of hours between time steps + Calculate the interrval of hours between time steps. Returns ------- int - Number of hours between time steps + Number of hours between time steps. """ + time_interval = self._time[1] - self._time[0] time_interval = int(time_interval.seconds // 3600) @@ -480,20 +536,21 @@ class Nes(object): def sel_time(self, time, copy=False): """ - To select only one time step + To select only one time step. Parameters ---------- time : datetime.datetime - Time stamp to select + Time stamp to select. copy : bool - Indicates if you want a copy with the selected time step (True) or to modify te existen one (False) + Indicates if you want a copy with the selected time step (True) or to modify te existing one (False). Returns ------- Nes - Nes object with the data (and metadata) of the selected time step + Nes object with the data (and metadata) of the selected time step. """ + if copy: aux_nessy = self.copy(copy_vars=False) aux_nessy.comm = self.comm @@ -527,8 +584,9 @@ class Nes(object): def last_time_step(self): """ - Modify variables to keep only the last time step + Modify variables to keep only the last time step. """ + if self.parallel_method == 'T': raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") aux_time = self._time[0].replace(hour=0, minute=0, second=0, microsecond=0) @@ -549,19 +607,20 @@ class Nes(object): def daily_statistic(self, op, type_op='calendar'): """ - Calculate daily statistic + Calculate daily statistic. Parameters ---------- op : str - Statistic to perform. Accepted values: "max", "mean" and "min" + Statistic to perform. Accepted values: "max", "mean" and "min". type_op : str - Type of statistic to perform. Accepted values: "calendar", "alltsteps", and "withoutt0" + Type of statistic to perform. Accepted values: "calendar", "alltsteps", and "withoutt0". - "calendar": Calculate the statistic using the time metadata. It will avoid single time step by day calculations - "alltsteps": Calculate a single time statistic with all the time steps. - "withoutt0": Calculate a single time statistic with all the time steps avoiding the first one. """ + if self.parallel_method == 'T': raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") time_interval = self.get_time_interval() @@ -668,6 +727,7 @@ class Nes(object): # ================================================================================================================== # Reading # ================================================================================================================== + def get_read_axis_limits(self): """ Calculate the 4D reading axis limits depending on if them have to balanced or not. @@ -676,8 +736,9 @@ class Nes(object): ------- dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. 
""" + if self.balanced: return self.get_read_axis_limits_balanced() else: @@ -685,14 +746,15 @@ class Nes(object): def get_read_axis_limits_unbalanced(self): """ - Calculate the 4D reading axis limits + Calculate the 4D reading axis limits. Returns ------- dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. """ + axis_limits = {'x_min': None, 'x_max': None, 'y_min': None, 'y_max': None, 'z_min': None, 'z_max': None, @@ -746,14 +808,15 @@ class Nes(object): def get_read_axis_limits_balanced(self): """ - Calculate the 4D reading balanced axis limits + Calculate the 4D reading balanced axis limits. Returns ------- dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. """ + fid_dist = {} to_add = None if self.parallel_method == 'Y': @@ -843,16 +906,17 @@ class Nes(object): Parameters ---------- hours : int - Number of hours to avoid + Number of hours to avoid. first : bool - Indicates if you want to avoid from the first hours (True) or from the last (False) - Default: True + Indicates if you want to avoid from the first hours (True) or from the last (False). + Default: True. Returns ------- int - Index of the time array + Index of the time array. """ + from datetime import timedelta if first: @@ -864,8 +928,9 @@ class Nes(object): def open(self): """ - Open the NetCDF + Open the NetCDF. """ + if self.is_xarray: self.dataset = self.__open_dataset() self.netcdf = None @@ -877,13 +942,14 @@ class Nes(object): def __open_dataset(self): """ - Open the NetCDF with xarray + Open the NetCDF with xarray. Returns ------- dataset : xr.Dataset - Opened dataset + Opened dataset. """ + if self.master: warnings.filterwarnings('ignore') # Disabling warnings while reading MONARCH original file dataset = open_dataset(self.__ini_path, decode_coords='all') @@ -897,18 +963,19 @@ class Nes(object): def __open_netcdf4(self, mode='r'): """ - Open the NetCDF with netcdf4-python + Open the NetCDF with netcdf4-python. Parameters ---------- mode : str Inheritance from mode parameter from https://unidata.github.io/netcdf4-python/#Dataset.__init__ - Default: 'r' (read-only) + Default: 'r' (read-only). Returns ------- netcdf : Dataset - Opened dataset + Opened dataset. """ + if self.size == 1: netcdf = Dataset(self.__ini_path, format="NETCDF4", mode=mode, parallel=False) else: @@ -920,8 +987,9 @@ class Nes(object): def close(self): """ - Close the NetCDF with netcdf4-python + Close the NetCDF with netcdf4-python. """ + if self.netcdf is not None: self.netcdf.close() self.netcdf = None @@ -933,21 +1001,21 @@ class Nes(object): Calculates the number of days since the first date in the 'time' list and store in new list: This is useful when the units are 'months since', - which cannot be transformed to dates using num2date + which cannot be transformed to dates using num2date. Parameter --------- time: list - Original time + Original time. units: str - CF compliant time units + CF compliant time units. calendar: str - Original calendar + Original calendar. Returns ------- time: list - CF compliant time + CF compliant time. """ start_date_str = time.units.split('since')[1].lstrip() @@ -971,17 +1039,17 @@ class Nes(object): def __parse_time(self, time): """ - Parses the time to be CF compliant + Parses the time to be CF compliant. 
 
         Parameters
         ----------
        time: str
-            Original time
+            Original time.
 
         Returns
         -------
         time : str
-            CF compliant time
+            CF compliant time.
         """
 
         units = time.units
@@ -999,18 +1067,19 @@ class Nes(object):
     @staticmethod
     def __parse_time_unit(t_units):
         """
-        Parses the time units to be CF compliant
+        Parses the time units to be CF compliant.
 
         Parameters
         ----------
         t_units : str
-            Original time units
+            Original time units.
 
         Returns
         -------
         t_units : str
-            CF compliant time units
+            CF compliant time units.
         """
+
         if 'h @' in t_units:
             t_units = 'hour since {0}-{1}-{2} {3}:{4}:{5} UTC'.format(
                 t_units[4:8], t_units[8:10], t_units[10:12], t_units[13:15], t_units[15:17], t_units[17:-4])
@@ -1019,13 +1088,14 @@ class Nes(object):
 
     def __get_time(self):
         """
-        Get the NetCDF file time values
+        Get the NetCDF file time values.
 
         Returns
         -------
         time : list
-            List of times (datetime.datetime) of the NetCDF data
+            List of times (datetime.datetime) of the NetCDF data.
         """
+
         if self.is_xarray:
             time = self.variables['time']
         else:
@@ -1043,18 +1113,19 @@ class Nes(object):
 
     def __get_time_bnds(self, create_nes=False):
         """
-        Get the NetCDF time bounds values
+        Get the NetCDF time bounds values.
 
         Parameters
         ----------
         create_nes : bool
-            Indicated if the object is created from scratch or from an existing file
+            Indicates if you want to create the object from scratch (True) or through an existing file.
 
         Returns
         -------
         time : list
-            List of time bounds (datetime) of the NetCDF data
+            List of time bounds (datetime) of the NetCDF data.
         """
+
         if self.is_xarray:
             time_bnds = self.variables['time_bnds']
         else:
@@ -1072,24 +1143,26 @@ class Nes(object):
                 time_bnds = None
             time_bnds = self.comm.bcast(time_bnds, root=0)
         self.free_vars('time_bnds')
+
         return time_bnds
 
     def _get_coordinate_dimension(self, possible_names):
         """
         Read the coordinate dimension data.
 
-        This will read the complete data of the coordinate
+        This will read the complete data of the coordinate.
 
         Parameters
         ----------
        possible_names: list, str
-            List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude'])
+            List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']).
 
         Returns
         -------
         nc_var : dict
             Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys.
         """
+
         if isinstance(possible_names, str):
             possible_names = [possible_names]
 
@@ -1113,19 +1186,20 @@ class Nes(object):
 
     def _get_coordinate_values(self, coordinate_info, coordinate_axis):
         """
-        Get the coordinate data of the current portion
+        Get the coordinate data of the current portion.
 
         Parameters
         ----------
         coordinate_info : dict, list
             Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys.
         coordinate_axis : str
-            Name of the coordinate to extract. Accepted values: ['Z', 'Y', 'X']
+            Name of the coordinate to extract. Accepted values: ['Z', 'Y', 'X'].
 
         Returns
         -------
         values : dict
-            Dictionary with the portion of data corresponding to the rank
+            Dictionary with the portion of data corresponding to the rank.
         """
+
         values = deepcopy(coordinate_info)
         if isinstance(coordinate_info, list):
             values = {'data': deepcopy(coordinate_info)}
@@ -1171,6 +1245,7 @@ class Nes(object):
             'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...},
             ...}
         """
+
         if self.is_xarray:
             variables = self.dataset.variables
         else:
@@ -1201,13 +1276,14 @@ class Nes(object):
         Parameters
         ----------
         var_name : str
-            Name of the variable to read
+            Name of the variable to read.
 
         Returns
         -------
         data: np.array
             Portion of the variable data corresponding to the rank.
         """
+
         nc_var = self.netcdf.variables[var_name]
         var_dims = nc_var.dimensions
 
@@ -1254,8 +1330,9 @@ class Nes(object):
         Parameters
         ----------
         var_list : list, str
-            List (or single string) of the variables to be loaded
+            List (or single string) of the variables to be loaded.
         """
+
         if self.netcdf is None:
             self.__open_dataset()
             close = True
@@ -1268,11 +1345,11 @@ class Nes(object):
             var_list = list(self.variables.keys())
 
         for i, var_name in enumerate(var_list):
-            if self.print_info:
+            if self.info:
                 print("Rank {0:03d}: Loading {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(var_list)))
             if self.variables[var_name]['data'] is None:
                 self.variables[var_name]['data'] = self._read_variable(var_name)
-            if self.print_info:
+            if self.info:
                 print("Rank {0:03d}: Loaded {1} var ({2})".format(
                     self.rank, var_name, self.variables[var_name]['data'].shape))
 
@@ -1289,7 +1366,7 @@ class Nes(object):
 
     def concatenate(self, aux_nessy):
         """
-        Concatenate different variables into the same nes object
+        Concatenate different variables into the same Nes object.
 
         Parameters
         ----------
@@ -1299,8 +1376,9 @@ class Nes(object):
         Returns
         -------
         list
-            List of var names added
+            List of var names added.
         """
+
         if isinstance(aux_nessy, str):
             aux_nessy = self.new(path=aux_nessy, comm=self.comm,
                                  parallel_method=self.parallel_method, xarray=self.is_xarray,
@@ -1326,18 +1404,19 @@ class Nes(object):
 
     def __get_global_attributes(self, create_nes=False):
         """
-        Read the netcdf global attributes
+        Read the netcdf global attributes.
 
         Parameters
         ----------
         create_nes : bool
-            Indicated if the object is created from scratch or from an existing file
+            Indicates if you want to create the object from scratch (True) or through an existing file.
 
         Returns
        -------
         gl_attrs : dict
-            Dictionary with the netCDF global attributes
+            Dictionary with the netCDF global attributes.
         """
+
         gl_attrs = {}
         if self.is_xarray:
             gl_attrs = self.dataset.attrs
@@ -1351,6 +1430,7 @@ class Nes(object):
     # ==================================================================================================================
     # Writing
     # ==================================================================================================================
+
     def get_write_axis_limits(self):
         """
-        Calculate the 4D writing axis limits depending on if them have to balanced or not.
+        Calculate the 4D writing axis limits, depending on whether they have to be balanced or not.
 
         Returns
         -------
         dict
             Dictionary with the 4D limits of the rank data to write.
-            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
         """
+
         if self.balanced:
             return self.get_write_axis_limits_balanced()
         else:
@@ -1368,14 +1449,15 @@ class Nes(object):
 
     def get_write_axis_limits_unbalanced(self):
         """
-        Calculate the 4D writing axis limits
+        Calculate the 4D writing axis limits.
 
         Returns
         -------
         dict
             Dictionary with the 4D limits of the rank data to write.
-            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
         """
+
         axis_limits = {'x_min': None, 'x_max': None,
                        'y_min': None, 'y_max': None,
                        'z_min': None, 'z_max': None,
@@ -1404,14 +1486,15 @@ class Nes(object):
 
     def get_write_axis_limits_balanced(self):
         """
-        Calculate the 4D reading balanced axis limits
+        Calculate the 4D writing balanced axis limits.
 
         Returns
         -------
         dict
-            Dictionary with the 4D limits of the rank data to read.
+            Dictionary with the 4D limits of the rank data to write.
-            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
         """
+
         fid_dist = {}
         if self.parallel_method == 'Y':
             len_to_split = self._lat['data'].shape[0]
@@ -1469,8 +1552,9 @@ class Nes(object):
         Parameters
         ----------
         netcdf : Dataset
-            netcdf4-python opened Dataset
+            netcdf4-python opened Dataset.
         """
+
         netcdf.createDimension('time', None)
         if self._time_bnds is not None:
             netcdf.createDimension('time_nv', 2)
@@ -1487,15 +1571,16 @@ class Nes(object):
         Parameters
         ----------
         netcdf : Dataset
-            netcdf4-python opened Dataset
+            netcdf4-python opened Dataset.
         """
+
         # TIMES
         time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
         time_var.units = 'hours since {0}'.format(
-            self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S"))
-        time_var.standard_name = "time"
+            self._time[self.get_time_id(self.hours_start, first=True)].strftime('%Y-%m-%d %H:%M:%S'))
+        time_var.standard_name = 'time'
         time_var.calendar = 'standard'
-        time_var.long_name = "time"
+        time_var.long_name = 'time'
         if self._time_bnds is not None:
             time_var.bounds = 'time_bnds'
         if self.size > 1:
@@ -1524,41 +1609,42 @@ class Nes(object):
         lev[:] = self._lev['data']
 
         # LATITUDES
-        lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
-        lats.units = "degrees_north"
-        lats.axis = "Y"
-        lats.long_name = "latitude coordinate"
-        lats.standard_name = "latitude"
+        lat = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
+        lat.units = 'degrees_north'
+        lat.axis = 'Y'
+        lat.long_name = 'latitude coordinate'
+        lat.standard_name = 'latitude'
         if self.size > 1:
-            lats.set_collective(True)
-        lats[:] = self._lat['data']
+            lat.set_collective(True)
+        lat[:] = self._lat['data']
 
         # LONGITUDES
-        lons = netcdf.createVariable('lon', np.float64, self._lon_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
-        lons.units = "degrees_east"
-        lons.axis = "X"
-        lons.long_name = "longitude coordinate"
-        lons.standard_name = "longitude"
+        lon = netcdf.createVariable('lon', np.float64, self._lon_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
+        lon.units = 'degrees_east'
+        lon.axis = 'X'
+        lon.long_name = 'longitude coordinate'
+        lon.standard_name = 'longitude'
         if self.size > 1:
-            lons.set_collective(True)
-        lons[:] = self._lon['data']
+            lon.set_collective(True)
+        lon[:] = self._lon['data']
 
         return None
 
     def _create_variables(self, netcdf, chunking=False):
         """
-        Create the netCDF file variables
+        Create the netCDF file variables.
 
         Parameters
         ----------
         netcdf : Dataset
-            netcdf4-python opened Dataset
+            netcdf4-python opened Dataset.
         chunking : bool
-            Indicates if you want to chunk the output netCDF
+            Indicates if you want to chunk the output netCDF.
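+
+        Example
+        -------
+        Not meant to be called directly; it is reached through to_netcdf
+        (the output path is hypothetical)::
+
+            nessy.to_netcdf('o3_chunked.nc', chunking=True)
+
+        When chunking is requested, the chunk sizes are agreed between the
+        ranks with a broadcast from rank 0.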
""" + for i, (var_name, var_dict) in enumerate(self.variables.items()): if var_dict['data'] is not None: - if self.print_info: + if self.info: print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) try: if not chunking: @@ -1574,18 +1660,18 @@ class Nes(object): chunk_size = self.comm.bcast(chunk_size, root=0) var = netcdf.createVariable(var_name, var_dict['data'].dtype, ('time', 'lev',) + self._var_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl, chunksizes=chunk_size) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} created ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) if self.size > 1: var.set_collective(True) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) for att_name, att_value in var_dict.items(): if att_name == 'data': - if self.print_info: + if self.info: print("Rank {0:03d}: Filling {1})".format(self.rank, var_name)) try: var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], @@ -1607,18 +1693,18 @@ class Nes(object): self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: var.setncattr(att_name, att_value) self._set_var_crs(var) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) except Exception as e: print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) - # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), # file=sys.stderr) raise e else: @@ -1626,66 +1712,73 @@ class Nes(object): msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) warnings.warn(msg) - def _create_centroids(self): + def _create_centre_coordinates(self): """ - Must be implemented on inner class + Must be implemented on inner class. """ + return None def _create_metadata(self, netcdf): """ - Must be implemented on inner class + Must be implemented on inner class. """ + return None def _set_crs(self, netcdf): """ - Must be implemented on inner class + Must be implemented on inner class. Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ + return None @staticmethod def _set_var_crs(var): """ - Must be implemented on inner class + Must be implemented on inner class. Parameters ---------- var : Variable netCDF4-python variable object. """ + return None def __to_netcdf_py(self, path, chunking=False): """ - Create the NetCDF using netcdf4-python methods + Create the NetCDF using netcdf4-python methods. Parameters ---------- path : str Path to the output netCDF file. chunking: bool - Indicates if you want to chunk the output netCDF + Indicates if you want to chunk the output netCDF. 
""" + # Open NetCDF - if self.print_info: + if self.info: print("Rank {0:03d}: Creating {1}".format(self.rank, path)) if self.size > 1: netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=True, comm=self.comm, info=MPI.Info()) else: netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=False) - if self.print_info: + if self.info: print("Rank {0:03d}: NetCDF ready to write".format(self.rank)) - # Create Dimensions + + # Create dimensions self._create_dimensions(netcdf) + # Create dimension variables self._create_dimension_variables(netcdf) - if self.print_info: + if self.info: print("Rank {0:03d}: Dimensions done".format(self.rank)) # Create variables @@ -1704,25 +1797,27 @@ class Nes(object): return None - def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + def to_netcdf(self, path, compression_level=0, serial=False, info=False, + chunking=False): """ - Write the netCDF output file + Write the netCDF output file. Parameters ---------- path : str - Path to the output netCDF file + Path to the output netCDF file. compression_level : int - Level of compression (0 to 9) Default: 0 (no compression) + Level of compression (0 to 9) Default: 0 (no compression). serial : bool - Indicates if you want to write in serial or not. Default: False + Indicates if you want to write in serial or not. Default: False. info : bool - Indicates if you want to print the information of each writing step by stdout Default: False + Indicates if you want to print the information of each writing step by stdout Default: False. chunking : bool - Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False. """ - old_info = self.print_info - self.print_info = info + + old_info = self.info + self.info = info self.zip_lvl = compression_level if self.is_xarray: @@ -1740,7 +1835,7 @@ class Nes(object): else: self.__to_netcdf_py(path, chunking=chunking) - self.print_info = old_info + self.info = old_info return None @@ -1753,12 +1848,13 @@ class Nes(object): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ + from eccodes import codes_grib_new_from_file from eccodes import codes_keys_iterator_new from eccodes import codes_keys_iterator_next @@ -1829,6 +1925,7 @@ class Nes(object): codes_release(gid) fout.close() fin.close() + return None def to_grib2(self, path, grib_keys, grib_template_path, info=False): @@ -1840,9 +1937,9 @@ class Nes(object): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ @@ -1868,6 +1965,7 @@ class Nes(object): data_list: dict Variables dictionary with all the data from all the ranks. 
""" + data_list = deepcopy(self.variables) for var_name in data_list.keys(): try: @@ -1916,8 +2014,8 @@ class Nes(object): else: data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -1936,9 +2034,10 @@ class Nes(object): data_list: dict Variables dictionary with all the data from all the ranks. """ + data_list = deepcopy(self.variables) for var_name in data_list.keys(): - if self.print_info and self.master: + if self.info and self.master: print("Gathering {0}".format(var_name)) shp_len = len(data_list[var_name]['data'].shape) try: @@ -2000,8 +2099,8 @@ class Nes(object): else: data_list[var_name]['data'] = np.concatenate(recvbuf, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -2014,44 +2113,39 @@ class Nes(object): # ================================================================================================================== # Extra Methods # ================================================================================================================== + def add_4d_vertical_info(self, info_to_add): """ To add the vertical information from other source. Parameters ---------- - self : nes.Nes - info_to_add : nes.Nes, str Nes object with the vertical information as variable or str with the path to the NetCDF file that contains the vertical data. """ + return vertical_interpolation.add_4d_vertical_info(self, info_to_add) def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', extrapolate=None, info=None): """ - Vertical interpolation method + Vertical interpolation method. Parameters ---------- self : Nes - Source Nes object - + Source Nes object. new_levels : list - List of new vertical levels - + List of new vertical levels. new_src_vertical - kind : str Vertical interpolation type. - extrapolate : None, tuple, str - Extrapolate method (for non linear operations) - + Extrapolate method (for non linear operations). info: None, bool - Indicates if you want to print extra information - + Indicates if you want to print extra information. """ + return vertical_interpolation.interpolate_vertical( self, new_levels, new_src_vertical=new_src_vertical, kind=kind, extrapolate=extrapolate, info=info) @@ -2063,15 +2157,16 @@ class Nes(object): Parameters ---------- dst_grid : nes.Nes - Final projection Nes object + Final projection Nes object. weight_matrix_path : str, None - Path to the weight matrix to read/create + Path to the weight matrix to read/create. kind : str - Kind of horizontal interpolation. choices = ['NearestNeighbour'] + Kind of horizontal interpolation. choices = ['NearestNeighbour']. 
n_neighbours: int - Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4 + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4. info: bool Indicates if you want to print extra info during the interpolation process. """ + return horizontal_interpolation.interpolate_horizontal( self, dst_grid, weight_matrix_path=weight_matrix_path, kind=kind, n_neighbours=n_neighbours, info=info) diff --git a/nes/nc_projections/latlon_nes.py b/nes/nc_projections/latlon_nes.py index b24796e66c648dbd8878bd73f6da69a86e5325a7..c31b06d89a6945ce1c59c9301c437ee4912135f7 100644 --- a/nes/nc_projections/latlon_nes.py +++ b/nes/nc_projections/latlon_nes.py @@ -23,23 +23,23 @@ class LatLonNes(Nes): avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ - Initialize the LatLonNes class + Initialize the LatLonNes class. Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int @@ -69,63 +69,121 @@ class LatLonNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. """ new = LatLonNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) return new - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. 
+ Calculate centre latitudes and longitudes from grid details. - Parameters + Returns ---------- - netcdf : Dataset - NetCDF object. + centre_lat : dict + Dictionary with data of centre latitudes in 1D + centre_lon : dict + Dictionary with data of centre longitudes in 1D """ - # Calculate center latitudes + # Calculate centre latitudes lat_c_orig = kwargs['lat_orig'] + (kwargs['inc_lat'] / 2) - self.center_lats = np.linspace( + centre_lat_data = np.linspace( lat_c_orig, lat_c_orig + (kwargs['inc_lat'] * (kwargs['n_lat'] - 1)), kwargs['n_lat']) + centre_lat = {'data': centre_lat_data} - # Calculate center longitudes + # Calculate centre longitudes lon_c_orig = kwargs['lon_orig'] + (kwargs['inc_lon'] / 2) - self.center_lons = np.linspace( + centre_lon_data = np.linspace( lon_c_orig, lon_c_orig + (kwargs['inc_lon'] * (kwargs['n_lon'] - 1)), kwargs['n_lon']) + centre_lon = {'data': centre_lon_data} + + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ + + model_centre_lon_data, model_centre_lat_data = np.meshgrid(self.lon['data'], self.lat['data']) + + # Calculate centre latitudes + model_centre_lat = {'data': model_centre_lat_data} + + # Calculate centre longitudes + model_centre_lon = {'data': model_centre_lon_data} + + return model_centre_lat, model_centre_lon + + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. 
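The centre-coordinate arithmetic above in isolation, using the same keyword names as the **kwargs with illustrative values:

import numpy as np

lat_orig, inc_lat, n_lat = -90.0, 1.0, 180           # illustrative grid description

lat_c_orig = lat_orig + (inc_lat / 2)                # centres sit half a cell inside the origin
centre_lat = np.linspace(lat_c_orig,
                         lat_c_orig + (inc_lat * (n_lat - 1)),
                         n_lat)
print(centre_lat[0], centre_lat[-1])                 # -89.5 89.5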
+ """ + + # Get grid resolution + inc_lon = np.abs(np.mean(np.diff(self.lon['data']))) + inc_lat = np.abs(np.mean(np.diff(self.lat['data']))) + + # Get bounds + lat_bounds = self.create_bounds(self.lat['data'], inc_lat) + lon_bounds = self.create_bounds(self.lon['data'], inc_lon) - return {'data': self.center_lats}, {'data': self.center_lons} + # Get latitudes for grid edge + left_edge_lat = np.append(lat_bounds.flatten()[::2], lat_bounds.flatten()[-1]) + right_edge_lat = np.flip(left_edge_lat, 0) + top_edge_lat = np.repeat(lat_bounds[-1][-1], len(self.lon['data']) - 1) + bottom_edge_lat = np.repeat(lat_bounds[0][0], len(self.lon['data'])) + lat_grid_edge = np.concatenate((left_edge_lat, top_edge_lat, right_edge_lat, bottom_edge_lat)) - def _create_bounds(self, **kwargs): + # Get longitudes for grid edge + left_edge_lon = np.repeat(lon_bounds[0][0], len(self.lat['data']) + 1) + top_edge_lon = lon_bounds.flatten()[1:-1:2] + right_edge_lon = np.repeat(lon_bounds[-1][-1], len(self.lat['data']) + 1) + bottom_edge_lon = np.flip(lon_bounds.flatten()[:-1:2], 0) + lon_grid_edge = np.concatenate((left_edge_lon, top_edge_lon, right_edge_lon, bottom_edge_lon)) - # This function is not being used - spatial_nv = 2 - boundary_lats = self.create_bounds(self.center_lats, kwargs['inc_lat'], spatial_nv) - boundary_lons = self.create_bounds(self.center_lons, kwargs['inc_lon'], spatial_nv) + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((lon_grid_edge, lat_grid_edge)).T + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} - return boundary_lats, boundary_lons + return grid_edge_lat, grid_edge_lon @staticmethod def _set_var_crs(var): @@ -148,7 +206,7 @@ class LatLonNes(Nes): Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ mapping = netcdf.createVariable('crs', 'c') @@ -168,9 +226,9 @@ class LatLonNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ diff --git a/nes/nc_projections/lcc_nes.py b/nes/nc_projections/lcc_nes.py index 6bf09f4c6de858bfeb89cfc430b876577edf9626..235d1be2a9f36d25c021e0cac2f468cadd35e949 100644 --- a/nes/nc_projections/lcc_nes.py +++ b/nes/nc_projections/lcc_nes.py @@ -38,18 +38,18 @@ class LCCNes(Nes): Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. 
avoid_last_hours : int @@ -90,27 +90,27 @@ class LCCNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. """ new = LCCNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, @@ -119,19 +119,19 @@ class LCCNes(Nes): def get_projection_data(self, create_nes, **kwargs): """ - Read the projection data + Read the projection data. Returns ------- projection : dict - Dictionary with the projection data + Dictionary with the projection data. """ if create_nes: projection = {'data': None, 'dimensions': (), 'grid_mapping_name': 'lambert_conformal_conic', - 'standard_parallel': [kwargs['lat_2'], kwargs['lat_1']], + 'standard_parallel': [kwargs['lat_1'], kwargs['lat_2']], 'longitude_of_central_meridian': kwargs['lon_0'], 'latitude_of_projection_origin': kwargs['lat_0'], } @@ -172,33 +172,33 @@ class LCCNes(Nes): # LCC Y COORDINATES y = netcdf.createVariable('y', self._y['data'].dtype, ('y',)) - y.long_name = "y coordinate of projection" + y.long_name = 'y coordinate of projection' if 'units' in self._y.keys(): y.units = Units(self._y['units'], formatted=True).units else: y.units = 'm' - y.standard_name = "projection_y_coordinate" + y.standard_name = 'projection_y_coordinate' if self.size > 1: y.set_collective(True) y[:] = self._y['data'] # LCC X COORDINATES x = netcdf.createVariable('x', self._x['data'].dtype, ('x',)) - x.long_name = "x coordinate of projection" + x.long_name = 'x coordinate of projection' if 'units' in self._x.keys(): x.units = Units(self._x['units'], formatted=True).units else: x.units = 'm' - x.standard_name = "projection_x_coordinate" + x.standard_name = 'projection_x_coordinate' if self.size > 1: x.set_collective(True) x[:] = self._x['data'] return None - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. 
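For reference, the corrected `standard_parallel` ordering produces a CF-style grid-mapping dictionary like the following (parallel and origin values are illustrative):

kwargs = {'lat_1': 37.0, 'lat_2': 43.0, 'lon_0': -3.0, 'lat_0': 40.0}  # illustrative

projection = {'data': None,
              'dimensions': (),
              'grid_mapping_name': 'lambert_conformal_conic',
              'standard_parallel': [kwargs['lat_1'], kwargs['lat_2']],  # first, then second parallel
              'longitude_of_central_meridian': kwargs['lon_0'],
              'latitude_of_projection_origin': kwargs['lat_0']}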
Parameters ---------- @@ -223,7 +223,7 @@ class LCCNes(Nes): projection = Proj( proj='lcc', ellps='WGS84', - R=6370000.00, + R=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) lat_1=kwargs['lat_1'], lat_2=kwargs['lat_2'], lon_0=kwargs['lon_0'], @@ -231,13 +231,94 @@ class LCCNes(Nes): to_meter=1, x_0=0, y_0=0, - a=6370000.00, - k_0=1.0) + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + k_0=1.0 + ) - # Calculate center latitudes and longitudes (UTM to LCC) - self.center_lons, self.center_lats = projection(x, y, inverse=True) + # Calculate centre latitudes and longitudes (UTM to LCC) + centre_lon_data, centre_lat_data = projection(x, y, inverse=True) + centre_lat = {'data': centre_lat_data} + centre_lon = {'data': centre_lon_data} - return {'data': self.center_lats}, {'data': self.center_lons} + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ + + # Get centre latitudes + model_centre_lat = self.lat + + # Get centre longitudes + model_centre_lon = self.lon + + return model_centre_lat, model_centre_lon + + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. + """ + + # Get grid resolution + inc_x = np.abs(np.mean(np.diff(self.x['data']))) + inc_y = np.abs(np.mean(np.diff(self.y['data']))) + + # Get bounds for rotated coordinates + y_bounds = self.create_bounds(self.y['data'], inc_y) + x_bounds = self.create_bounds(self.x['data'], inc_x) + + # Get rotated latitudes for grid edge + left_edge_y = np.append(y_bounds.flatten()[::2], y_bounds.flatten()[-1]) + right_edge_y = np.flip(left_edge_y, 0) + top_edge_y = np.repeat(y_bounds[-1][-1], len(self.x['data']) - 1) + bottom_edge_y = np.repeat(y_bounds[0][0], len(self.x['data'])) + y_grid_edge = np.concatenate((left_edge_y, top_edge_y, right_edge_y, bottom_edge_y)) + + # Get rotated longitudes for grid edge + left_edge_x = np.repeat(x_bounds[0][0], len(self.y['data']) + 1) + top_edge_x = x_bounds.flatten()[1:-1:2] + right_edge_x = np.repeat(x_bounds[-1][-1], len(self.y['data']) + 1) + bottom_edge_x = np.flip(x_bounds.flatten()[:-1:2], 0) + x_grid_edge = np.concatenate((left_edge_x, top_edge_x, right_edge_x, bottom_edge_x)) + + # Get edges for regular coordinates + projection = Proj( + proj='lcc', + ellps='WGS84', + R=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_1=float(self.projection_data['standard_parallel'].split(', ')[0]), + lat_2=float(self.projection_data['standard_parallel'].split(', ')[1]), + lon_0=float(self.projection_data['longitude_of_central_meridian']), + lat_0=float(self.projection_data['latitude_of_projection_origin']), + to_meter=1, + x_0=0, + y_0=0, + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + k_0=1.0 + ) + grid_edge_lon_data, grid_edge_lat_data = projection(x_grid_edge, y_grid_edge, inverse=True) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline 
= np.vstack((grid_edge_lon_data, grid_edge_lat_data)).T + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon @staticmethod def _set_var_crs(var): @@ -280,9 +361,9 @@ class LCCNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ diff --git a/nes/nc_projections/mercator_nes.py b/nes/nc_projections/mercator_nes.py index 37dd6fdb166091204a09b2799a4e33508c4805ec..a6e640c3d8506f9a0feb39403e4b52ba958d27f4 100644 --- a/nes/nc_projections/mercator_nes.py +++ b/nes/nc_projections/mercator_nes.py @@ -38,18 +38,18 @@ class MercatorNes(Nes): Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int @@ -91,27 +91,27 @@ class MercatorNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. """ new = MercatorNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, @@ -120,19 +120,19 @@ class MercatorNes(Nes): def get_projection_data(self, create_nes, **kwargs): """ - Read the projection data + Read the projection data. Returns ------- projection : dict - Dictionary with the projection data + Dictionary with the projection data. 
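The projected-to-geographic step above in isolation. A sketch with pyproj, keeping only the WGS84 ellipsoid for brevity (the patch additionally pins the semi-major and semi-minor axes explicitly); parallels and origin are illustrative:

import numpy as np
from pyproj import Proj

proj = Proj(proj='lcc', ellps='WGS84',
            lat_1=37.0, lat_2=43.0, lon_0=-3.0, lat_0=40.0,
            x_0=0, y_0=0, k_0=1.0)

x = np.array([-100000.0, 0.0, 100000.0])       # metres in projection space
y = np.zeros(3)
lon, lat = proj(x, y, inverse=True)            # inverse=True: x/y -> lon/lat
print(lon, lat)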
""" if create_nes: projection = {'data': None, 'dimensions': (), 'grid_mapping_name': 'mercator', - 'standard_parallel': [kwargs['lat_ts']], # TODO: Check if True + 'standard_parallel': [kwargs['lat_ts']], # TODO: Check if True 'longitude_of_projection_origin': kwargs['lon_0'], } @@ -172,33 +172,33 @@ class MercatorNes(Nes): # MERCATOR Y COORDINATES y = netcdf.createVariable('y', self._y['data'].dtype, ('y',)) - y.long_name = "y coordinate of projection" + y.long_name = 'y coordinate of projection' if 'units' in self._y.keys(): y.units = Units(self._y['units'], formatted=True).units else: y.units = 'm' - y.standard_name = "projection_y_coordinate" + y.standard_name = 'projection_y_coordinate' if self.size > 1: y.set_collective(True) y[:] = self._y['data'] # MERCATOR X COORDINATES x = netcdf.createVariable('x', self._x['data'].dtype, ('x',)) - x.long_name = "x coordinate of projection" + x.long_name = 'x coordinate of projection' if 'units' in self._x.keys(): x.units = Units(self._x['units'], formatted=True).units else: x.units = 'm' - x.standard_name = "projection_x_coordinate" + x.standard_name = 'projection_x_coordinate' if self.size > 1: x.set_collective(True) x[:] = self._x['data'] return None - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. Parameters ---------- @@ -224,16 +224,89 @@ class MercatorNes(Nes): projection = Proj( proj='merc', - a=6370000.00, - b=6370000.00, + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + b=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) lat_ts=kwargs['lat_ts'], lon_0=kwargs['lon_0'], ) - # Calculate center latitudes and longitudes (UTM to Mercator) - self.center_lons, self.center_lats = projection(x, y, inverse=True) + # Calculate centre latitudes and longitudes (UTM to Mercator) + centre_lon_data, centre_lat_data = projection(x, y, inverse=True) + centre_lat = {'data': centre_lat_data} + centre_lon = {'data': centre_lon_data} - return {'data': self.center_lats}, {'data': self.center_lons} + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ + + # Get centre latitudes + model_centre_lat = self.lat + + # Get centre longitudes + model_centre_lon = self.lon + + return model_centre_lat, model_centre_lon + + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. 
+ """ + + # Get grid resolution + inc_x = np.abs(np.mean(np.diff(self.x['data']))) + inc_y = np.abs(np.mean(np.diff(self.y['data']))) + + # Get bounds for rotated coordinates + y_bounds = self.create_bounds(self.y['data'], inc_y) + x_bounds = self.create_bounds(self.x['data'], inc_x) + + # Get rotated latitudes for grid edge + left_edge_y = np.append(y_bounds.flatten()[::2], y_bounds.flatten()[-1]) + right_edge_y = np.flip(left_edge_y, 0) + top_edge_y = np.repeat(y_bounds[-1][-1], len(self.x['data']) - 1) + bottom_edge_y = np.repeat(y_bounds[0][0], len(self.x['data'])) + y_grid_edge = np.concatenate((left_edge_y, top_edge_y, right_edge_y, bottom_edge_y)) + + # Get rotated longitudes for grid edge + left_edge_x = np.repeat(x_bounds[0][0], len(self.y['data']) + 1) + top_edge_x = x_bounds.flatten()[1:-1:2] + right_edge_x = np.repeat(x_bounds[-1][-1], len(self.y['data']) + 1) + bottom_edge_x = np.flip(x_bounds.flatten()[:-1:2], 0) + x_grid_edge = np.concatenate((left_edge_x, top_edge_x, right_edge_x, bottom_edge_x)) + + # Get edges for regular coordinates + projection = Proj( + proj='merc', + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + b=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_ts=float(self.projection_data['standard_parallel']), + lon_0=float(self.projection_data['longitude_of_central_meridian']), + ) + grid_edge_lon_data, grid_edge_lat_data = projection(x_grid_edge, y_grid_edge, inverse=True) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((grid_edge_lon_data, grid_edge_lat_data)).T + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon @staticmethod def _set_var_crs(var): @@ -256,7 +329,7 @@ class MercatorNes(Nes): Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ mapping = netcdf.createVariable('mercator', 'c') @@ -275,9 +348,9 @@ class MercatorNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ diff --git a/nes/nc_projections/points_nes.py b/nes/nc_projections/points_nes.py index 3dd99455534488d692299f711afba6fcedb68d9e..5b81e1c8c016eb692baada60f30a13a2c3c58b9e 100644 --- a/nes/nc_projections/points_nes.py +++ b/nes/nc_projections/points_nes.py @@ -2,12 +2,10 @@ import sys import warnings -from copy import deepcopy - import numpy as np -from netCDF4 import Dataset, date2num, stringtochar +from copy import deepcopy +from netCDF4 import date2num, stringtochar from numpy.ma.core import MaskError - from .default_nes import Nes @@ -33,23 +31,25 @@ class PointsNes(Nes): avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, times=None, strlen=75, **kwargs): """ - Initialize the PointsNes class + Initialize the PointsNes class. Parameters ---------- comm: MPI.Communicator - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. 
dataset: Dataset, None - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'T'] + Indicates the parallelization method that you want. Default: 'X'. + accepted values: ['X', 'T']. + strlen: int + Maximum length of strings in NetCDF. Default: 75. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int @@ -63,20 +63,20 @@ class PointsNes(Nes): times=times, **kwargs) if create_nes: - # Complete dimensions - self._station = {'data': np.arange(len(self._lon['data']))} - # Dimensions screening self.lat = self._get_coordinate_values(self._lat, 'X') self.lon = self._get_coordinate_values(self._lon, 'X') - self.station = deepcopy(self._station) self.strlen = strlen else: - self._station = self._get_coordinate_dimension(['station']) - self.station = self._get_coordinate_values(self._station, 'X') - + # Dimensions screening self.strlen = self._get_strlen() + # Complete dimensions + self._station = {'data': np.arange(len(self._lon['data']))} + + # Dimensions screening + self.station = self._get_coordinate_values(self._station, 'X') + # Set axis limits for parallel writing self.write_axis_limits = self.get_write_axis_limits() @@ -88,27 +88,27 @@ class PointsNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'T'] + Indicates the parallelization method that you want. Default: 'X'. + accepted values: ['X', 'T']. """ new = PointsNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, @@ -125,6 +125,7 @@ class PointsNes(Nes): NetCDF object. 
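The 'station' dimension created above is simply an index over the point coordinates. A tiny sketch with illustrative stations:

import numpy as np

lon = {'data': np.array([2.15, -3.70, -0.12])}    # illustrative station longitudes
lat = {'data': np.array([41.39, 40.42, 51.51])}

station = {'data': np.arange(len(lon['data']))}   # one index per point
print(station['data'])                            # [0 1 2]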
""" + # Create time dimension netcdf.createDimension('time', None) if self._time_bnds is not None: netcdf.createDimension('time_nv', 2) @@ -132,6 +133,7 @@ class PointsNes(Nes): # The number of longitudes is equal to the number of stations netcdf.createDimension('station', len(self._lon['data'])) + # Create string length dimension if hasattr(self, 'strlen'): if self.strlen is not None: netcdf.createDimension('strlen', self.strlen) @@ -151,10 +153,10 @@ class PointsNes(Nes): # TIMES time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) time_var.units = 'hours since {0}'.format( - self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S")) - time_var.standard_name = "time" + self._time[self.get_time_id(self.hours_start, first=True)].strftime('%Y-%m-%d %H:%M:%S')) + time_var.standard_name = 'time' time_var.calendar = 'standard' - time_var.long_name = "time" + time_var.long_name = 'time' if self._time_bnds is not None: time_var.bounds = 'time_bnds' if self.size > 1: @@ -166,64 +168,60 @@ class PointsNes(Nes): # TIME BOUNDS if self._time_bnds is not None: time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, - complevel=self.zip_lvl) + complevel=self.zip_lvl) if self.size > 1: time_bnds_var.set_collective(True) time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') # STATIONS - stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - stations.units = "" - stations.axis = "X" - stations.long_name = "" - stations.standard_name = "station" + stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, + complevel=self.zip_lvl) + stations.units = '' + stations.axis = 'X' + stations.long_name = '' + stations.standard_name = 'station' if self.size > 1: stations.set_collective(True) stations[:] = self._station['data'] - return None - - def _get_coordinate_dimension(self, possible_names): - """ - Read the coordinate dimension data. - - This will read the complete data of the coordinate - - Parameters - ---------- - possible_names: list, str - List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']) - - Returns - ------- - nc_var : dict - Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. - """ - - nc_var = super(PointsNes, self)._get_coordinate_dimension(possible_names) - - if isinstance(possible_names, str): - possible_names = [possible_names] + # LATITUDES + lat = netcdf.createVariable('lat', np.float64, self._lat_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lat.units = 'degrees_north' + lat.axis = 'Y' + lat.long_name = 'latitude coordinate' + lat.standard_name = 'latitude' + if self.size > 1: + lat.set_collective(True) + lat[:] = self._lat['data'] - if 'station' in possible_names: - nc_var['data'] = np.arange(len(self._lon['data'])) + # LONGITUDES + lon = netcdf.createVariable('lon', np.float64, self._lon_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lon.units = 'degrees_east' + lon.axis = 'X' + lon.long_name = 'longitude coordinate' + lon.standard_name = 'longitude' + if self.size > 1: + lon.set_collective(True) + lon[:] = self._lon['data'] - return nc_var + return None def _get_coordinate_values(self, coordinate_info, coordinate_axis): """ - Get the coordinate data of the current portion + Get the coordinate data of the current portion. 
Parameters ---------- coordinate_info : dict, list Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. coordinate_axis : str - Name of the coordinate to extract. Accepted values: ['X'] + Name of the coordinate to extract. Accepted values: ['X']. Returns ------- values : dict - Dictionary with the portion of data corresponding to the rank + Dictionary with the portion of data corresponding to the rank. """ values = deepcopy(coordinate_info) @@ -243,50 +241,6 @@ class PointsNes(Nes): return values - def _get_lazy_variables_not_used(self): - """ - Get all the variables information. - - Returns - ------- - variables : dict - Dictionary with the variable name as key and another dictionary as value. - De value dictionary will have the 'data' key with None as value and all the variable attributes as the - other keys. - e.g. - {'var_name_1': {'data': None, 'attr_1': value_1_1, 'attr_2': value_1_2, ...}, - 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, - ...} - """ - - if self.is_xarray: - variables = self.dataset.variables - else: - if self.master: - variables = {} - for var_name, var_info in self.netcdf.variables.items(): - variables[var_name] = {} - variables[var_name]['data'] = None - # Remove strlen as a dimension - if 'strlen' in var_info.dimensions: - variables[var_name]['dimensions'] = tuple([dim for dim in var_info.dimensions - if dim != 'strlen']) - else: - variables[var_name]['dimensions'] = var_info.dimensions - - for attrname in var_info.ncattrs(): - # Avoiding some attributes - if attrname not in ['missing_value', '_FillValue']: - value = getattr(var_info, attrname) - if value in ['unitless', '-']: - value = '' - variables[var_name][attrname] = value - else: - variables = None - variables = self.comm.bcast(variables, root=0) - - return variables - def _get_strlen(self): """ Read the string length dimension of some variables. @@ -294,7 +248,7 @@ class PointsNes(Nes): Returns ------- int, None - String length. None means no string data + String length. None means no string data. """ if 'strlen' in self.netcdf.dimensions: @@ -311,7 +265,7 @@ class PointsNes(Nes): Parameters ---------- var_name : str - Name of the variable to read + Name of the variable to read. Returns ------- @@ -333,7 +287,7 @@ class PointsNes(Nes): data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] else: - raise NotImplementedError('Error with {0}. Only can be read netCDF with 2 dimensions or less'.format( + raise NotImplementedError("Error with {0}. Only can be read netCDF with 2 dimensions or less".format( var_name)) # Missing to nan @@ -346,24 +300,43 @@ class PointsNes(Nes): def _create_variables(self, netcdf, chunking=False): """ - Create the netCDF file variables + Create the netCDF file variables. Parameters ---------- netcdf : Dataset - netcdf4-python opened Dataset + netcdf4-python opened Dataset. chunking : bool - Indicates if you want to chunk the output netCDF + Indicates if you want to chunk the output netCDF. """ if self.variables is not None: for i, (var_name, var_dict) in enumerate(self.variables.items()): - if var_dict['data'] is not None: - + + # Get data type + if 'dtype' in var_dict.keys(): + var_dtype = var_dict['dtype'] + if var_dtype != var_dict['data'].dtype: + msg = "WARNING!!! 
" + msg += "Different data types for variable {0}".format(var_name) + msg += "Input dtype={0}, data dtype={1}".format(var_dtype, + var_dict['data'].dtype) + warnings.warn(msg) + try: + var_dict['data'] = var_dict['data'].astype(var_dtype) + except Exception as e: # TODO: Detect exception + raise e("It was not possible to cast the data to the input dtype.") + else: + var_dtype = var_dict['data'].dtype + + # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST) + if var_dtype == np.dtype(object): + var_dict['data'] = var_dict['data'].astype(str) + var_dtype = var_dict['data'].dtype + # Get dimensions when reading datasets if 'dimensions' in var_dict.keys(): - # Get dimensions var_dims = var_dict['dimensions'] # Get dimensions when creating new datasets else: @@ -373,18 +346,19 @@ class PointsNes(Nes): else: # For data that is dependent on time and station (e.g. PM10) var_dims = ('time',) + self._var_dim - - if var_dict['data'].dtype == np.str: - # Add strlen as a dimension if needed - var_dims += ('strlen',) - # Convert list of strings to chars + # Convert list of strings to chars for parallelization try: unicode_type = len(max(var_dict['data'], key=len)) if ((var_dict['data'].dtype == np.dtype(' 0, complevel=self.zip_lvl, chunksizes=chunk_size) - - if self.print_info: - print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + + if self.info: + print('Rank {0:03d}: Var {1} created ({2}/{3})'.format( self.rank, var_name, i + 1, len(self.variables))) if self.size > 1: var.set_collective(True) - if self.print_info: - print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + if self.info: + print('Rank {0:03d}: Var {1} collective ({2}/{3})'.format( self.rank, var_name, i + 1, len(self.variables))) - + for att_name, att_value in var_dict.items(): if att_name == 'data': if len(att_value.shape) == 1: @@ -457,7 +410,7 @@ class PointsNes(Nes): var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) elif len(att_value.shape) == 2: - if 'strlen' in var_dict['dimensions']: + if 'strlen' in var_dims: try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value except IndexError: @@ -482,23 +435,23 @@ class PointsNes(Nes): var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) - if self.print_info: - print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + if self.info: + print('Rank {0:03d}: Var {1} data ({2}/{3})'.format(self.rank, var_name, i + 1, len(self.variables))) elif att_name not in ['chunk_size', 'var_dims', 'dimensions', 'dtype']: var.setncattr(att_name, att_value) self._set_var_crs(var) - if self.print_info: - print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + if self.info: + print('Rank {0:03d}: Var {1} completed ({2}/{3})'.format(self.rank, var_name, i + 1, len(self.variables))) except Exception as e: print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) - # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), # file=sys.stderr) raise e else: - msg = 'WARNING!!! ' - msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + msg = "WARNING!!! " + msg += "Variable {0} was not loaded. 
It will not be written.".format(var_name) warnings.warn(msg) def _gather_data(self): @@ -530,8 +483,8 @@ class PointsNes(Nes): # dimensions = (time, station) axis = 1 else: - msg = 'The points NetCDF must have ' - msg += 'surface values (without levels).' + msg = "The points NetCDF must have " + msg += "surface values (without levels)." raise NotImplementedError(msg) elif self.parallel_method == 'T': # concatenate over time @@ -548,15 +501,15 @@ class PointsNes(Nes): # dimensions = (time, station) axis = 0 else: - raise NotImplementedError('The points NetCDF must only have surface values (without levels).') + raise NotImplementedError("The points NetCDF must only have surface values (without levels).") else: raise NotImplementedError( "Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( meth=self.parallel_method, accept=['X', 'T'])) data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -566,9 +519,9 @@ class PointsNes(Nes): return data_list - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from points. + Calculate centre latitudes and longitudes from points. Parameters ---------- @@ -576,13 +529,15 @@ class PointsNes(Nes): NetCDF object. """ - # Calculate center latitudes - self.center_lats = kwargs['lat'] + # Calculate centre latitudes + centre_lat_data = kwargs['lat'] + centre_lat = {'data': centre_lat_data} - # Calculate center longitudes - self.center_lons = kwargs['lon'] + # Calculate centre longitudes + centre_lon_data = kwargs['lon'] + centre_lon = {'data': centre_lon_data} - return {'data': self.center_lats}, {'data': self.center_lons} + return centre_lat, centre_lon def _create_metadata(self, netcdf): """ @@ -594,27 +549,6 @@ class PointsNes(Nes): NetCDF object. """ - # LATITUDES - lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if self.size > 1: - lats.set_collective(True) - lats[:] = self._lat['data'] - - # LONGITUDES - lons = netcdf.createVariable('lon', np.float64, self._lon_dim, - zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if self.size > 1: - lons.set_collective(True) - lons[:] = self._lon['data'] - return None def to_grib2(self, path, grib_keys, grib_template_path, info=False): @@ -626,10 +560,43 @@ class PointsNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. 
""" raise NotImplementedError("Grib2 format cannot be write with point data.") + + def to_providentia(self, model_centre_lon, model_centre_lat, grid_edge_lon, grid_edge_lat): + """ + Transform a PointsNes into a PointsNesProvidentia object + + Returns + ---------- + points_nes_providentia : nes.Nes + Points Nes Providentia Object + """ + + from .points_nes_providentia import PointsNesProvidentia + + points_nes_providentia = PointsNesProvidentia(comm=self.comm, + info=self.info, + dataset=self.netcdf, + balanced=self.balanced, + parallel_method=self.parallel_method, + avoid_first_hours=self.hours_start, + avoid_last_hours=self.hours_end, + first_level=self.first_level, + last_level=self.last_level, + create_nes=True, + times=self.time, + model_centre_lon=model_centre_lon, + model_centre_lat=model_centre_lat, + grid_edge_lon=grid_edge_lon, + grid_edge_lat=grid_edge_lat, + lat=self.lat['data'], + lon=self.lon['data'] + ) + + return points_nes_providentia \ No newline at end of file diff --git a/nes/nc_projections/points_nes_ghost.py b/nes/nc_projections/points_nes_ghost.py index 9b8ab4c75d4af6660b6fcc2eaf2dcf40b95c7f39..84ceb1fd7c1a69e9800ffa5b156facb2698288f2 100644 --- a/nes/nc_projections/points_nes_ghost.py +++ b/nes/nc_projections/points_nes_ghost.py @@ -4,6 +4,7 @@ import sys import warnings import numpy as np from numpy.ma.core import MaskError +from netCDF4 import stringtochar, date2num from copy import deepcopy from .points_nes import PointsNes @@ -13,51 +14,56 @@ class PointsNesGHOST(PointsNes): Attributes ---------- - _qa : tuple - Tuple with the name of the dimensions of the quality assurance (qa) flag values. - ('qa',) for a points grid. - _flag : tuple - Tuple with the name of the dimensions of the data flag values. - ('flag',) for a points grid. + _qa : dict + Quality flags (GHOST checks) dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _flag : dict + Data flags (given by data provider) dictionary with the complete 'data' key for all the values and the rest of + the attributes. + _qa : dict + Quality flags (GHOST checks) dictionary with the portion of 'data' corresponding to the rank values. + _flag : dict + Data flags (given by data provider) dictionary with the portion of 'data' corresponding to the rank values. """ def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, times=None, **kwargs): """ - Initialize the PointsNesGHOST class + Initialize the PointsNesGHOST class. Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X'] + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. avoid_first_hours : int Number of hours to remove from first time steps. 
avoid_last_hours : int Number of hours to remove from last time steps. balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. first_level : int - Index of the first level to use + Index of the first level to use. last_level : int, None Index of the last level to use. None if it is the last. create_nes : bool - Indicates if ypu want to create the object from scratch (True) or trough an existen file. + Indicates if you want to create the object from scratch (True) or through an existing file. times : list, None List of times to substitute the current ones while creation. kwargs : - Projection dependent parameters to create it from scratch + Projection dependent parameters to create it from scratch. """ super(PointsNesGHOST, self).__init__(comm=comm, path=path, info=info, dataset=dataset, @@ -65,50 +71,53 @@ class PointsNesGHOST(PointsNes): avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, create_nes=create_nes, times=times, **kwargs) - + + # Complete dimensions self._flag = self._get_coordinate_dimension(['flag']) - self.flag = self._get_coordinate_values(self._flag, 'X') - self._qa = self._get_coordinate_dimension(['qa']) + + # Dimensions screening + self.flag = self._get_coordinate_values(self._flag, 'X') self.qa = self._get_coordinate_values(self._qa, 'X') @staticmethod def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the PointsNesGHOST class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X'] + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. first_level : int - Index of the first level to use + Index of the first level to use. last_level : int, None Index of the last level to use. None if it is the last. create_nes : bool - Indicates if ypu want to create the object from scratch (True) or trough an existen file.:q + Indicates if you want to create the object from scratch (True) or through an existing file. 
""" new = PointsNesGHOST(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, @@ -143,31 +152,61 @@ class PointsNesGHOST(PointsNes): NetCDF object. """ - super(PointsNesGHOST, self)._create_dimension_variables(netcdf) + # TIMES + time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + time_var.units = 'hours since {0}'.format( + self._time[self.get_time_id(self.hours_start, first=True)].strftime('%Y-%m-%d %H:%M:%S')) + time_var.standard_name = 'time' + time_var.calendar = 'standard' + time_var.long_name = 'time' + if self._time_bnds is not None: + time_var.bounds = 'time_bnds' + if self.size > 1: + time_var.set_collective(True) + time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True): + self.get_time_id(self.hours_end, first=False)], + time_var.units, time_var.calendar) + + # TIME BOUNDS + if self._time_bnds is not None: + time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, + complevel=self.zip_lvl) + if self.size > 1: + time_bnds_var.set_collective(True) + time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') + + # STATIONS + stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, + complevel=self.zip_lvl) + stations.units = '' + stations.axis = 'X' + stations.long_name = '' + stations.standard_name = 'station' + if self.size > 1: + stations.set_collective(True) + stations[:] = self._station['data'] - # N FLAG CODES - flag = netcdf.createVariable('flag', np.int64, ('station', 'time', 'N_flag_codes',), + # LATITUDES + lat = netcdf.createVariable('latitude', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - flag.units = "" - flag.axis = "" - flag.long_name = "" - flag.standard_name = "flag" + lat.units = 'degrees_north' + lat.axis = 'Y' + lat.long_name = 'latitude coordinate' + lat.standard_name = 'latitude' if self.size > 1: - flag.set_collective(True) - flag[:] = self._flag['data'] + lat.set_collective(True) + lat[:] = self._lat['data'] - # N QA CODES - qa = netcdf.createVariable('qa', np.int64, ('station', 'time', 'N_qa_codes',), - zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - qa.units = "" - qa.axis = "" - qa.long_name = "" - qa.standard_name = "N_qa_codes" + # LONGITUDES + lon = netcdf.createVariable('longitude', np.float64, self._lon_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lon.units = 'degrees_east' + lon.axis = 'X' + lon.long_name = 'longitude coordinate' + lon.standard_name = 'longitude' if self.size > 1: - qa.set_collective(True) - qa[:] = self._qa['data'] - - self.free_vars(('flag', 'qa')) + lon.set_collective(True) + lon[:] = self._lon['data'] def erase_flags(self): first_time_idx = self.get_time_id(self.hours_start, first=True) @@ -180,18 +219,18 @@ class PointsNesGHOST(PointsNes): def _get_coordinate_values(self, coordinate_info, coordinate_axis): """ - Get the coordinate data of the current portion + Get the coordinate data of the current portion. Parameters ---------- coordinate_info : dict, list Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. coordinate_axis : str - Name of the coordinate to extract. Accepted values: ['X'] + Name of the coordinate to extract. Accepted values: ['X']. Returns ------- values : dict - Dictionary with the portion of data corresponding to the rank + Dictionary with the portion of data corresponding to the rank. 
""" values = deepcopy(coordinate_info) @@ -221,7 +260,7 @@ class PointsNesGHOST(PointsNes): Parameters ---------- var_name : str - Name of the variable to read + Name of the variable to read. Returns ------- @@ -232,13 +271,7 @@ class PointsNesGHOST(PointsNes): nc_var = self.netcdf.variables[var_name] var_dims = nc_var.dimensions - # Remove strlen (maximum number of characters that a string can have) from dimensions and join characters - if 'strlen' in nc_var.dimensions: - nc_var = np.array([''.join(i) for i in np.char.decode(nc_var[:].data)]) - var_dims = tuple([', '.join(dim for dim in var_dims if dim != 'strlen')]) - # Read data in 1 or 2 dimensions - # TODO: Ask Dene why x, t instead of t, x if len(var_dims) < 2: data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] elif len(var_dims) == 2: @@ -262,41 +295,57 @@ class PointsNesGHOST(PointsNes): def _create_variables(self, netcdf, chunking=False): """ - Create the netCDF file variables + Create the netCDF file variables. Parameters ---------- netcdf : Dataset - netcdf4-python opened Dataset + netcdf4-python opened Dataset. chunking : bool - Indicates if you want to chunk the output netCDF + Indicates if you want to chunk the output netCDF. """ if self.variables is not None: for i, (var_name, var_dict) in enumerate(self.variables.items()): if var_dict['data'] is not None: - # Define dimensions depending on the type of variable + # Get data type + if 'dtype' in var_dict.keys(): + var_dtype = var_dict['dtype'] + else: + var_dtype = var_dict['data'].dtype + + # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST) + if var_dtype == np.dtype(object): + var_dict['data'] = var_dict['data'].astype(str) + var_dtype = var_dict['data'].dtype + else: + var_dtype = var_dict['data'].dtype + + # Get dimensions if len(var_dict['data'].shape) == 1: # Metadata var_dims = self._var_dim elif len(var_dict['data'].shape) == 2: # Different from metadata (e.g. 
concentrations of pm10) var_dims = self._var_dim + ('time',) - else: - # Flags and qa variables - if var_name == 'flag': - var_dims = self._var_dim + ('time', 'N_flag_codes',) - elif var_name == 'qa': - var_dims = self._var_dim + ('time', 'N_qa_codes',) - - # ESDAC iwahashi landform and other vars are given as objects, transform to strings - if var_dict['data'].dtype == np.dtype(object): - var_dtype = np.dtype(str) - else: - var_dtype = var_dict['data'].dtype - if self.print_info: + # Convert list of strings to chars for parallelization + try: + unicode_type = len(max(var_dict['data'], key=len)) + if ((var_dict['data'].dtype == np.dtype('<U{0}'.format(unicode_type))) or + (var_dict['data'].dtype == np.dtype(object))): + var_dict['data'] = stringtochar(var_dict['data'].astype('S{0}'.format(unicode_type))) + var_dims += ('strlen',) + var_dtype = 'S1' + except TypeError: + pass + + if self.info: print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) try: if not chunking: var = netcdf.createVariable(var_name, var_dtype, var_dims, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) else: if self.master: chunk_size = var_dict['data'].shape else: chunk_size = None chunk_size = self.comm.bcast(chunk_size, root=0) var = netcdf.createVariable(var_name, var_dtype, var_dims, zlib=self.zip_lvl > 0, complevel=self.zip_lvl, chunksizes=chunk_size) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} created ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) if self.size > 1: var.set_collective(True) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) for att_name, att_value in var_dict.items(): if att_name == 'data': - print(att_value) - print(att_value.shape) if len(att_value.shape) == 1: try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value @@ -338,19 +385,31 @@ class PointsNesGHOST(PointsNes): var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) elif len(att_value.shape) == 2: - try: - var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], - self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value - except IndexError: - raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( - var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], - self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, - att_value.shape)) - except ValueError: - raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + if 'strlen' in var_dims: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + else: + try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], - self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. 
out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) elif len(att_value.shape) == 3: try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], @@ -369,53 +428,25 @@ class PointsNesGHOST(PointsNes): :].shape, att_value.shape)) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: var.setncattr(att_name, att_value) self._set_var_crs(var) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) except Exception as e: print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) - # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), # file=sys.stderr) raise e else: msg = 'WARNING!!! ' msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) warnings.warn(msg) - - def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): - """ - Write the netCDF output file - - Parameters - ---------- - path : str - Path to the output netCDF file - compression_level : int - Level of compression (0 to 9) Default: 0 (no compression) - serial : bool - Indicates if you want to write in serial or not. Default: False - info : bool - Indicates if you want to print the information of each writing step by stdout Default: False - chunking : bool - Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False - """ - - if not serial: - msg = 'WARNING!!! ' - msg += 'GHOST datasets cannot be written in parallel yet.' - msg += 'Changing to serial mode.' - warnings.warn(msg) - super(PointsNesGHOST, self).to_netcdf(path, compression_level=compression_level, - serial=True, info=info, chunking=chunking) - - return None - + def _gather_data(self): """ Gather all the variable data into the MPI rank 0 to perform a serial write. @@ -469,8 +500,8 @@ class PointsNesGHOST(PointsNes): meth=self.parallel_method, accept=['X', 'T'])) data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -478,4 +509,231 @@ class PointsNesGHOST(PointsNes): self.comm.Abort(1) raise e - return data_list \ No newline at end of file + return data_list + + def _create_metadata(self, netcdf): + """ + Create metadata variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. 
+ """ + + # N FLAG CODES + flag = netcdf.createVariable('flag', np.int64, ('station', 'time', 'N_flag_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + flag.units = '' + flag.axis = '' + flag.long_name = '' + flag.standard_name = 'flag' + if self.size > 1: + flag.set_collective(True) + flag[:] = self._flag['data'] + + # N QA CODES + qa = netcdf.createVariable('qa', np.int64, ('station', 'time', 'N_qa_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + qa.units = '' + qa.axis = '' + qa.long_name = '' + qa.standard_name = 'N_qa_codes' + if self.size > 1: + qa.set_collective(True) + qa[:] = self._qa['data'] + + return None + + def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + """ + Write the netCDF output file. + + Parameters + ---------- + path : str + Path to the output netCDF file. + compression_level : int + Level of compression (0 to 9) Default: 0 (no compression). + serial : bool + Indicates if you want to write in serial or not. Default: False. + info : bool + Indicates if you want to print the information of each writing step by stdout Default: False. + chunking : bool + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False. + """ + + if not serial: + msg = 'WARNING!!! ' + msg += 'GHOST datasets cannot be written in parallel yet. ' + msg += 'Changing to serial mode.' + warnings.warn(msg) + super(PointsNesGHOST, self).to_netcdf(path, compression_level=compression_level, + serial=True, info=info, chunking=chunking) + + return None + + def to_points(self): + """ + Transform a PointsNesGHOST into a PointsNes object + + Returns + ---------- + points_nes : nes.Nes + Points Nes Object (without GHOST metadata variables) + """ + + points_nes = PointsNes(comm=self.comm, + info=self.info, + balanced=self.balanced, + parallel_method=self.parallel_method, + avoid_first_hours=self.hours_start, + avoid_last_hours=self.hours_end, + first_level=self.first_level, + last_level=self.last_level, + create_nes=True, + lat=self.lat['data'], + lon=self.lon['data'], + times=self.time + ) + + points_nes.variables = deepcopy(self.variables) + GHOST_version = str(float(np.unique(points_nes.variables['GHOST_version']['data']))) + metadata_variables = self.get_standard_metadata(GHOST_version) + points_nes.free_vars(metadata_variables) + + return points_nes + + def get_standard_metadata(self, GHOST_version): + """ + Get all possible GHOST variables for each version. + + Parameters + ---------- + GHOST_version : str + Version of GHOST file. 
+ + Returns + ---------- + metadata_variables[GHOST_version] : list + List of metadata variables for a certain GHOST version + """ + + # This metadata variables are + metadata_variables = {'1.4': ['GHOST_version', 'station_reference', 'station_timezone', 'latitude', 'longitude', + 'altitude', 'sampling_height', 'measurement_altitude', 'ellipsoid', + 'horizontal_datum', 'vertical_datum', 'projection', 'distance_to_building', + 'distance_to_kerb', 'distance_to_junction', 'distance_to_source', 'street_width', + 'street_type', 'daytime_traffic_speed', 'daily_passing_vehicles', 'data_level', + 'climatology', 'station_name', 'city', 'country', + 'administrative_country_division_1', 'administrative_country_division_2', + 'population', 'representative_radius', 'network', 'associated_networks', + 'area_classification', 'station_classification', 'main_emission_source', + 'land_use', 'terrain', 'measurement_scale', + 'ESDAC_Iwahashi_landform_classification', + 'ESDAC_modal_Iwahashi_landform_classification_5km', + 'ESDAC_modal_Iwahashi_landform_classification_25km', + 'ESDAC_Meybeck_landform_classification', + 'ESDAC_modal_Meybeck_landform_classification_5km', + 'ESDAC_modal_Meybeck_landform_classification_25km', + 'GHSL_settlement_model_classification', + 'GHSL_modal_settlement_model_classification_5km', + 'GHSL_modal_settlement_model_classification_25km', + 'Joly-Peuch_classification_code', 'Koppen-Geiger_classification', + 'Koppen-Geiger_modal_classification_5km', + 'Koppen-Geiger_modal_classification_25km', + 'MODIS_MCD12C1_v6_IGBP_land_use', 'MODIS_MCD12C1_v6_modal_IGBP_land_use_5km', + 'MODIS_MCD12C1_v6_modal_IGBP_land_use_25km', 'MODIS_MCD12C1_v6_UMD_land_use', + 'MODIS_MCD12C1_v6_modal_UMD_land_use_5km', + 'MODIS_MCD12C1_v6_modal_UMD_land_use_25km', 'MODIS_MCD12C1_v6_LAI', + 'MODIS_MCD12C1_v6_modal_LAI_5km', 'MODIS_MCD12C1_v6_modal_LAI_25km', + 'WMO_region', 'WWF_TEOW_terrestrial_ecoregion', 'WWF_TEOW_biogeographical_realm', + 'WWF_TEOW_biome', 'UMBC_anthrome_classification', + 'UMBC_modal_anthrome_classification_5km', + 'UMBC_modal_anthrome_classification_25km', + 'EDGAR_v4.3.2_annual_average_BC_emissions', + 'EDGAR_v4.3.2_annual_average_CO_emissions', + 'EDGAR_v4.3.2_annual_average_NH3_emissions', + 'EDGAR_v4.3.2_annual_average_NMVOC_emissions', + 'EDGAR_v4.3.2_annual_average_NOx_emissions', + 'EDGAR_v4.3.2_annual_average_OC_emissions', + 'EDGAR_v4.3.2_annual_average_PM10_emissions', + 'EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions', + 'EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions', + 'EDGAR_v4.3.2_annual_average_SO2_emissions', 'ASTER_v3_altitude', + 'ETOPO1_altitude', 'ETOPO1_max_altitude_difference_5km', + 'GHSL_built_up_area_density', 'GHSL_average_built_up_area_density_5km', + 'GHSL_average_built_up_area_density_25km', 'GHSL_max_built_up_area_density_5km', + 'GHSL_max_built_up_area_density_25km', 'GHSL_population_density', + 'GHSL_average_population_density_5km', 'GHSL_average_population_density_25km', + 'GHSL_max_population_density_5km', 'GHSL_max_population_density_25km', + 'GPW_population_density', 'GPW_average_population_density_5km', + 'GPW_average_population_density_25km', 'GPW_max_population_density_5km', + 'GPW_max_population_density_25km', + 'NOAA-DMSP-OLS_v4_nighttime_stable_lights', + 'NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km', + 'NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km', + 'NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km', + 'NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km', + 'OMI_level3_column_annual_average_NO2', + 
'OMI_level3_column_cloud_screened_annual_average_NO2', + 'OMI_level3_tropospheric_column_annual_average_NO2', + 'OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2', + 'GSFC_coastline_proximity', 'primary_sampling_type', + 'primary_sampling_instrument_name', + 'primary_sampling_instrument_documented_flow_rate', + 'primary_sampling_instrument_reported_flow_rate', + 'primary_sampling_process_details', 'primary_sampling_instrument_manual_name', + 'primary_sampling_further_details', 'sample_preparation_types', + 'sample_preparation_techniques', 'sample_preparation_process_details', + 'sample_preparation_further_details', 'measurement_methodology', + 'measuring_instrument_name', 'measuring_instrument_sampling_type', + 'measuring_instrument_documented_flow_rate', + 'measuring_instrument_reported_flow_rate', 'measuring_instrument_process_details', + 'measuring_instrument_process_details', 'measuring_instrument_manual_name', + 'measuring_instrument_further_details', 'measuring_instrument_reported_units', + 'measuring_instrument_reported_lower_limit_of_detection', + 'measuring_instrument_documented_lower_limit_of_detection', + 'measuring_instrument_reported_upper_limit_of_detection', + 'measuring_instrument_documented_upper_limit_of_detection', + 'measuring_instrument_reported_uncertainty', + 'measuring_instrument_documented_uncertainty', + 'measuring_instrument_reported_accuracy', + 'measuring_instrument_documented_accuracy', + 'measuring_instrument_reported_precision', + 'measuring_instrument_documented_precision', + 'measuring_instrument_reported_zero_drift', + 'measuring_instrument_documented_zero_drift', + 'measuring_instrument_reported_span_drift', + 'measuring_instrument_documented_span_drift', + 'measuring_instrument_reported_zonal_drift', + 'measuring_instrument_documented_zonal_drift', + 'measuring_instrument_reported_measurement_resolution', + 'measuring_instrument_documented_measurement_resolution', + 'measuring_instrument_reported_absorption_cross_section', + 'measuring_instrument_documented_absorption_cross_section', + 'measuring_instrument_inlet_information', + 'measuring_instrument_calibration_scale', + 'network_provided_volume_standard_temperature', + 'network_provided_volume_standard_pressure', 'retrieval_algorithm', + 'principal_investigator_name', 'principal_investigator_institution', + 'principal_investigator_email_address', 'contact_name', + 'contact_institution', 'contact_email_address', 'meta_update_stamp', + 'data_download_stamp', 'data_revision_stamp', 'network_sampling_details', + 'network_uncertainty_details', 'network_maintenance_details', + 'network_qa_details', 'network_miscellaneous_details', 'data_licence', + 'process_warnings', 'temporal_resolution', + 'reported_lower_limit_of_detection_per_measurement', + 'reported_upper_limit_of_detection_per_measurement', + 'reported_uncertainty_per_measurement', 'derived_uncertainty_per_measurement', + 'day_night_code', 'weekday_weekend_code', 'season_code', + 'hourly_native_representativity_percent', 'hourly_native_max_gap_percent', + 'daily_native_representativity_percent', 'daily_representativity_percent', + 'daily_native_max_gap_percent', 'daily_max_gap_percent', + 'monthly_native_representativity_percent', 'monthly_representativity_percent', + 'monthly_native_max_gap_percent', 'monthly_max_gap_percent', + 'annual_native_representativity_percent', 'annual_native_max_gap_percent', + 'all_representativity_percent', 'all_max_gap_percent'], + } + + return metadata_variables[GHOST_version] diff --git 
a/nes/nc_projections/points_nes_providentia.py b/nes/nc_projections/points_nes_providentia.py new file mode 100644 index 0000000000000000000000000000000000000000..6d44b7f295a2b126ddff3c2beb3d7c5c542f57b5 --- /dev/null +++ b/nes/nc_projections/points_nes_providentia.py @@ -0,0 +1,592 @@ +#!/usr/bin/env python + +import sys +import warnings +import numpy as np +from copy import deepcopy +from numpy.ma.core import MaskError +from netCDF4 import stringtochar +from .points_nes import PointsNes + + +class PointsNesProvidentia(PointsNes): + """ + + Attributes + ---------- + _model_centre_lon : dict + Model centre longitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _model_centre_lat : dict + Model centre latitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _grid_edge_lon : dict + Grid edge longitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _grid_edge_lat : dict + Grid edge latitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + model_centre_lon : dict + Model centre longitudes dictionary with the portion of 'data' corresponding to the rank values. + model_centre_lat : dict + Model centre latitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lon : dict + Grid edge longitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lat : dict + Grid edge latitudes dictionary with the portion of 'data' corresponding to the rank values. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + times=None, model_centre_lon=None, model_centre_lat=None, grid_edge_lon=None, grid_edge_lat=None, + **kwargs): + """ + Initialize the PointsNesProvidentia class. + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator. + path: str + Path to the NetCDF to initialize the object. + info: bool + Indicates if you want to get reading/writing info. + dataset: Dataset + NetCDF4-python Dataset to initialize the class. + xarray: bool + (Not working) Indicates if you want to use xarray as default. + parallel_method : str + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + balanced : bool + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. + first_level : int + Index of the first level to use. + last_level : int, None + Index of the last level to use. None if it is the last. + create_nes : bool + Indicates if you want to create the object from scratch (True) or through an existing file. + times : list, None + List of times to substitute the current ones during creation. + model_centre_lon : dict + Model centre longitudes dictionary with the portion of 'data' corresponding to the rank values. + model_centre_lat : dict + Model centre latitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lon : dict + Grid edge longitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lat : dict + Grid edge latitudes dictionary with the portion of 'data' corresponding to the rank values. 
+ kwargs : + Projection dependent parameters to create it from scratch. + """ + + super(PointsNesProvidentia, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, + create_nes=create_nes, times=times, **kwargs) + + if create_nes: + # Complete dimensions + self._model_centre_lon = model_centre_lon + self._model_centre_lat = model_centre_lat + self._grid_edge_lon = grid_edge_lon + self._grid_edge_lat = grid_edge_lat + else: + # Complete dimensions + self._model_centre_lon = self._get_coordinate_dimension(['model_centre_longitude']) + self._model_centre_lat = self._get_coordinate_dimension(['model_centre_latitude']) + self._grid_edge_lon = self._get_coordinate_dimension(['grid_edge_longitude']) + self._grid_edge_lat = self._get_coordinate_dimension(['grid_edge_latitude']) + + # Dimensions screening + self.model_centre_lon = self._get_coordinate_values(self._model_centre_lon, '') + self.model_centre_lat = self._get_coordinate_values(self._model_centre_lat, '') + self.grid_edge_lon = self._get_coordinate_values(self._grid_edge_lon, '') + self.grid_edge_lat = self._get_coordinate_values(self._grid_edge_lat, '') + + # Set strlen to be None (avoid default strlen inherited from points) + self.strlen = None + + # Force NES to keep a netCDF dataset, despite create_nes=True, in order to read the variables information. + # If we pass the variables with points_nes_providentia.variables = deepcopy(self.variables) inside points_nes.py + # we get the data in the wrong coordinates, i.e. for concentrations in points NES the coordinates are + # (time, station), but for Providentia points NES they are (station, time) as in GHOST + self.netcdf = dataset + self.variables = self._get_lazy_variables() + self.free_vars(['latitude', 'longitude', 'time']) + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False, + parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, + model_centre_lon=None, model_centre_lat=None, grid_edge_lon=None, grid_edge_lat=None): + """ + Initialize the PointsNesProvidentia class. + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator. + path: str + Path to the NetCDF to initialize the object. + info: bool + Indicates if you want to get reading/writing info. + dataset: Dataset + NetCDF4-python Dataset to initialize the class. + xarray: bool + (Not working) Indicates if you want to use xarray as default. + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. + balanced : bool + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. + first_level : int + Index of the first level to use. + last_level : int, None + Index of the last level to use. None if it is the last. + create_nes : bool + Indicates if you want to create the object from scratch (True) or through an existing file. 
+ model_centre_lon : dict + Model centre longitudes dictionary with the portion of 'data' corresponding to the rank values. + model_centre_lat : dict + Model centre latitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lon : dict + Grid edge longitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lat : dict + Grid edge latitudes dictionary with the portion of 'data' corresponding to the rank values. + """ + new = PointsNesProvidentia(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, + parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, + model_centre_lon=model_centre_lon, model_centre_lat=model_centre_lat, + grid_edge_lon=grid_edge_lon, grid_edge_lat=grid_edge_lat) + return new + + def _create_dimensions(self, netcdf): + """ + Create the 'grid_edge', 'model_latitude', 'model_longitude' dimensions. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + super(PointsNesProvidentia, self)._create_dimensions(netcdf) + + netcdf.createDimension('grid_edge', len(self._grid_edge_lon['data'])) + netcdf.createDimension('model_latitude', self._model_centre_lon['data'].shape[0]) + netcdf.createDimension('model_longitude', self._model_centre_lon['data'].shape[1]) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'model_centre_lon', 'model_centre_lat', 'grid_edge_lon' and 'grid_edge_lat' variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + super(PointsNesProvidentia, self)._create_dimension_variables(netcdf) + + # MODEL CENTRE LONGITUDES + model_centre_lon = netcdf.createVariable('model_centre_longitude', 'f8', + ('model_latitude', 'model_longitude',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + model_centre_lon.units = 'degrees_east' + model_centre_lon.axis = 'X' + model_centre_lon.long_name = 'model centre longitude' + model_centre_lon.standard_name = 'model centre longitude' + if self.size > 1: + model_centre_lon.set_collective(True) + msg = '2D meshed grid centre longitudes with ' + msg += '{} longitudes in {} bands of latitude'.format(self._model_centre_lon['data'].shape[1], + self._model_centre_lat['data'].shape[0]) + model_centre_lon.description = msg + model_centre_lon[:] = self._model_centre_lon['data'] + + # MODEL CENTRE LATITUDES + model_centre_lat = netcdf.createVariable('model_centre_latitude', 'f8', + ('model_latitude', 'model_longitude',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + model_centre_lat.units = 'degrees_north' + model_centre_lat.axis = 'Y' + model_centre_lat.long_name = 'model centre latitude' + model_centre_lat.standard_name = 'model centre latitude' + if self.size > 1: + model_centre_lat.set_collective(True) + msg = '2D meshed grid centre latitudes with ' + msg += '{} longitudes in {} bands of latitude'.format(self._model_centre_lon['data'].shape[1], + self._model_centre_lat['data'].shape[0]) + model_centre_lat.description = msg + model_centre_lat[:] = self._model_centre_lat['data'] + + # GRID EDGE DOMAIN LONGITUDES + grid_edge_lon = netcdf.createVariable('grid_edge_longitude', 'f8', ('grid_edge',)) + grid_edge_lon.units = 'degrees_east' + grid_edge_lon.axis = 'X' + grid_edge_lon.long_name = 'grid edge longitude' + grid_edge_lon.standard_name = 'grid edge longitude' + if self.size > 1: + grid_edge_lon.set_collective(True) + msg = 'Longitude coordinate along edge of grid domain ' + msg += '(going 
clockwise around grid boundary from bottom-left corner).' + grid_edge_lon.description = msg + grid_edge_lon[:] = self._grid_edge_lon['data'] + + # GRID EDGE DOMAIN LATITUDES + grid_edge_lat = netcdf.createVariable('grid_edge_latitude', 'f8', ('grid_edge',)) + grid_edge_lat.units = 'degrees_north' + grid_edge_lat.axis = 'Y' + grid_edge_lat.long_name = 'grid edge latitude' + grid_edge_lat.standard_name = 'grid edge latitude' + if self.size > 1: + grid_edge_lat.set_collective(True) + msg = 'Latitude coordinate along edge of grid domain ' + msg += '(going clockwise around grid boundary from bottom-left corner).' + grid_edge_lat.description = msg + grid_edge_lat[:] = self._grid_edge_lat['data'] + + self.free_vars(('model_centre_longitude', 'model_centre_latitude', 'grid_edge_longitude', 'grid_edge_latitude')) + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion. + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. Accepted values: ['X']. + + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank. + """ + + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif coordinate_len == 3: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], :] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + elif coordinate_axis == '': + # pass for 'model_centre_lon', 'model_centre_lat', 'grid_edge_lon' and 'grid_edge_lat' + pass + + return values + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read. + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank. + """ + + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + # Read data in 1 or 2 dimensions + if len(var_dims) < 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif len(var_dims) == 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif len(var_dims) == 3: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + :] + else: + raise NotImplementedError('Error with {0}. Only netCDF files with 3 dimensions or less can be read'.format( + var_name)) + + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables. 
+ + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset. + chunking : bool + Indicates if you want to chunk the output netCDF. + """ + + if self.variables is not None: + for i, (var_name, var_dict) in enumerate(self.variables.items()): + if var_dict['data'] is not None: + + # Get data type + if 'dtype' in var_dict.keys(): + var_dtype = var_dict['dtype'] + else: + var_dtype = var_dict['data'].dtype + + # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST) + if var_dtype == np.dtype(object): + var_dict['data'] = var_dict['data'].astype(str) + var_dtype = var_dict['data'].dtype + + # Get dimensions + if len(var_dict['data'].shape) == 1: + # Metadata + var_dims = self._var_dim + elif len(var_dict['data'].shape) == 2: + # Different from metadata (e.g. concentrations of pm10) + var_dims = self._var_dim + ('time',) + + # Convert list of strings to chars for parallelization + try: + unicode_type = len(max(var_dict['data'], key=len)) + if ((var_dict['data'].dtype == np.dtype('<U{0}'.format(unicode_type))) or + (var_dict['data'].dtype == np.dtype(object))): + var_dict['data'] = stringtochar(var_dict['data'].astype('S{0}'.format(unicode_type))) + var_dims += ('strlen',) + var_dtype = 'S1' + except TypeError: + pass + + if self.info: + print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + + try: + if not chunking: + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + else: + if self.master: + chunk_size = var_dict['data'].shape + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dtype, var_dims, zlib=self.zip_lvl > 0, + complevel=self.zip_lvl, chunksizes=chunk_size) + + if self.info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + if len(att_value.shape) == 1: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + elif len(att_value.shape) == 2: + if 'strlen' in var_dims: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + else: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. 
out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) + elif len(att_value.shape) == 3: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :].shape, + att_value.shape)) + + if self.info: + print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: + var.setncattr(att_name, att_value) + self._set_var_crs(var) + if self.info: + print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + except Exception as e: + print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), + # file=sys.stderr) + raise e + else: + msg = 'WARNING!!! ' + msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + warnings.warn(msg) + + def _gather_data(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. + """ + + data_list = deepcopy(self.variables) + for var_name, var_info in data_list.items(): + try: + # noinspection PyArgumentList + data_aux = self.comm.gather(data_list[var_name]['data'], root=0) + if self.rank == 0: + shp_len = len(data_list[var_name]['data'].shape) + # concatenate over station + if self.parallel_method == 'X': + if shp_len == 1: + # dimensions = (station) + axis = 0 + elif shp_len == 2: + # dimensions = (station, strlen) or + # dimensions = (station, time) + axis = 0 + else: + msg = 'The points NetCDF must have ' + msg += 'surface values (without levels).' + raise NotImplementedError(msg) + elif self.parallel_method == 'T': + # concatenate over time + if shp_len == 1: + # dimensions = (station) + axis = None + continue + elif shp_len == 2: + if 'strlen' in var_info['dimensions']: + # dimensions = (station, strlen) + axis = None + continue + else: + # dimensions = (station, time) + axis = 1 + else: + msg = 'The points NetCDF must have ' + msg += 'surface values (without levels).' + raise NotImplementedError(msg) + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented. 
Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'T'])) + data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) + except Exception as e: + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list + + def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + """ + Write the netCDF output file. + + Parameters + ---------- + path : str + Path to the output netCDF file. + compression_level : int + Level of compression (0 to 9) Default: 0 (no compression). + serial : bool + Indicates if you want to write in serial or not. Default: False. + info : bool + Indicates if you want to print the information of each writing step by stdout Default: False. + chunking : bool + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False. + """ + + if not serial: + msg = 'WARNING!!! ' + msg += 'Providentia datasets cannot be written in parallel yet. ' + msg += 'Changing to serial mode.' + warnings.warn(msg) + + super(PointsNesProvidentia, self).to_netcdf(path, compression_level=compression_level, + serial=True, info=info, chunking=chunking) + + return None diff --git a/nes/nc_projections/rotated_nes.py b/nes/nc_projections/rotated_nes.py index bdeb992ff771233477826c41ac16d8dc0c6112a5..ae3abb2a394fb6b24d10614ec77a2036ac37d092 100644 --- a/nes/nc_projections/rotated_nes.py +++ b/nes/nc_projections/rotated_nes.py @@ -21,7 +21,7 @@ class RotatedNes(Nes): Rotated longitudes dictionary with the portion of 'data' corresponding to the rank values. projection_data : dict Dictionary with the projection information. - 'grid_north_pole_latitude' and 'grid_north_pole_longitude' keys + 'grid_north_pole_latitude' and 'grid_north_pole_longitude' keys. _var_dim : tuple Tuple with the name of the Y and X dimensions for the variables. ('rlat', 'rlon') for a rotated projection. @@ -36,23 +36,23 @@ class RotatedNes(Nes): avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ - Initialize the RotatedNes class + Initialize the RotatedNes class. Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. 
avoid_last_hours : int @@ -92,27 +92,27 @@ class RotatedNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. """ new = RotatedNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, @@ -121,12 +121,12 @@ class RotatedNes(Nes): def get_projection_data(self, create_nes, **kwargs): """ - Read the projection data + Read the projection data. Returns ------- projection : dict - Dictionary with the projection data + Dictionary with the projection data. """ if create_nes: @@ -209,14 +209,14 @@ class RotatedNes(Nes): # Calculate rotated latitudes self.n_lat = int((abs(kwargs['south_boundary']) / kwargs['inc_rlat']) * 2 + 1) - self.rotated_lats = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] + + self.rotated_lat = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] + (kwargs['inc_rlat'] * (self.n_lat - 1)), self.n_lat) # Calculate rotated longitudes self.n_lon = int((abs(kwargs['west_boundary']) / kwargs['inc_rlon']) * 2 + 1) - self.rotated_lons = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] + + self.rotated_lon = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] + (kwargs['inc_rlon'] * (self.n_lon - 1)), self.n_lon) - return {'data': self.rotated_lats}, {'data': self.rotated_lons} + return {'data': self.rotated_lat}, {'data': self.rotated_lon} def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180, **kwargs): """ @@ -228,19 +228,29 @@ class RotatedNes(Nes): :param lat_deg: Rotated latitude coordinate. :type lat_deg: numpy.array - :param lon_min: Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360) + :param lon_min: Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360). :type lon_min: float - :return: Unrotated coordinates. Longitudes, Latitudes + :return: Unrotated coordinates. Longitudes, Latitudes. :rtype: tuple(numpy.array, numpy.array) """ + if 'centre_lat' in kwargs: + centre_lat = kwargs['centre_lat'] + else: + centre_lat = 90 - float(self.projection_data['grid_north_pole_latitude']) + + if 'centre_lon' in kwargs: + centre_lon = kwargs['centre_lon'] + else: + centre_lon = float(self.projection_data['grid_north_pole_longitude']) + 180 + degrees_to_radians = math.pi / 180. 
- tph0 = kwargs['centre_lat'] * degrees_to_radians + tph0 = centre_lat * degrees_to_radians tlm = lon_deg * degrees_to_radians tph = lat_deg * degrees_to_radians - tlm0d = -180 + kwargs['centre_lon'] + tlm0d = -180 + centre_lon ctph0 = np.cos(tph0) stph0 = np.sin(tph0) @@ -266,25 +276,94 @@ class RotatedNes(Nes): return almd, aphd - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. - Parameters + Returns ---------- - netcdf : Dataset - NetCDF object. + centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). """ # Complete dimensions self._rlat, self._rlon = self._create_rotated_coordinates(**kwargs) - # Calculate center latitudes and longitudes (1D to 2D) - self.center_lons, self.center_lats = self.rotated2latlon(np.array([self.rotated_lons] * len(self.rotated_lats)), - np.array([self.rotated_lats] * len(self.rotated_lons)).T, - **kwargs) + # Calculate centre latitudes and longitudes (1D to 2D) + centre_lon_data, centre_lat_data = self.rotated2latlon(np.array([self.rotated_lon] * len(self.rotated_lat)), + np.array([self.rotated_lat] * len(self.rotated_lon)).T, + **kwargs) + centre_lon = {'data': centre_lon_data} + centre_lat = {'data': centre_lat_data} + + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. - return {'data': self.center_lats}, {'data': self.center_lons} + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ + + # Get centre latitudes + model_centre_lat = self.lat + + # Get centre longitudes + model_centre_lon = self.lon + + return model_centre_lat, model_centre_lon + + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. 
+ """ + + # Get grid resolution + inc_rlon = np.abs(np.mean(np.diff(self.rlon['data']))) + inc_rlat = np.abs(np.mean(np.diff(self.rlat['data']))) + + # Get bounds for rotated coordinates + rlat_bounds = self.create_bounds(self.rlat['data'], inc_rlat) + rlon_bounds = self.create_bounds(self.rlon['data'], inc_rlon) + + # Get rotated latitudes for grid edge + left_edge_rlat = np.append(rlat_bounds.flatten()[::2], rlat_bounds.flatten()[-1]) + right_edge_rlat = np.flip(left_edge_rlat, 0) + top_edge_rlat = np.repeat(rlat_bounds[-1][-1], len(self.rlon['data']) - 1) + bottom_edge_rlat = np.repeat(rlat_bounds[0][0], len(self.rlon['data'])) + rlat_grid_edge = np.concatenate((left_edge_rlat, top_edge_rlat, right_edge_rlat, bottom_edge_rlat)) + + # Get rotated longitudes for grid edge + left_edge_rlon = np.repeat(rlon_bounds[0][0], len(self.rlat['data']) + 1) + top_edge_rlon = rlon_bounds.flatten()[1:-1:2] + right_edge_rlon = np.repeat(rlon_bounds[-1][-1], len(self.rlat['data']) + 1) + bottom_edge_rlon = np.flip(rlon_bounds.flatten()[:-1:2], 0) + rlon_grid_edge = np.concatenate((left_edge_rlon, top_edge_rlon, right_edge_rlon, bottom_edge_rlon)) + + # Get edges for regular coordinates + grid_edge_lon_data, grid_edge_lat_data = self.rotated2latlon(rlon_grid_edge, rlat_grid_edge) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((grid_edge_lon_data, grid_edge_lat_data)).T + + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon @staticmethod def _set_var_crs(var): @@ -307,7 +386,7 @@ class RotatedNes(Nes): Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ mapping = netcdf.createVariable('rotated_pole', 'c') @@ -326,9 +405,9 @@ class RotatedNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. 
""" diff --git a/tests/2-nes_tests_by_projection.py b/tests/2-nes_tests_by_projection.py index 93b4b4734b92f93f74313317e6f12c12aff0613d..12ef38e30ef6c7a2e903f945554e3efff247b8dd 100644 --- a/tests/2-nes_tests_by_projection.py +++ b/tests/2-nes_tests_by_projection.py @@ -20,7 +20,7 @@ paths = {'regular_file': {'path': '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/or 'points_ghost_file': {'path': '/gpfs/projects/bsc32/AC_cache/obs/ghost/EANET/1.4/daily/sconcso4/sconcso4_201911.nc', 'projection': 'points_ghost', 'variables': [], # all - 'parallel_methods': ['X']}, + 'parallel_methods': ['X', 'T']}, 'lcc_file': {'path': '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/pm10/pm10_2022062600.nc', 'projection': 'lcc', 'variables': [], # all diff --git a/tests/test_bash_mn4.cmd b/tests/test_bash_mn4.cmd index f3d2da7e3665ebc2cead9c78ff006bc10ed11701..1aa487e0eec2726d3b903659b1ac88a48203b796 100644 --- a/tests/test_bash_mn4.cmd +++ b/tests/test_bash_mn4.cmd @@ -26,6 +26,6 @@ module load pyproj/2.5.0-foss-2019b-Python-3.7.4 export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} -cd /gpfs/scratch/bsc32/bsc32538/NES_tests/NES/tests +cd /gpfs/projects/bsc32/models/NES/tests mpirun --mca mpi_warn_on_fork 0 -np 4 python basic_nes_tests.py diff --git a/tests/test_bash_nord3v2.cmd b/tests/test_bash_nord3v2.cmd index 8dd85bb8db5201407c6decd780c6644dce6fa3dc..c1776988be0cd24788ac45594a85edba82906e2b 100644 --- a/tests/test_bash_nord3v2.cmd +++ b/tests/test_bash_nord3v2.cmd @@ -25,7 +25,6 @@ module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 module load pyproj/2.5.0-foss-2019b-Python-3.7.4 export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} - -cd /esarchive/scratch/avilanova/software/NES/tests +cd /gpfs/projects/bsc32/models/NES/tests mpirun --mca mpi_warn_on_fork 0 -np 4 python 2-nes_tests_by_projection.py