diff --git a/CHANGELOG.md b/CHANGELOG.md index 16777801e873500ffb73768bfadaac2700d660ff..28733133b81841ea706b37a6be2cd195c7f51d97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,7 @@ # NES CHANGELOG -### 0.9.0 -* Release date: 2022/08/12 +### 1.0.0 +* Release date: 2022/11/24 * Changes and new features: * First beta release * Open: @@ -11,8 +11,8 @@ * Lambert Conformal Conic * Mercator * Points - * Points * Points in GHOST format + * Points in Providentia format * Parallelization: * Balanced / Unbalanced * By time axis @@ -25,13 +25,17 @@ * Lambert Conformal Conic * Mercator * Points + * Shapefile * Write: * NetCDF + * CAMS REANALYSIS format * Grib2 + * Shapefile * Interpolation: * Vertical interpolation * Horizontal interpolation * Nearest Neighbours + * Providentia interpolation * Statistics: * Daily_mean * Daily_max diff --git a/Jupyter_notebooks/2-create_nes.ipynb b/Jupyter_notebooks/2-create_nes.ipynb deleted file mode 100644 index e69ddcb5f9f1492aeac2196540200ca6cd3d85cc..0000000000000000000000000000000000000000 --- a/Jupyter_notebooks/2-create_nes.ipynb +++ /dev/null @@ -1,3655 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# How to create regular, rotated, points, LCC and Mercator grids" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "import pandas as pd\n", - "import numpy as np\n", - "from nes import *" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 1. Create regular grid" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "lat_orig = 41.1\n", - "lon_orig = 1.8\n", - "inc_lat = 0.1\n", - "inc_lon = 0.1\n", - "n_lat = 10\n", - "n_lon = 10\n", - "regular_grid = create_nes(comm=None, info=False, projection='regular',\n", - " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", - " n_lat=n_lat, n_lon=n_lon)" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating regular_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "regular_grid.to_netcdf('regular_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:  (time: 1, lev: 1, lat: 10, lon: 10)\n",
-       "Coordinates:\n",
-       "  * time     (time) datetime64[ns] 1996-12-31\n",
-       "  * lev      (lev) float64 0.0\n",
-       "  * lat      (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n",
-       "  * lon      (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n",
-       "Data variables:\n",
-       "    crs      |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, lat: 10, lon: 10)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * lat (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n", - " * lon (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n", - "Data variables:\n", - " crs |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('regular_grid.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 2. Create rotated grid" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "centre_lat = 51\n", - "centre_lon = 10\n", - "west_boundary = -35\n", - "south_boundary = -27\n", - "inc_rlat = 0.2\n", - "inc_rlon = 0.2\n", - "rotated_grid = create_nes(comm=None, info=False, projection='rotated',\n", - " centre_lat=centre_lat, centre_lon=centre_lon,\n", - " west_boundary=west_boundary, south_boundary=south_boundary,\n", - " inc_rlat=inc_rlat, inc_rlon=inc_rlon)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating rotated_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "rotated_grid.to_netcdf('rotated_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:       (time: 1, lev: 1, rlat: 271, rlon: 351)\n",
-       "Coordinates:\n",
-       "  * time          (time) datetime64[ns] 1996-12-31\n",
-       "  * lev           (lev) float64 0.0\n",
-       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
-       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
-       "Data variables:\n",
-       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
-       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
-       "    rotated_pole  |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, rlat: 271, rlon: 351)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", - " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", - "Data variables:\n", - " lat (rlat, rlon) float64 ...\n", - " lon (rlat, rlon) float64 ...\n", - " rotated_pole |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('rotated_grid.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 3. Create grid from points" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### File 1" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
station.codelatlonstandardised_network_provided_area_classification
0ES0266A41.3793222.086140urban-centre
1ES0392A41.7277041.838531urban-suburban
2ES0395A41.5678242.014598urban-centre
3ES0559A41.3874242.164918urban-centre
4ES0567A41.3849062.119574urban-centre
...............
129ES2087A41.9292832.257302NaN
130ES2091A40.5799000.553500NaN
131ES2088A41.7710602.250647NaN
132ES1908A41.2390691.856564NaN
133ES9994A42.3583631.459455NaN
\n", - "

134 rows × 4 columns

\n", - "
" - ], - "text/plain": [ - " station.code lat lon \\\n", - "0 ES0266A 41.379322 2.086140 \n", - "1 ES0392A 41.727704 1.838531 \n", - "2 ES0395A 41.567824 2.014598 \n", - "3 ES0559A 41.387424 2.164918 \n", - "4 ES0567A 41.384906 2.119574 \n", - ".. ... ... ... \n", - "129 ES2087A 41.929283 2.257302 \n", - "130 ES2091A 40.579900 0.553500 \n", - "131 ES2088A 41.771060 2.250647 \n", - "132 ES1908A 41.239069 1.856564 \n", - "133 ES9994A 42.358363 1.459455 \n", - "\n", - " standardised_network_provided_area_classification \n", - "0 urban-centre \n", - "1 urban-suburban \n", - "2 urban-centre \n", - "3 urban-centre \n", - "4 urban-centre \n", - ".. ... \n", - "129 NaN \n", - "130 NaN \n", - "131 NaN \n", - "132 NaN \n", - "133 NaN \n", - "\n", - "[134 rows x 4 columns]" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/XVPCA_info.csv'\n", - "df = pd.read_csv(file_path)\n", - "df" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "lat = df['lat']\n", - "lon = df['lon']\n", - "metadata = {'station_code': {'data': df['station.code'],\n", - " 'dimensions': ('station',),\n", - " 'dtype': str},\n", - " 'area_classification': {'data': df['standardised_network_provided_area_classification'],\n", - " 'dimensions': ('station',),\n", - " 'dtype': str}}" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", - " lat=lat, lon=lon)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "points_grid.variables = metadata" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating points_grid_1.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n", - "Rank 000: Writing station_code var (1/2)\n", - "Rank 000: Var station_code created (1/2)\n", - "Rank 000: Var station_code data (1/2)\n", - "Rank 000: Var station_code completed (1/2)\n", - "Rank 000: Writing area_classification var (2/2)\n", - "Rank 000: Var area_classification created (2/2)\n", - "Rank 000: Var area_classification data (2/2)\n", - "Rank 000: Var area_classification completed (2/2)\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", - " warnings.warn(msg)\n", - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable area_classificationInput dtype=, data dtype=object\n", - " warnings.warn(msg)\n" - ] - } - ], - "source": [ - "points_grid.to_netcdf('points_grid_1.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:              (time: 1, station: 134)\n",
-       "Coordinates:\n",
-       "  * time                 (time) datetime64[ns] 1996-12-31\n",
-       "  * station              (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n",
-       "Data variables:\n",
-       "    station_code         (station) object 'ES0266A' 'ES0392A' ... 'ES9994A'\n",
-       "    area_classification  (station) object 'urban-centre' ... 'nan'\n",
-       "    lat                  (station) float64 41.38 41.73 41.57 ... 41.24 42.36\n",
-       "    lon                  (station) float64 2.086 1.839 2.015 ... 1.857 1.459\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, station: 134)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * station (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n", - "Data variables:\n", - " station_code (station) object ...\n", - " area_classification (station) object ...\n", - " lat (station) float64 ...\n", - " lon (station) float64 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('points_grid_1.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### File 2" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
EstacióPM10 Barcelona (Eixample)PM10 Badalona (guàrdia urbana)PM10 Badalona (Assamblea de Catalunya)PM10 Barcelona (Pl. de la Universitat)PM10 Barcelona (Poblenou)PM10 Barcelona (Zona Universitària)PM10 Barcelona (el Port Vell)PM10 Barcelona (IES Goya)PM10 Barcelona (IES Verdaguer)...PM10 Constantí (Gaudí)PM10 Vila-seca (RENFE)PM10 Sitges (Vallcarca-oficines)PM10 Sant Vicenç dels Horts (Àlaba)PM10 Montsec (OAM)PM10 Montseny (la Castanya)PM10 Caldes de Montbui (Ajuntament)PM10 Sant Feliu de Llobregat (Eugeni d'Ors)PM 10 La Seu d'Urgell (CC Les Monges)PM10 Vic (Centre Cívic Santa Anna)
0Codi europeuES1438AES1928AES2027AES0559AES0691AES0567AES1870AES1852AES1900A...ES1123AES1117AES2033AES2011AES1982AES1778AES1680AES1362AES9994AES1874A
1DiaValor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)...Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)
22017-01-01 00:00:0019.6NaN2020.225.616.529NaN23.8...12.97NaN1122.499.5002997.936455NaNNaNNaNNaN
32017-01-02 00:00:0027.220.862331.63522.82817.232.4...NaN25.382625.391.8296189.7870043222.06NaNNaN
42017-01-03 00:00:0035.7NaN323736.230.931NaN35.8...21.836.494830.658.09460716.978294335.84NaNNaN
..................................................................
3622017-12-27 00:00:0017.57.591016.91413.121NaN20.8...1222.95NaNNaN13.066751NaN10.3NaNNaN
3632017-12-28 00:00:0017NaN1417.915NaN1314.516...NaN6.5NaN9.97613.351872NaN26.81NaNNaN
3642017-12-29 00:00:0024.6212423.225.815.321NaN25.9...8.869.56NaN23.7614.219732NaN14.09NaNNaN
3652017-12-30 00:00:0027.4NaN1522.316.611.21610.718.8...NaNNaNNaN19.041.0911874.713029NaNNaNNaNNaN
3662017-12-31 00:00:0017.312.51316.317.69.914NaN17.4...12.77NaNNaN15.232.156595.024302NaNNaNNaNNaN
\n", - "

367 rows × 84 columns

\n", - "
" - ], - "text/plain": [ - " Estació PM10 Barcelona (Eixample) \\\n", - "0 Codi europeu ES1438A \n", - "1 Dia Valor (µg/m3) \n", - "2 2017-01-01 00:00:00 19.6 \n", - "3 2017-01-02 00:00:00 27.2 \n", - "4 2017-01-03 00:00:00 35.7 \n", - ".. ... ... \n", - "362 2017-12-27 00:00:00 17.5 \n", - "363 2017-12-28 00:00:00 17 \n", - "364 2017-12-29 00:00:00 24.6 \n", - "365 2017-12-30 00:00:00 27.4 \n", - "366 2017-12-31 00:00:00 17.3 \n", - "\n", - " PM10 Badalona (guàrdia urbana) PM10 Badalona (Assamblea de Catalunya) \\\n", - "0 ES1928A ES2027A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 NaN 20 \n", - "3 20.86 23 \n", - "4 NaN 32 \n", - ".. ... ... \n", - "362 7.59 10 \n", - "363 NaN 14 \n", - "364 21 24 \n", - "365 NaN 15 \n", - "366 12.5 13 \n", - "\n", - " PM10 Barcelona (Pl. de la Universitat) PM10 Barcelona (Poblenou) \\\n", - "0 ES0559A ES0691A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 20.2 25.6 \n", - "3 31.6 35 \n", - "4 37 36.2 \n", - ".. ... ... \n", - "362 16.9 14 \n", - "363 17.9 15 \n", - "364 23.2 25.8 \n", - "365 22.3 16.6 \n", - "366 16.3 17.6 \n", - "\n", - " PM10 Barcelona (Zona Universitària) PM10 Barcelona (el Port Vell) \\\n", - "0 ES0567A ES1870A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 16.5 29 \n", - "3 22.8 28 \n", - "4 30.9 31 \n", - ".. ... ... \n", - "362 13.1 21 \n", - "363 NaN 13 \n", - "364 15.3 21 \n", - "365 11.2 16 \n", - "366 9.9 14 \n", - "\n", - " PM10 Barcelona (IES Goya) PM10 Barcelona (IES Verdaguer) ... \\\n", - "0 ES1852A ES1900A ... \n", - "1 Valor (µg/m3) Valor (µg/m3) ... \n", - "2 NaN 23.8 ... \n", - "3 17.2 32.4 ... \n", - "4 NaN 35.8 ... \n", - ".. ... ... ... \n", - "362 NaN 20.8 ... \n", - "363 14.5 16 ... \n", - "364 NaN 25.9 ... \n", - "365 10.7 18.8 ... \n", - "366 NaN 17.4 ... \n", - "\n", - " PM10 Constantí (Gaudí) PM10 Vila-seca (RENFE) \\\n", - "0 ES1123A ES1117A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 12.97 NaN \n", - "3 NaN 25.38 \n", - "4 21.8 36.49 \n", - ".. ... ... \n", - "362 12 22.95 \n", - "363 NaN 6.5 \n", - "364 8.86 9.56 \n", - "365 NaN NaN \n", - "366 12.77 NaN \n", - "\n", - " PM10 Sitges (Vallcarca-oficines) PM10 Sant Vicenç dels Horts (Àlaba) \\\n", - "0 ES2033A ES2011A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 11 22.49 \n", - "3 26 25.39 \n", - "4 48 30.65 \n", - ".. ... ... \n", - "362 NaN NaN \n", - "363 NaN 9.976 \n", - "364 NaN 23.76 \n", - "365 NaN 19.04 \n", - "366 NaN 15.23 \n", - "\n", - " PM10 Montsec (OAM) PM10 Montseny (la Castanya) \\\n", - "0 ES1982A ES1778A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 9.500299 7.936455 \n", - "3 1.829618 9.787004 \n", - "4 8.094607 16.97829 \n", - ".. ... ... \n", - "362 1 3.066751 \n", - "363 1 3.351872 \n", - "364 1 4.219732 \n", - "365 1.091187 4.713029 \n", - "366 2.15659 5.024302 \n", - "\n", - " PM10 Caldes de Montbui (Ajuntament) \\\n", - "0 ES1680A \n", - "1 Valor (µg/m3) \n", - "2 NaN \n", - "3 32 \n", - "4 43 \n", - ".. ... \n", - "362 NaN \n", - "363 NaN \n", - "364 NaN \n", - "365 NaN \n", - "366 NaN \n", - "\n", - " PM10 Sant Feliu de Llobregat (Eugeni d'Ors) \\\n", - "0 ES1362A \n", - "1 Valor (µg/m3) \n", - "2 NaN \n", - "3 22.06 \n", - "4 35.84 \n", - ".. ... \n", - "362 10.3 \n", - "363 26.81 \n", - "364 14.09 \n", - "365 NaN \n", - "366 NaN \n", - "\n", - " PM 10 La Seu d'Urgell (CC Les Monges) PM10 Vic (Centre Cívic Santa Anna) \n", - "0 ES9994A ES1874A \n", - "1 Valor (µg/m3) Valor (µg/m3) \n", - "2 NaN NaN \n", - "3 NaN NaN \n", - "4 NaN NaN \n", - ".. ... ... 
\n", - "362 NaN NaN \n", - "363 NaN NaN \n", - "364 NaN NaN \n", - "365 NaN NaN \n", - "366 NaN NaN \n", - "\n", - "[367 rows x 84 columns]" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/Dades_2017.xlsx'\n", - "df_2 = pd.read_excel(file_path)\n", - "df_2" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "times = df_2['Estació'].iloc[2:]\n", - "lat = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", - "lon = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", - "metadata = {'station_name': {'data': df_2.columns.str.replace('PM10 ', '').str.replace('PM 10 ', '').to_numpy()[1:],\n", - " 'dimensions': ('station',),\n", - " 'dtype': str},\n", - " 'station_code': {'data': df_2.iloc[0].values[1:],\n", - " 'dimensions': ('station',),\n", - " 'dtype': str},\n", - " 'pm10': {'data': df_2.iloc[2:, 1:].to_numpy().T,\n", - " 'dimensions': ('station', 'time',),\n", - " 'dtype': float}}" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", - " lat=lat, lon=lon, times=times)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - "points_grid.variables = metadata" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating points_grid_2.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n", - "Rank 000: Writing station_name var (1/3)\n", - "Rank 000: Var station_name created (1/3)\n", - "Rank 000: Var station_name data (1/3)\n", - "Rank 000: Var station_name completed (1/3)\n", - "Rank 000: Writing station_code var (2/3)\n", - "Rank 000: Var station_code created (2/3)\n", - "Rank 000: Var station_code data (2/3)\n", - "Rank 000: Var station_code completed (2/3)\n", - "Rank 000: Writing pm10 var (3/3)\n", - "Rank 000: Var pm10 created (3/3)\n", - "Rank 000: Var pm10 data (3/3)\n", - "Rank 000: Var pm10 completed (3/3)\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", - " warnings.warn(msg)\n", - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", - " warnings.warn(msg)\n", - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable pm10Input dtype=, data dtype=object\n", - " warnings.warn(msg)\n" - ] - } - ], - "source": [ - "points_grid.to_netcdf('points_grid_2.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:       (time: 365, station: 83)\n",
-       "Coordinates:\n",
-       "  * time          (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n",
-       "  * station       (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n",
-       "Data variables:\n",
-       "    station_name  (station) object 'Barcelona (Eixample)' ... 'Vic (Centre Cí...\n",
-       "    station_code  (station) object 'ES1438A' 'ES1928A' ... 'ES9994A' 'ES1874A'\n",
-       "    pm10          (station, time) float64 19.6 27.2 35.7 30.9 ... nan nan nan\n",
-       "    lat           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
-       "    lon           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 365, station: 83)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n", - " * station (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n", - "Data variables:\n", - " station_name (station) object ...\n", - " station_code (station) object ...\n", - " pm10 (station, time) float64 ...\n", - " lat (station) float64 ...\n", - " lon (station) float64 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('points_grid_2.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 4. Create LCC grid" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [], - "source": [ - "lat_1 = 37\n", - "lat_2 = 43\n", - "lon_0 = -3\n", - "lat_0 = 40\n", - "nx = 397\n", - "ny = 397\n", - "inc_x = 4000\n", - "inc_y = 4000\n", - "x_0 = -807847.688\n", - "y_0 = -797137.125\n", - "lcc_grid = create_nes(comm=None, info=False, projection='lcc',\n", - " lat_1=lat_1, lat_2=lat_2, lon_0=lon_0, lat_0=lat_0, \n", - " nx=nx, ny=ny, inc_x=inc_x, inc_y=inc_y, x_0=x_0, y_0=y_0)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating lcc_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "lcc_grid.to_netcdf('lcc_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:            (time: 1, lev: 1, y: 397, x: 397)\n",
-       "Coordinates:\n",
-       "  * time               (time) datetime64[ns] 1996-12-31\n",
-       "  * lev                (lev) float64 0.0\n",
-       "  * y                  (y) float64 -7.951e+05 -7.911e+05 ... 7.849e+05 7.889e+05\n",
-       "  * x                  (x) float64 -8.058e+05 -8.018e+05 ... 7.742e+05 7.782e+05\n",
-       "Data variables:\n",
-       "    lat                (y, x) float64 ...\n",
-       "    lon                (y, x) float64 ...\n",
-       "    Lambert_conformal  |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, y: 397, x: 397)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * y (y) float64 -7.951e+05 -7.911e+05 ... 7.849e+05 7.889e+05\n", - " * x (x) float64 -8.058e+05 -8.018e+05 ... 7.742e+05 7.782e+05\n", - "Data variables:\n", - " lat (y, x) float64 ...\n", - " lon (y, x) float64 ...\n", - " Lambert_conformal |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('lcc_grid.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 5. Create Mercator grid" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "lat_ts = -1.5\n", - "lon_0 = -18.0\n", - "nx = 210\n", - "ny = 236\n", - "inc_x = 50000\n", - "inc_y = 50000\n", - "x_0 = -126017.5\n", - "y_0 = -5407460.0\n", - "mercator_grid = create_nes(comm=None, info=False, projection='mercator',\n", - " lat_ts=lat_ts, lon_0=lon_0, nx=nx, ny=ny, \n", - " inc_x=inc_x, inc_y=inc_y, x_0=x_0, y_0=y_0)" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating mercator_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "mercator_grid.to_netcdf('mercator_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:   (time: 1, lev: 1, y: 236, x: 210)\n",
-       "Coordinates:\n",
-       "  * time      (time) datetime64[ns] 1996-12-31\n",
-       "  * lev       (lev) float64 0.0\n",
-       "  * y         (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
-       "  * x         (x) float64 -1.01e+05 -5.102e+04 -1.018e+03 ... 1.03e+07 1.035e+07\n",
-       "Data variables:\n",
-       "    lat       (y, x) float64 -43.52 -43.52 -43.52 -43.52 ... 49.6 49.6 49.6 49.6\n",
-       "    lon       (y, x) float64 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n",
-       "    mercator  |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, y: 236, x: 210)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", - " * x (x) float64 -1.01e+05 -5.102e+04 -1.018e+03 ... 1.03e+07 1.035e+07\n", - "Data variables:\n", - " lat (y, x) float64 ...\n", - " lon (y, x) float64 ...\n", - " mercator |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('mercator_grid.nc')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/Jupyter_notebooks/4-providentia.ipynb b/Jupyter_notebooks/4-providentia.ipynb deleted file mode 100644 index 5e0303360a2221c26721ce15c7d648399a6d5e71..0000000000000000000000000000000000000000 --- a/Jupyter_notebooks/4-providentia.ipynb +++ /dev/null @@ -1,9953 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "from netCDF4 import Dataset\n", - "from nes import *" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Observations dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [], - "source": [ - "obs_path = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.3.3/hourly/sconco3/sconco3_201804.nc'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Read" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:                                                           (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n",
-       "Coordinates:\n",
-       "  * time                                                              (time) datetime64[ns] ...\n",
-       "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n",
-       "Data variables: (12/179)\n",
-       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
-       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
-       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
-       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
-       "    EDGAR_v4.3.2_annual_average_NMVOC_emissions                       (station) float32 ...\n",
-       "    EDGAR_v4.3.2_annual_average_NOx_emissions                         (station) float32 ...\n",
-       "    ...                                                                ...\n",
-       "    station_timezone                                                  (station) object ...\n",
-       "    street_type                                                       (station) object ...\n",
-       "    street_width                                                      (station) float32 ...\n",
-       "    terrain                                                           (station) object ...\n",
-       "    vertical_datum                                                    (station) object ...\n",
-       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
-       "Attributes:\n",
-       "    title:                     Surface ozone data in the EBAS network in 2018...\n",
-       "    institution:               Barcelona Supercomputing Center\n",
-       "    source:                    Surface observations\n",
-       "    creator_name:              Dene R. Bowdalo\n",
-       "    creator_email:             dene.bowdalo@bsc.es\n",
-       "    conventions:               CF-1.7\n",
-       "    data_version:              1.3.3\n",
-       "    history:                   Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n",
-       "    NCO:                       4.7.2\n",
-       "    nco_openmp_thread_number:  1
" - ], - "text/plain": [ - "\n", - "Dimensions: (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] ...\n", - "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n", - "Data variables: (12/179)\n", - " ASTER_v3_altitude (station) float32 ...\n", - " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", - " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", - " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", - " EDGAR_v4.3.2_annual_average_NMVOC_emissions (station) float32 ...\n", - " EDGAR_v4.3.2_annual_average_NOx_emissions (station) float32 ...\n", - " ... ...\n", - " station_timezone (station) object ...\n", - " street_type (station) object ...\n", - " street_width (station) float32 ...\n", - " terrain (station) object ...\n", - " vertical_datum (station) object ...\n", - " weekday_weekend_code (station, time) uint8 ...\n", - "Attributes:\n", - " title: Surface ozone data in the EBAS network in 2018...\n", - " institution: Barcelona Supercomputing Center\n", - " source: Surface observations\n", - " creator_name: Dene R. Bowdalo\n", - " creator_email: dene.bowdalo@bsc.es\n", - " conventions: CF-1.7\n", - " data_version: 1.3.3\n", - " history: Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n", - " NCO: 4.7.2\n", - " nco_openmp_thread_number: 1" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset(obs_path)" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_1 = open_netcdf(path=obs_path, info=True, parallel_method='X')\n", - "nessy_1" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[datetime.datetime(2018, 4, 1, 0, 0),\n", - " datetime.datetime(2018, 4, 1, 1, 0),\n", - " datetime.datetime(2018, 4, 1, 2, 0),\n", - " datetime.datetime(2018, 4, 1, 3, 0),\n", - " datetime.datetime(2018, 4, 1, 4, 0),\n", - " datetime.datetime(2018, 4, 1, 5, 0),\n", - " datetime.datetime(2018, 4, 1, 6, 0),\n", - " datetime.datetime(2018, 4, 1, 7, 0),\n", - " datetime.datetime(2018, 4, 1, 8, 0),\n", - " datetime.datetime(2018, 4, 1, 9, 0),\n", - " datetime.datetime(2018, 4, 1, 10, 0),\n", - " datetime.datetime(2018, 4, 1, 11, 0),\n", - " datetime.datetime(2018, 4, 1, 12, 0),\n", - " datetime.datetime(2018, 4, 1, 13, 0),\n", - " datetime.datetime(2018, 4, 1, 14, 0),\n", - " datetime.datetime(2018, 4, 1, 15, 0),\n", - " datetime.datetime(2018, 4, 1, 16, 0),\n", - " datetime.datetime(2018, 4, 1, 17, 0),\n", - " datetime.datetime(2018, 4, 1, 18, 0),\n", - " datetime.datetime(2018, 4, 1, 19, 0),\n", - " datetime.datetime(2018, 4, 1, 20, 0),\n", - " datetime.datetime(2018, 4, 1, 21, 0),\n", - " datetime.datetime(2018, 4, 1, 22, 0),\n", - " datetime.datetime(2018, 4, 1, 23, 0),\n", - " datetime.datetime(2018, 4, 2, 0, 0),\n", - " datetime.datetime(2018, 4, 2, 1, 0),\n", - " datetime.datetime(2018, 4, 2, 2, 0),\n", - " datetime.datetime(2018, 4, 2, 3, 0),\n", - " datetime.datetime(2018, 4, 2, 4, 0),\n", - " datetime.datetime(2018, 4, 2, 5, 0),\n", - " datetime.datetime(2018, 4, 2, 6, 0),\n", - " datetime.datetime(2018, 4, 2, 7, 0),\n", - " datetime.datetime(2018, 4, 2, 8, 0),\n", - " datetime.datetime(2018, 4, 2, 9, 0),\n", - " 
datetime.datetime(2018, 4, 2, 10, 0),\n", - " datetime.datetime(2018, 4, 2, 11, 0),\n", - " datetime.datetime(2018, 4, 2, 12, 0),\n", - " datetime.datetime(2018, 4, 2, 13, 0),\n", - " datetime.datetime(2018, 4, 2, 14, 0),\n", - " datetime.datetime(2018, 4, 2, 15, 0),\n", - " datetime.datetime(2018, 4, 2, 16, 0),\n", - " datetime.datetime(2018, 4, 2, 17, 0),\n", - " datetime.datetime(2018, 4, 2, 18, 0),\n", - " datetime.datetime(2018, 4, 2, 19, 0),\n", - " datetime.datetime(2018, 4, 2, 20, 0),\n", - " datetime.datetime(2018, 4, 2, 21, 0),\n", - " datetime.datetime(2018, 4, 2, 22, 0),\n", - " datetime.datetime(2018, 4, 2, 23, 0),\n", - " datetime.datetime(2018, 4, 3, 0, 0),\n", - " datetime.datetime(2018, 4, 3, 1, 0),\n", - " datetime.datetime(2018, 4, 3, 2, 0),\n", - " datetime.datetime(2018, 4, 3, 3, 0),\n", - " datetime.datetime(2018, 4, 3, 4, 0),\n", - " datetime.datetime(2018, 4, 3, 5, 0),\n", - " datetime.datetime(2018, 4, 3, 6, 0),\n", - " datetime.datetime(2018, 4, 3, 7, 0),\n", - " datetime.datetime(2018, 4, 3, 8, 0),\n", - " datetime.datetime(2018, 4, 3, 9, 0),\n", - " datetime.datetime(2018, 4, 3, 10, 0),\n", - " datetime.datetime(2018, 4, 3, 11, 0),\n", - " datetime.datetime(2018, 4, 3, 12, 0),\n", - " datetime.datetime(2018, 4, 3, 13, 0),\n", - " datetime.datetime(2018, 4, 3, 14, 0),\n", - " datetime.datetime(2018, 4, 3, 15, 0),\n", - " datetime.datetime(2018, 4, 3, 16, 0),\n", - " datetime.datetime(2018, 4, 3, 17, 0),\n", - " datetime.datetime(2018, 4, 3, 18, 0),\n", - " datetime.datetime(2018, 4, 3, 19, 0),\n", - " datetime.datetime(2018, 4, 3, 20, 0),\n", - " datetime.datetime(2018, 4, 3, 21, 0),\n", - " datetime.datetime(2018, 4, 3, 22, 0),\n", - " datetime.datetime(2018, 4, 3, 23, 0),\n", - " datetime.datetime(2018, 4, 4, 0, 0),\n", - " datetime.datetime(2018, 4, 4, 1, 0),\n", - " datetime.datetime(2018, 4, 4, 2, 0),\n", - " datetime.datetime(2018, 4, 4, 3, 0),\n", - " datetime.datetime(2018, 4, 4, 4, 0),\n", - " datetime.datetime(2018, 4, 4, 5, 0),\n", - " datetime.datetime(2018, 4, 4, 6, 0),\n", - " datetime.datetime(2018, 4, 4, 7, 0),\n", - " datetime.datetime(2018, 4, 4, 8, 0),\n", - " datetime.datetime(2018, 4, 4, 9, 0),\n", - " datetime.datetime(2018, 4, 4, 10, 0),\n", - " datetime.datetime(2018, 4, 4, 11, 0),\n", - " datetime.datetime(2018, 4, 4, 12, 0),\n", - " datetime.datetime(2018, 4, 4, 13, 0),\n", - " datetime.datetime(2018, 4, 4, 14, 0),\n", - " datetime.datetime(2018, 4, 4, 15, 0),\n", - " datetime.datetime(2018, 4, 4, 16, 0),\n", - " datetime.datetime(2018, 4, 4, 17, 0),\n", - " datetime.datetime(2018, 4, 4, 18, 0),\n", - " datetime.datetime(2018, 4, 4, 19, 0),\n", - " datetime.datetime(2018, 4, 4, 20, 0),\n", - " datetime.datetime(2018, 4, 4, 21, 0),\n", - " datetime.datetime(2018, 4, 4, 22, 0),\n", - " datetime.datetime(2018, 4, 4, 23, 0),\n", - " datetime.datetime(2018, 4, 5, 0, 0),\n", - " datetime.datetime(2018, 4, 5, 1, 0),\n", - " datetime.datetime(2018, 4, 5, 2, 0),\n", - " datetime.datetime(2018, 4, 5, 3, 0),\n", - " datetime.datetime(2018, 4, 5, 4, 0),\n", - " datetime.datetime(2018, 4, 5, 5, 0),\n", - " datetime.datetime(2018, 4, 5, 6, 0),\n", - " datetime.datetime(2018, 4, 5, 7, 0),\n", - " datetime.datetime(2018, 4, 5, 8, 0),\n", - " datetime.datetime(2018, 4, 5, 9, 0),\n", - " datetime.datetime(2018, 4, 5, 10, 0),\n", - " datetime.datetime(2018, 4, 5, 11, 0),\n", - " datetime.datetime(2018, 4, 5, 12, 0),\n", - " datetime.datetime(2018, 4, 5, 13, 0),\n", - " datetime.datetime(2018, 4, 5, 14, 0),\n", - " datetime.datetime(2018, 4, 5, 15, 0),\n", 
- " datetime.datetime(2018, 4, 5, 16, 0),\n", - " datetime.datetime(2018, 4, 5, 17, 0),\n", - " datetime.datetime(2018, 4, 5, 18, 0),\n", - " datetime.datetime(2018, 4, 5, 19, 0),\n", - " datetime.datetime(2018, 4, 5, 20, 0),\n", - " datetime.datetime(2018, 4, 5, 21, 0),\n", - " datetime.datetime(2018, 4, 5, 22, 0),\n", - " datetime.datetime(2018, 4, 5, 23, 0),\n", - " datetime.datetime(2018, 4, 6, 0, 0),\n", - " datetime.datetime(2018, 4, 6, 1, 0),\n", - " datetime.datetime(2018, 4, 6, 2, 0),\n", - " datetime.datetime(2018, 4, 6, 3, 0),\n", - " datetime.datetime(2018, 4, 6, 4, 0),\n", - " datetime.datetime(2018, 4, 6, 5, 0),\n", - " datetime.datetime(2018, 4, 6, 6, 0),\n", - " datetime.datetime(2018, 4, 6, 7, 0),\n", - " datetime.datetime(2018, 4, 6, 8, 0),\n", - " datetime.datetime(2018, 4, 6, 9, 0),\n", - " datetime.datetime(2018, 4, 6, 10, 0),\n", - " datetime.datetime(2018, 4, 6, 11, 0),\n", - " datetime.datetime(2018, 4, 6, 12, 0),\n", - " datetime.datetime(2018, 4, 6, 13, 0),\n", - " datetime.datetime(2018, 4, 6, 14, 0),\n", - " datetime.datetime(2018, 4, 6, 15, 0),\n", - " datetime.datetime(2018, 4, 6, 16, 0),\n", - " datetime.datetime(2018, 4, 6, 17, 0),\n", - " datetime.datetime(2018, 4, 6, 18, 0),\n", - " datetime.datetime(2018, 4, 6, 19, 0),\n", - " datetime.datetime(2018, 4, 6, 20, 0),\n", - " datetime.datetime(2018, 4, 6, 21, 0),\n", - " datetime.datetime(2018, 4, 6, 22, 0),\n", - " datetime.datetime(2018, 4, 6, 23, 0),\n", - " datetime.datetime(2018, 4, 7, 0, 0),\n", - " datetime.datetime(2018, 4, 7, 1, 0),\n", - " datetime.datetime(2018, 4, 7, 2, 0),\n", - " datetime.datetime(2018, 4, 7, 3, 0),\n", - " datetime.datetime(2018, 4, 7, 4, 0),\n", - " datetime.datetime(2018, 4, 7, 5, 0),\n", - " datetime.datetime(2018, 4, 7, 6, 0),\n", - " datetime.datetime(2018, 4, 7, 7, 0),\n", - " datetime.datetime(2018, 4, 7, 8, 0),\n", - " datetime.datetime(2018, 4, 7, 9, 0),\n", - " datetime.datetime(2018, 4, 7, 10, 0),\n", - " datetime.datetime(2018, 4, 7, 11, 0),\n", - " datetime.datetime(2018, 4, 7, 12, 0),\n", - " datetime.datetime(2018, 4, 7, 13, 0),\n", - " datetime.datetime(2018, 4, 7, 14, 0),\n", - " datetime.datetime(2018, 4, 7, 15, 0),\n", - " datetime.datetime(2018, 4, 7, 16, 0),\n", - " datetime.datetime(2018, 4, 7, 17, 0),\n", - " datetime.datetime(2018, 4, 7, 18, 0),\n", - " datetime.datetime(2018, 4, 7, 19, 0),\n", - " datetime.datetime(2018, 4, 7, 20, 0),\n", - " datetime.datetime(2018, 4, 7, 21, 0),\n", - " datetime.datetime(2018, 4, 7, 22, 0),\n", - " datetime.datetime(2018, 4, 7, 23, 0),\n", - " datetime.datetime(2018, 4, 8, 0, 0),\n", - " datetime.datetime(2018, 4, 8, 1, 0),\n", - " datetime.datetime(2018, 4, 8, 2, 0),\n", - " datetime.datetime(2018, 4, 8, 3, 0),\n", - " datetime.datetime(2018, 4, 8, 4, 0),\n", - " datetime.datetime(2018, 4, 8, 5, 0),\n", - " datetime.datetime(2018, 4, 8, 6, 0),\n", - " datetime.datetime(2018, 4, 8, 7, 0),\n", - " datetime.datetime(2018, 4, 8, 8, 0),\n", - " datetime.datetime(2018, 4, 8, 9, 0),\n", - " datetime.datetime(2018, 4, 8, 10, 0),\n", - " datetime.datetime(2018, 4, 8, 11, 0),\n", - " datetime.datetime(2018, 4, 8, 12, 0),\n", - " datetime.datetime(2018, 4, 8, 13, 0),\n", - " datetime.datetime(2018, 4, 8, 14, 0),\n", - " datetime.datetime(2018, 4, 8, 15, 0),\n", - " datetime.datetime(2018, 4, 8, 16, 0),\n", - " datetime.datetime(2018, 4, 8, 17, 0),\n", - " datetime.datetime(2018, 4, 8, 18, 0),\n", - " datetime.datetime(2018, 4, 8, 19, 0),\n", - " datetime.datetime(2018, 4, 8, 20, 0),\n", - " datetime.datetime(2018, 4, 8, 21, 
0),\n", - " datetime.datetime(2018, 4, 8, 22, 0),\n", - " datetime.datetime(2018, 4, 8, 23, 0),\n", - " datetime.datetime(2018, 4, 9, 0, 0),\n", - " datetime.datetime(2018, 4, 9, 1, 0),\n", - " datetime.datetime(2018, 4, 9, 2, 0),\n", - " datetime.datetime(2018, 4, 9, 3, 0),\n", - " datetime.datetime(2018, 4, 9, 4, 0),\n", - " datetime.datetime(2018, 4, 9, 5, 0),\n", - " datetime.datetime(2018, 4, 9, 6, 0),\n", - " datetime.datetime(2018, 4, 9, 7, 0),\n", - " datetime.datetime(2018, 4, 9, 8, 0),\n", - " datetime.datetime(2018, 4, 9, 9, 0),\n", - " datetime.datetime(2018, 4, 9, 10, 0),\n", - " datetime.datetime(2018, 4, 9, 11, 0),\n", - " datetime.datetime(2018, 4, 9, 12, 0),\n", - " datetime.datetime(2018, 4, 9, 13, 0),\n", - " datetime.datetime(2018, 4, 9, 14, 0),\n", - " datetime.datetime(2018, 4, 9, 15, 0),\n", - " datetime.datetime(2018, 4, 9, 16, 0),\n", - " datetime.datetime(2018, 4, 9, 17, 0),\n", - " datetime.datetime(2018, 4, 9, 18, 0),\n", - " datetime.datetime(2018, 4, 9, 19, 0),\n", - " datetime.datetime(2018, 4, 9, 20, 0),\n", - " datetime.datetime(2018, 4, 9, 21, 0),\n", - " datetime.datetime(2018, 4, 9, 22, 0),\n", - " datetime.datetime(2018, 4, 9, 23, 0),\n", - " datetime.datetime(2018, 4, 10, 0, 0),\n", - " datetime.datetime(2018, 4, 10, 1, 0),\n", - " datetime.datetime(2018, 4, 10, 2, 0),\n", - " datetime.datetime(2018, 4, 10, 3, 0),\n", - " datetime.datetime(2018, 4, 10, 4, 0),\n", - " datetime.datetime(2018, 4, 10, 5, 0),\n", - " datetime.datetime(2018, 4, 10, 6, 0),\n", - " datetime.datetime(2018, 4, 10, 7, 0),\n", - " datetime.datetime(2018, 4, 10, 8, 0),\n", - " datetime.datetime(2018, 4, 10, 9, 0),\n", - " datetime.datetime(2018, 4, 10, 10, 0),\n", - " datetime.datetime(2018, 4, 10, 11, 0),\n", - " datetime.datetime(2018, 4, 10, 12, 0),\n", - " datetime.datetime(2018, 4, 10, 13, 0),\n", - " datetime.datetime(2018, 4, 10, 14, 0),\n", - " datetime.datetime(2018, 4, 10, 15, 0),\n", - " datetime.datetime(2018, 4, 10, 16, 0),\n", - " datetime.datetime(2018, 4, 10, 17, 0),\n", - " datetime.datetime(2018, 4, 10, 18, 0),\n", - " datetime.datetime(2018, 4, 10, 19, 0),\n", - " datetime.datetime(2018, 4, 10, 20, 0),\n", - " datetime.datetime(2018, 4, 10, 21, 0),\n", - " datetime.datetime(2018, 4, 10, 22, 0),\n", - " datetime.datetime(2018, 4, 10, 23, 0),\n", - " datetime.datetime(2018, 4, 11, 0, 0),\n", - " datetime.datetime(2018, 4, 11, 1, 0),\n", - " datetime.datetime(2018, 4, 11, 2, 0),\n", - " datetime.datetime(2018, 4, 11, 3, 0),\n", - " datetime.datetime(2018, 4, 11, 4, 0),\n", - " datetime.datetime(2018, 4, 11, 5, 0),\n", - " datetime.datetime(2018, 4, 11, 6, 0),\n", - " datetime.datetime(2018, 4, 11, 7, 0),\n", - " datetime.datetime(2018, 4, 11, 8, 0),\n", - " datetime.datetime(2018, 4, 11, 9, 0),\n", - " datetime.datetime(2018, 4, 11, 10, 0),\n", - " datetime.datetime(2018, 4, 11, 11, 0),\n", - " datetime.datetime(2018, 4, 11, 12, 0),\n", - " datetime.datetime(2018, 4, 11, 13, 0),\n", - " datetime.datetime(2018, 4, 11, 14, 0),\n", - " datetime.datetime(2018, 4, 11, 15, 0),\n", - " datetime.datetime(2018, 4, 11, 16, 0),\n", - " datetime.datetime(2018, 4, 11, 17, 0),\n", - " datetime.datetime(2018, 4, 11, 18, 0),\n", - " datetime.datetime(2018, 4, 11, 19, 0),\n", - " datetime.datetime(2018, 4, 11, 20, 0),\n", - " datetime.datetime(2018, 4, 11, 21, 0),\n", - " datetime.datetime(2018, 4, 11, 22, 0),\n", - " datetime.datetime(2018, 4, 11, 23, 0),\n", - " datetime.datetime(2018, 4, 12, 0, 0),\n", - " datetime.datetime(2018, 4, 12, 1, 0),\n", - " datetime.datetime(2018, 4, 
12, 2, 0),\n", - " datetime.datetime(2018, 4, 12, 3, 0),\n", - " datetime.datetime(2018, 4, 12, 4, 0),\n", - " datetime.datetime(2018, 4, 12, 5, 0),\n", - " datetime.datetime(2018, 4, 12, 6, 0),\n", - " datetime.datetime(2018, 4, 12, 7, 0),\n", - " datetime.datetime(2018, 4, 12, 8, 0),\n", - " datetime.datetime(2018, 4, 12, 9, 0),\n", - " datetime.datetime(2018, 4, 12, 10, 0),\n", - " datetime.datetime(2018, 4, 12, 11, 0),\n", - " datetime.datetime(2018, 4, 12, 12, 0),\n", - " datetime.datetime(2018, 4, 12, 13, 0),\n", - " datetime.datetime(2018, 4, 12, 14, 0),\n", - " datetime.datetime(2018, 4, 12, 15, 0),\n", - " datetime.datetime(2018, 4, 12, 16, 0),\n", - " datetime.datetime(2018, 4, 12, 17, 0),\n", - " datetime.datetime(2018, 4, 12, 18, 0),\n", - " datetime.datetime(2018, 4, 12, 19, 0),\n", - " datetime.datetime(2018, 4, 12, 20, 0),\n", - " datetime.datetime(2018, 4, 12, 21, 0),\n", - " datetime.datetime(2018, 4, 12, 22, 0),\n", - " datetime.datetime(2018, 4, 12, 23, 0),\n", - " datetime.datetime(2018, 4, 13, 0, 0),\n", - " datetime.datetime(2018, 4, 13, 1, 0),\n", - " datetime.datetime(2018, 4, 13, 2, 0),\n", - " datetime.datetime(2018, 4, 13, 3, 0),\n", - " datetime.datetime(2018, 4, 13, 4, 0),\n", - " datetime.datetime(2018, 4, 13, 5, 0),\n", - " datetime.datetime(2018, 4, 13, 6, 0),\n", - " datetime.datetime(2018, 4, 13, 7, 0),\n", - " datetime.datetime(2018, 4, 13, 8, 0),\n", - " datetime.datetime(2018, 4, 13, 9, 0),\n", - " datetime.datetime(2018, 4, 13, 10, 0),\n", - " datetime.datetime(2018, 4, 13, 11, 0),\n", - " datetime.datetime(2018, 4, 13, 12, 0),\n", - " datetime.datetime(2018, 4, 13, 13, 0),\n", - " datetime.datetime(2018, 4, 13, 14, 0),\n", - " datetime.datetime(2018, 4, 13, 15, 0),\n", - " datetime.datetime(2018, 4, 13, 16, 0),\n", - " datetime.datetime(2018, 4, 13, 17, 0),\n", - " datetime.datetime(2018, 4, 13, 18, 0),\n", - " datetime.datetime(2018, 4, 13, 19, 0),\n", - " datetime.datetime(2018, 4, 13, 20, 0),\n", - " datetime.datetime(2018, 4, 13, 21, 0),\n", - " datetime.datetime(2018, 4, 13, 22, 0),\n", - " datetime.datetime(2018, 4, 13, 23, 0),\n", - " datetime.datetime(2018, 4, 14, 0, 0),\n", - " datetime.datetime(2018, 4, 14, 1, 0),\n", - " datetime.datetime(2018, 4, 14, 2, 0),\n", - " datetime.datetime(2018, 4, 14, 3, 0),\n", - " datetime.datetime(2018, 4, 14, 4, 0),\n", - " datetime.datetime(2018, 4, 14, 5, 0),\n", - " datetime.datetime(2018, 4, 14, 6, 0),\n", - " datetime.datetime(2018, 4, 14, 7, 0),\n", - " datetime.datetime(2018, 4, 14, 8, 0),\n", - " datetime.datetime(2018, 4, 14, 9, 0),\n", - " datetime.datetime(2018, 4, 14, 10, 0),\n", - " datetime.datetime(2018, 4, 14, 11, 0),\n", - " datetime.datetime(2018, 4, 14, 12, 0),\n", - " datetime.datetime(2018, 4, 14, 13, 0),\n", - " datetime.datetime(2018, 4, 14, 14, 0),\n", - " datetime.datetime(2018, 4, 14, 15, 0),\n", - " datetime.datetime(2018, 4, 14, 16, 0),\n", - " datetime.datetime(2018, 4, 14, 17, 0),\n", - " datetime.datetime(2018, 4, 14, 18, 0),\n", - " datetime.datetime(2018, 4, 14, 19, 0),\n", - " datetime.datetime(2018, 4, 14, 20, 0),\n", - " datetime.datetime(2018, 4, 14, 21, 0),\n", - " datetime.datetime(2018, 4, 14, 22, 0),\n", - " datetime.datetime(2018, 4, 14, 23, 0),\n", - " datetime.datetime(2018, 4, 15, 0, 0),\n", - " datetime.datetime(2018, 4, 15, 1, 0),\n", - " datetime.datetime(2018, 4, 15, 2, 0),\n", - " datetime.datetime(2018, 4, 15, 3, 0),\n", - " datetime.datetime(2018, 4, 15, 4, 0),\n", - " datetime.datetime(2018, 4, 15, 5, 0),\n", - " datetime.datetime(2018, 4, 15, 6, 0),\n", - 
" datetime.datetime(2018, 4, 15, 7, 0),\n", - " datetime.datetime(2018, 4, 15, 8, 0),\n", - " datetime.datetime(2018, 4, 15, 9, 0),\n", - " datetime.datetime(2018, 4, 15, 10, 0),\n", - " datetime.datetime(2018, 4, 15, 11, 0),\n", - " datetime.datetime(2018, 4, 15, 12, 0),\n", - " datetime.datetime(2018, 4, 15, 13, 0),\n", - " datetime.datetime(2018, 4, 15, 14, 0),\n", - " datetime.datetime(2018, 4, 15, 15, 0),\n", - " datetime.datetime(2018, 4, 15, 16, 0),\n", - " datetime.datetime(2018, 4, 15, 17, 0),\n", - " datetime.datetime(2018, 4, 15, 18, 0),\n", - " datetime.datetime(2018, 4, 15, 19, 0),\n", - " datetime.datetime(2018, 4, 15, 20, 0),\n", - " datetime.datetime(2018, 4, 15, 21, 0),\n", - " datetime.datetime(2018, 4, 15, 22, 0),\n", - " datetime.datetime(2018, 4, 15, 23, 0),\n", - " datetime.datetime(2018, 4, 16, 0, 0),\n", - " datetime.datetime(2018, 4, 16, 1, 0),\n", - " datetime.datetime(2018, 4, 16, 2, 0),\n", - " datetime.datetime(2018, 4, 16, 3, 0),\n", - " datetime.datetime(2018, 4, 16, 4, 0),\n", - " datetime.datetime(2018, 4, 16, 5, 0),\n", - " datetime.datetime(2018, 4, 16, 6, 0),\n", - " datetime.datetime(2018, 4, 16, 7, 0),\n", - " datetime.datetime(2018, 4, 16, 8, 0),\n", - " datetime.datetime(2018, 4, 16, 9, 0),\n", - " datetime.datetime(2018, 4, 16, 10, 0),\n", - " datetime.datetime(2018, 4, 16, 11, 0),\n", - " datetime.datetime(2018, 4, 16, 12, 0),\n", - " datetime.datetime(2018, 4, 16, 13, 0),\n", - " datetime.datetime(2018, 4, 16, 14, 0),\n", - " datetime.datetime(2018, 4, 16, 15, 0),\n", - " datetime.datetime(2018, 4, 16, 16, 0),\n", - " datetime.datetime(2018, 4, 16, 17, 0),\n", - " datetime.datetime(2018, 4, 16, 18, 0),\n", - " datetime.datetime(2018, 4, 16, 19, 0),\n", - " datetime.datetime(2018, 4, 16, 20, 0),\n", - " datetime.datetime(2018, 4, 16, 21, 0),\n", - " datetime.datetime(2018, 4, 16, 22, 0),\n", - " datetime.datetime(2018, 4, 16, 23, 0),\n", - " datetime.datetime(2018, 4, 17, 0, 0),\n", - " datetime.datetime(2018, 4, 17, 1, 0),\n", - " datetime.datetime(2018, 4, 17, 2, 0),\n", - " datetime.datetime(2018, 4, 17, 3, 0),\n", - " datetime.datetime(2018, 4, 17, 4, 0),\n", - " datetime.datetime(2018, 4, 17, 5, 0),\n", - " datetime.datetime(2018, 4, 17, 6, 0),\n", - " datetime.datetime(2018, 4, 17, 7, 0),\n", - " datetime.datetime(2018, 4, 17, 8, 0),\n", - " datetime.datetime(2018, 4, 17, 9, 0),\n", - " datetime.datetime(2018, 4, 17, 10, 0),\n", - " datetime.datetime(2018, 4, 17, 11, 0),\n", - " datetime.datetime(2018, 4, 17, 12, 0),\n", - " datetime.datetime(2018, 4, 17, 13, 0),\n", - " datetime.datetime(2018, 4, 17, 14, 0),\n", - " datetime.datetime(2018, 4, 17, 15, 0),\n", - " datetime.datetime(2018, 4, 17, 16, 0),\n", - " datetime.datetime(2018, 4, 17, 17, 0),\n", - " datetime.datetime(2018, 4, 17, 18, 0),\n", - " datetime.datetime(2018, 4, 17, 19, 0),\n", - " datetime.datetime(2018, 4, 17, 20, 0),\n", - " datetime.datetime(2018, 4, 17, 21, 0),\n", - " datetime.datetime(2018, 4, 17, 22, 0),\n", - " datetime.datetime(2018, 4, 17, 23, 0),\n", - " datetime.datetime(2018, 4, 18, 0, 0),\n", - " datetime.datetime(2018, 4, 18, 1, 0),\n", - " datetime.datetime(2018, 4, 18, 2, 0),\n", - " datetime.datetime(2018, 4, 18, 3, 0),\n", - " datetime.datetime(2018, 4, 18, 4, 0),\n", - " datetime.datetime(2018, 4, 18, 5, 0),\n", - " datetime.datetime(2018, 4, 18, 6, 0),\n", - " datetime.datetime(2018, 4, 18, 7, 0),\n", - " datetime.datetime(2018, 4, 18, 8, 0),\n", - " datetime.datetime(2018, 4, 18, 9, 0),\n", - " datetime.datetime(2018, 4, 18, 10, 0),\n", - " 
datetime.datetime(2018, 4, 18, 11, 0),\n", - " datetime.datetime(2018, 4, 18, 12, 0),\n", - " datetime.datetime(2018, 4, 18, 13, 0),\n", - " datetime.datetime(2018, 4, 18, 14, 0),\n", - " datetime.datetime(2018, 4, 18, 15, 0),\n", - " datetime.datetime(2018, 4, 18, 16, 0),\n", - " datetime.datetime(2018, 4, 18, 17, 0),\n", - " datetime.datetime(2018, 4, 18, 18, 0),\n", - " datetime.datetime(2018, 4, 18, 19, 0),\n", - " datetime.datetime(2018, 4, 18, 20, 0),\n", - " datetime.datetime(2018, 4, 18, 21, 0),\n", - " datetime.datetime(2018, 4, 18, 22, 0),\n", - " datetime.datetime(2018, 4, 18, 23, 0),\n", - " datetime.datetime(2018, 4, 19, 0, 0),\n", - " datetime.datetime(2018, 4, 19, 1, 0),\n", - " datetime.datetime(2018, 4, 19, 2, 0),\n", - " datetime.datetime(2018, 4, 19, 3, 0),\n", - " datetime.datetime(2018, 4, 19, 4, 0),\n", - " datetime.datetime(2018, 4, 19, 5, 0),\n", - " datetime.datetime(2018, 4, 19, 6, 0),\n", - " datetime.datetime(2018, 4, 19, 7, 0),\n", - " datetime.datetime(2018, 4, 19, 8, 0),\n", - " datetime.datetime(2018, 4, 19, 9, 0),\n", - " datetime.datetime(2018, 4, 19, 10, 0),\n", - " datetime.datetime(2018, 4, 19, 11, 0),\n", - " datetime.datetime(2018, 4, 19, 12, 0),\n", - " datetime.datetime(2018, 4, 19, 13, 0),\n", - " datetime.datetime(2018, 4, 19, 14, 0),\n", - " datetime.datetime(2018, 4, 19, 15, 0),\n", - " datetime.datetime(2018, 4, 19, 16, 0),\n", - " datetime.datetime(2018, 4, 19, 17, 0),\n", - " datetime.datetime(2018, 4, 19, 18, 0),\n", - " datetime.datetime(2018, 4, 19, 19, 0),\n", - " datetime.datetime(2018, 4, 19, 20, 0),\n", - " datetime.datetime(2018, 4, 19, 21, 0),\n", - " datetime.datetime(2018, 4, 19, 22, 0),\n", - " datetime.datetime(2018, 4, 19, 23, 0),\n", - " datetime.datetime(2018, 4, 20, 0, 0),\n", - " datetime.datetime(2018, 4, 20, 1, 0),\n", - " datetime.datetime(2018, 4, 20, 2, 0),\n", - " datetime.datetime(2018, 4, 20, 3, 0),\n", - " datetime.datetime(2018, 4, 20, 4, 0),\n", - " datetime.datetime(2018, 4, 20, 5, 0),\n", - " datetime.datetime(2018, 4, 20, 6, 0),\n", - " datetime.datetime(2018, 4, 20, 7, 0),\n", - " datetime.datetime(2018, 4, 20, 8, 0),\n", - " datetime.datetime(2018, 4, 20, 9, 0),\n", - " datetime.datetime(2018, 4, 20, 10, 0),\n", - " datetime.datetime(2018, 4, 20, 11, 0),\n", - " datetime.datetime(2018, 4, 20, 12, 0),\n", - " datetime.datetime(2018, 4, 20, 13, 0),\n", - " datetime.datetime(2018, 4, 20, 14, 0),\n", - " datetime.datetime(2018, 4, 20, 15, 0),\n", - " datetime.datetime(2018, 4, 20, 16, 0),\n", - " datetime.datetime(2018, 4, 20, 17, 0),\n", - " datetime.datetime(2018, 4, 20, 18, 0),\n", - " datetime.datetime(2018, 4, 20, 19, 0),\n", - " datetime.datetime(2018, 4, 20, 20, 0),\n", - " datetime.datetime(2018, 4, 20, 21, 0),\n", - " datetime.datetime(2018, 4, 20, 22, 0),\n", - " datetime.datetime(2018, 4, 20, 23, 0),\n", - " datetime.datetime(2018, 4, 21, 0, 0),\n", - " datetime.datetime(2018, 4, 21, 1, 0),\n", - " datetime.datetime(2018, 4, 21, 2, 0),\n", - " datetime.datetime(2018, 4, 21, 3, 0),\n", - " datetime.datetime(2018, 4, 21, 4, 0),\n", - " datetime.datetime(2018, 4, 21, 5, 0),\n", - " datetime.datetime(2018, 4, 21, 6, 0),\n", - " datetime.datetime(2018, 4, 21, 7, 0),\n", - " datetime.datetime(2018, 4, 21, 8, 0),\n", - " datetime.datetime(2018, 4, 21, 9, 0),\n", - " datetime.datetime(2018, 4, 21, 10, 0),\n", - " datetime.datetime(2018, 4, 21, 11, 0),\n", - " datetime.datetime(2018, 4, 21, 12, 0),\n", - " datetime.datetime(2018, 4, 21, 13, 0),\n", - " datetime.datetime(2018, 4, 21, 14, 0),\n", - " 
datetime.datetime(2018, 4, 21, 15, 0),\n", - " datetime.datetime(2018, 4, 21, 16, 0),\n", - " datetime.datetime(2018, 4, 21, 17, 0),\n", - " datetime.datetime(2018, 4, 21, 18, 0),\n", - " datetime.datetime(2018, 4, 21, 19, 0),\n", - " datetime.datetime(2018, 4, 21, 20, 0),\n", - " datetime.datetime(2018, 4, 21, 21, 0),\n", - " datetime.datetime(2018, 4, 21, 22, 0),\n", - " datetime.datetime(2018, 4, 21, 23, 0),\n", - " datetime.datetime(2018, 4, 22, 0, 0),\n", - " datetime.datetime(2018, 4, 22, 1, 0),\n", - " datetime.datetime(2018, 4, 22, 2, 0),\n", - " datetime.datetime(2018, 4, 22, 3, 0),\n", - " datetime.datetime(2018, 4, 22, 4, 0),\n", - " datetime.datetime(2018, 4, 22, 5, 0),\n", - " datetime.datetime(2018, 4, 22, 6, 0),\n", - " datetime.datetime(2018, 4, 22, 7, 0),\n", - " datetime.datetime(2018, 4, 22, 8, 0),\n", - " datetime.datetime(2018, 4, 22, 9, 0),\n", - " datetime.datetime(2018, 4, 22, 10, 0),\n", - " datetime.datetime(2018, 4, 22, 11, 0),\n", - " datetime.datetime(2018, 4, 22, 12, 0),\n", - " datetime.datetime(2018, 4, 22, 13, 0),\n", - " datetime.datetime(2018, 4, 22, 14, 0),\n", - " datetime.datetime(2018, 4, 22, 15, 0),\n", - " datetime.datetime(2018, 4, 22, 16, 0),\n", - " datetime.datetime(2018, 4, 22, 17, 0),\n", - " datetime.datetime(2018, 4, 22, 18, 0),\n", - " datetime.datetime(2018, 4, 22, 19, 0),\n", - " datetime.datetime(2018, 4, 22, 20, 0),\n", - " datetime.datetime(2018, 4, 22, 21, 0),\n", - " datetime.datetime(2018, 4, 22, 22, 0),\n", - " datetime.datetime(2018, 4, 22, 23, 0),\n", - " datetime.datetime(2018, 4, 23, 0, 0),\n", - " datetime.datetime(2018, 4, 23, 1, 0),\n", - " datetime.datetime(2018, 4, 23, 2, 0),\n", - " datetime.datetime(2018, 4, 23, 3, 0),\n", - " datetime.datetime(2018, 4, 23, 4, 0),\n", - " datetime.datetime(2018, 4, 23, 5, 0),\n", - " datetime.datetime(2018, 4, 23, 6, 0),\n", - " datetime.datetime(2018, 4, 23, 7, 0),\n", - " datetime.datetime(2018, 4, 23, 8, 0),\n", - " datetime.datetime(2018, 4, 23, 9, 0),\n", - " datetime.datetime(2018, 4, 23, 10, 0),\n", - " datetime.datetime(2018, 4, 23, 11, 0),\n", - " datetime.datetime(2018, 4, 23, 12, 0),\n", - " datetime.datetime(2018, 4, 23, 13, 0),\n", - " datetime.datetime(2018, 4, 23, 14, 0),\n", - " datetime.datetime(2018, 4, 23, 15, 0),\n", - " datetime.datetime(2018, 4, 23, 16, 0),\n", - " datetime.datetime(2018, 4, 23, 17, 0),\n", - " datetime.datetime(2018, 4, 23, 18, 0),\n", - " datetime.datetime(2018, 4, 23, 19, 0),\n", - " datetime.datetime(2018, 4, 23, 20, 0),\n", - " datetime.datetime(2018, 4, 23, 21, 0),\n", - " datetime.datetime(2018, 4, 23, 22, 0),\n", - " datetime.datetime(2018, 4, 23, 23, 0),\n", - " datetime.datetime(2018, 4, 24, 0, 0),\n", - " datetime.datetime(2018, 4, 24, 1, 0),\n", - " datetime.datetime(2018, 4, 24, 2, 0),\n", - " datetime.datetime(2018, 4, 24, 3, 0),\n", - " datetime.datetime(2018, 4, 24, 4, 0),\n", - " datetime.datetime(2018, 4, 24, 5, 0),\n", - " datetime.datetime(2018, 4, 24, 6, 0),\n", - " datetime.datetime(2018, 4, 24, 7, 0),\n", - " datetime.datetime(2018, 4, 24, 8, 0),\n", - " datetime.datetime(2018, 4, 24, 9, 0),\n", - " datetime.datetime(2018, 4, 24, 10, 0),\n", - " datetime.datetime(2018, 4, 24, 11, 0),\n", - " datetime.datetime(2018, 4, 24, 12, 0),\n", - " datetime.datetime(2018, 4, 24, 13, 0),\n", - " datetime.datetime(2018, 4, 24, 14, 0),\n", - " datetime.datetime(2018, 4, 24, 15, 0),\n", - " datetime.datetime(2018, 4, 24, 16, 0),\n", - " datetime.datetime(2018, 4, 24, 17, 0),\n", - " datetime.datetime(2018, 4, 24, 18, 0),\n", - " 
datetime.datetime(2018, 4, 24, 19, 0),\n", - " datetime.datetime(2018, 4, 24, 20, 0),\n", - " datetime.datetime(2018, 4, 24, 21, 0),\n", - " datetime.datetime(2018, 4, 24, 22, 0),\n", - " datetime.datetime(2018, 4, 24, 23, 0),\n", - " datetime.datetime(2018, 4, 25, 0, 0),\n", - " datetime.datetime(2018, 4, 25, 1, 0),\n", - " datetime.datetime(2018, 4, 25, 2, 0),\n", - " datetime.datetime(2018, 4, 25, 3, 0),\n", - " datetime.datetime(2018, 4, 25, 4, 0),\n", - " datetime.datetime(2018, 4, 25, 5, 0),\n", - " datetime.datetime(2018, 4, 25, 6, 0),\n", - " datetime.datetime(2018, 4, 25, 7, 0),\n", - " datetime.datetime(2018, 4, 25, 8, 0),\n", - " datetime.datetime(2018, 4, 25, 9, 0),\n", - " datetime.datetime(2018, 4, 25, 10, 0),\n", - " datetime.datetime(2018, 4, 25, 11, 0),\n", - " datetime.datetime(2018, 4, 25, 12, 0),\n", - " datetime.datetime(2018, 4, 25, 13, 0),\n", - " datetime.datetime(2018, 4, 25, 14, 0),\n", - " datetime.datetime(2018, 4, 25, 15, 0),\n", - " datetime.datetime(2018, 4, 25, 16, 0),\n", - " datetime.datetime(2018, 4, 25, 17, 0),\n", - " datetime.datetime(2018, 4, 25, 18, 0),\n", - " datetime.datetime(2018, 4, 25, 19, 0),\n", - " datetime.datetime(2018, 4, 25, 20, 0),\n", - " datetime.datetime(2018, 4, 25, 21, 0),\n", - " datetime.datetime(2018, 4, 25, 22, 0),\n", - " datetime.datetime(2018, 4, 25, 23, 0),\n", - " datetime.datetime(2018, 4, 26, 0, 0),\n", - " datetime.datetime(2018, 4, 26, 1, 0),\n", - " datetime.datetime(2018, 4, 26, 2, 0),\n", - " datetime.datetime(2018, 4, 26, 3, 0),\n", - " datetime.datetime(2018, 4, 26, 4, 0),\n", - " datetime.datetime(2018, 4, 26, 5, 0),\n", - " datetime.datetime(2018, 4, 26, 6, 0),\n", - " datetime.datetime(2018, 4, 26, 7, 0),\n", - " datetime.datetime(2018, 4, 26, 8, 0),\n", - " datetime.datetime(2018, 4, 26, 9, 0),\n", - " datetime.datetime(2018, 4, 26, 10, 0),\n", - " datetime.datetime(2018, 4, 26, 11, 0),\n", - " datetime.datetime(2018, 4, 26, 12, 0),\n", - " datetime.datetime(2018, 4, 26, 13, 0),\n", - " datetime.datetime(2018, 4, 26, 14, 0),\n", - " datetime.datetime(2018, 4, 26, 15, 0),\n", - " datetime.datetime(2018, 4, 26, 16, 0),\n", - " datetime.datetime(2018, 4, 26, 17, 0),\n", - " datetime.datetime(2018, 4, 26, 18, 0),\n", - " datetime.datetime(2018, 4, 26, 19, 0),\n", - " datetime.datetime(2018, 4, 26, 20, 0),\n", - " datetime.datetime(2018, 4, 26, 21, 0),\n", - " datetime.datetime(2018, 4, 26, 22, 0),\n", - " datetime.datetime(2018, 4, 26, 23, 0),\n", - " datetime.datetime(2018, 4, 27, 0, 0),\n", - " datetime.datetime(2018, 4, 27, 1, 0),\n", - " datetime.datetime(2018, 4, 27, 2, 0),\n", - " datetime.datetime(2018, 4, 27, 3, 0),\n", - " datetime.datetime(2018, 4, 27, 4, 0),\n", - " datetime.datetime(2018, 4, 27, 5, 0),\n", - " datetime.datetime(2018, 4, 27, 6, 0),\n", - " datetime.datetime(2018, 4, 27, 7, 0),\n", - " datetime.datetime(2018, 4, 27, 8, 0),\n", - " datetime.datetime(2018, 4, 27, 9, 0),\n", - " datetime.datetime(2018, 4, 27, 10, 0),\n", - " datetime.datetime(2018, 4, 27, 11, 0),\n", - " datetime.datetime(2018, 4, 27, 12, 0),\n", - " datetime.datetime(2018, 4, 27, 13, 0),\n", - " datetime.datetime(2018, 4, 27, 14, 0),\n", - " datetime.datetime(2018, 4, 27, 15, 0),\n", - " datetime.datetime(2018, 4, 27, 16, 0),\n", - " datetime.datetime(2018, 4, 27, 17, 0),\n", - " datetime.datetime(2018, 4, 27, 18, 0),\n", - " datetime.datetime(2018, 4, 27, 19, 0),\n", - " datetime.datetime(2018, 4, 27, 20, 0),\n", - " datetime.datetime(2018, 4, 27, 21, 0),\n", - " datetime.datetime(2018, 4, 27, 22, 0),\n", - " 
datetime.datetime(2018, 4, 27, 23, 0),\n", - " datetime.datetime(2018, 4, 28, 0, 0),\n", - " datetime.datetime(2018, 4, 28, 1, 0),\n", - " datetime.datetime(2018, 4, 28, 2, 0),\n", - " datetime.datetime(2018, 4, 28, 3, 0),\n", - " datetime.datetime(2018, 4, 28, 4, 0),\n", - " datetime.datetime(2018, 4, 28, 5, 0),\n", - " datetime.datetime(2018, 4, 28, 6, 0),\n", - " datetime.datetime(2018, 4, 28, 7, 0),\n", - " datetime.datetime(2018, 4, 28, 8, 0),\n", - " datetime.datetime(2018, 4, 28, 9, 0),\n", - " datetime.datetime(2018, 4, 28, 10, 0),\n", - " datetime.datetime(2018, 4, 28, 11, 0),\n", - " datetime.datetime(2018, 4, 28, 12, 0),\n", - " datetime.datetime(2018, 4, 28, 13, 0),\n", - " datetime.datetime(2018, 4, 28, 14, 0),\n", - " datetime.datetime(2018, 4, 28, 15, 0),\n", - " datetime.datetime(2018, 4, 28, 16, 0),\n", - " datetime.datetime(2018, 4, 28, 17, 0),\n", - " datetime.datetime(2018, 4, 28, 18, 0),\n", - " datetime.datetime(2018, 4, 28, 19, 0),\n", - " datetime.datetime(2018, 4, 28, 20, 0),\n", - " datetime.datetime(2018, 4, 28, 21, 0),\n", - " datetime.datetime(2018, 4, 28, 22, 0),\n", - " datetime.datetime(2018, 4, 28, 23, 0),\n", - " datetime.datetime(2018, 4, 29, 0, 0),\n", - " datetime.datetime(2018, 4, 29, 1, 0),\n", - " datetime.datetime(2018, 4, 29, 2, 0),\n", - " datetime.datetime(2018, 4, 29, 3, 0),\n", - " datetime.datetime(2018, 4, 29, 4, 0),\n", - " datetime.datetime(2018, 4, 29, 5, 0),\n", - " datetime.datetime(2018, 4, 29, 6, 0),\n", - " datetime.datetime(2018, 4, 29, 7, 0),\n", - " datetime.datetime(2018, 4, 29, 8, 0),\n", - " datetime.datetime(2018, 4, 29, 9, 0),\n", - " datetime.datetime(2018, 4, 29, 10, 0),\n", - " datetime.datetime(2018, 4, 29, 11, 0),\n", - " datetime.datetime(2018, 4, 29, 12, 0),\n", - " datetime.datetime(2018, 4, 29, 13, 0),\n", - " datetime.datetime(2018, 4, 29, 14, 0),\n", - " datetime.datetime(2018, 4, 29, 15, 0),\n", - " datetime.datetime(2018, 4, 29, 16, 0),\n", - " datetime.datetime(2018, 4, 29, 17, 0),\n", - " datetime.datetime(2018, 4, 29, 18, 0),\n", - " datetime.datetime(2018, 4, 29, 19, 0),\n", - " datetime.datetime(2018, 4, 29, 20, 0),\n", - " datetime.datetime(2018, 4, 29, 21, 0),\n", - " datetime.datetime(2018, 4, 29, 22, 0),\n", - " datetime.datetime(2018, 4, 29, 23, 0),\n", - " datetime.datetime(2018, 4, 30, 0, 0),\n", - " datetime.datetime(2018, 4, 30, 1, 0),\n", - " datetime.datetime(2018, 4, 30, 2, 0),\n", - " datetime.datetime(2018, 4, 30, 3, 0),\n", - " datetime.datetime(2018, 4, 30, 4, 0),\n", - " datetime.datetime(2018, 4, 30, 5, 0),\n", - " datetime.datetime(2018, 4, 30, 6, 0),\n", - " datetime.datetime(2018, 4, 30, 7, 0),\n", - " datetime.datetime(2018, 4, 30, 8, 0),\n", - " datetime.datetime(2018, 4, 30, 9, 0),\n", - " datetime.datetime(2018, 4, 30, 10, 0),\n", - " datetime.datetime(2018, 4, 30, 11, 0),\n", - " datetime.datetime(2018, 4, 30, 12, 0),\n", - " datetime.datetime(2018, 4, 30, 13, 0),\n", - " datetime.datetime(2018, 4, 30, 14, 0),\n", - " datetime.datetime(2018, 4, 30, 15, 0),\n", - " datetime.datetime(2018, 4, 30, 16, 0),\n", - " datetime.datetime(2018, 4, 30, 17, 0),\n", - " datetime.datetime(2018, 4, 30, 18, 0),\n", - " datetime.datetime(2018, 4, 30, 19, 0),\n", - " datetime.datetime(2018, 4, 30, 20, 0),\n", - " datetime.datetime(2018, 4, 30, 21, 0),\n", - " datetime.datetime(2018, 4, 30, 22, 0),\n", - " datetime.datetime(2018, 4, 30, 23, 0)]" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_1.time" - ] - }, - { - "cell_type": "code", 
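The `time` attribute shown above is a plain Python list of `datetime.datetime` objects on an hourly axis, so ordinary list operations are enough to locate a sub-period. A minimal, self-contained sketch (the `times` list below is a stand-in for `nessy_1.time`, assuming a 720-step hourly axis covering April 2018, which matches the `(168, 720)` variable shapes further down):

```python
import datetime

# Stand-in for nessy_1.time: hourly steps covering April 2018 (720 entries).
times = [datetime.datetime(2018, 4, 1) + datetime.timedelta(hours=h)
         for h in range(720)]

# Indices covering a single day, 2018-04-16 00:00 .. 23:00.
start = datetime.datetime(2018, 4, 16)
end = datetime.datetime(2018, 4, 17)
idx = [i for i, t in enumerate(times) if start <= t < end]
print(idx[0], idx[-1])  # 360 383
```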
- "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': array([0]), 'units': ''}" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_1.lev" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': masked_array(data=[-64.24006 , -54.84846497, 47.76666641, 46.677778 ,\n", - " 48.721111 , 47.529167 , 47.05407 , 47.348056 ,\n", - " 47.973056 , 48.878611 , 48.106111 , 48.371111 ,\n", - " 48.334722 , 48.050833 , 47.838611 , 47.040277 ,\n", - " 47.06694444, 49.877778 , 50.629421 , 50.503333 ,\n", - " 41.695833 , 32.27000046, 80.05000305, 46.5475 ,\n", - " 46.813056 , 47.479722 , 47.049722 , 47.0675 ,\n", - " 47.18961391, -30.17254 , 16.86403 , 35.0381 ,\n", - " 49.73508444, 49.573394 , 49.066667 , 54.925556 ,\n", - " 52.802222 , 47.914722 , 53.166667 , 50.65 ,\n", - " 54.4368 , 47.80149841, 47.4165 , -70.666 ,\n", - " 54.746495 , 81.6 , 55.693588 , 72.58000183,\n", - " 56.290424 , 59.5 , 58.383333 , 39.54694 ,\n", - " 42.72056 , 39.87528 , 37.23722 , 43.43917 ,\n", - " 41.27417 , 42.31917 , 38.47278 , 39.08278 ,\n", - " 41.23889 , 41.39389 , 42.63472 , 37.05194 ,\n", - " 28.309 , 59.779167 , 60.53002 , 66.320278 ,\n", - " 67.97333333, 48.5 , 49.9 , 47.266667 ,\n", - " 43.616667 , 47.3 , 46.65 , 45. ,\n", - " 45.8 , 48.633333 , 42.936667 , 44.56944444,\n", - " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", - " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", - " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", - " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", - " 51.778056 , 60.13922 , 51.149617 , 38.366667 ,\n", - " 35.316667 , 46.966667 , 46.91 , -0.20194 ,\n", - " 51.939722 , 53.32583 , 45.8 , 44.183333 ,\n", - " 37.571111 , 42.805462 , -69.005 , 39.0319 ,\n", - " 24.2883 , 24.466941 , 36.53833389, 33.293917 ,\n", - " 55.37611111, 56.161944 , 57.135278 , 36.0722 ,\n", - " 52.083333 , 53.333889 , 51.541111 , 52.3 ,\n", - " 51.974444 , 58.38853 , 65.833333 , 62.783333 ,\n", - " 78.90715 , 59. 
, 69.45 , 59.2 ,\n", - " 60.372386 , -72.0117 , 59.2 , -41.40819168,\n", - " -77.83200073, -45.0379982 , 51.814408 , 50.736444 ,\n", - " 54.753894 , 54.15 , 43.4 , 71.58616638,\n", - " 63.85 , 67.883333 , 57.394 , 57.1645 ,\n", - " 57.9525 , 56.0429 , 60.0858 , 57.816667 ,\n", - " 64.25 , 59.728 , 45.566667 , 46.428611 ,\n", - " 46.299444 , 48.933333 , 49.15 , 49.05 ,\n", - " 47.96 , 71.32301331, 40.12498 , 19.53623009,\n", - " -89.99694824, 41.05410004, 21.5731 , -34.35348 ],\n", - " mask=False,\n", - " fill_value=1e+20),\n", - " 'dimensions': ('station',),\n", - " 'standard_name': 'latitude',\n", - " 'long_name': 'latitude',\n", - " 'units': 'decimal degrees North',\n", - " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", - " 'axis': 'Y'}" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_1.lat" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, 1.67666664e+01,\n", - " 1.29722220e+01, 1.59422220e+01, 9.92666700e+00,\n", - " 1.29579400e+01, 1.58822220e+01, 1.30161110e+01,\n", - " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", - " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", - " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", - " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", - " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", - " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", - " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", - " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", - " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", - " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", - " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", - " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", - " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", - " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", - " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", - " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", - " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", - " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", - " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", - " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", - " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", - " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", - " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", - " 1.41944000e-01, 5.27897222e+00, 2.61000833e+00,\n", - " 2.96488600e+00, -3.20416700e+00, -7.87000000e+00,\n", - " -3.71305600e+00, -8.07500000e-01, -4.77444400e+00,\n", - " -3.03305600e+00, -3.20500000e+00, -1.75333300e+00,\n", - " 1.79444000e-01, 1.46305600e+00, -4.69146200e+00,\n", - " 2.92778000e-01, -3.24290000e+00, 1.12194400e+00,\n", - " 1.08223000e+00, -1.18531900e+00, -1.43822800e+00,\n", - " 2.30833330e+01, 2.56666670e+01, 1.95833330e+01,\n", - " 1.63200000e+01, 1.00318100e+02, -1.02444440e+01,\n", - " -9.89944000e+00, 8.63333300e+00, 1.07000000e+01,\n", - " 1.26597220e+01, 1.25656450e+01, 3.95905556e+01,\n", - " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", - " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", - " 2.11730560e+01, 2.59055560e+01, 1.42184000e+01,\n", - " 6.56666700e+00, 6.27722200e+00, 5.85361100e+00,\n", - " 4.50000000e+00, 4.92361100e+00, 8.25200000e+00,\n", - " 1.39166670e+01, 8.88333300e+00, 1.18866800e+01,\n", 
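Both coordinate attributes follow the same dictionary layout as `lev` above: a `'data'` masked array over the `station` dimension plus CF-style metadata. A short sketch of pairing them into per-station coordinates (plain NumPy; the values below are made-up stand-ins for `nessy_1.lat` / `nessy_1.lon`):

```python
import numpy as np

# Made-up stand-ins for nessy_1.lat / nessy_1.lon: dicts whose 'data'
# entry is a masked array over the 'station' dimension.
lat = {'data': np.ma.masked_array([41.695833, 47.766666, 48.721111]),
       'units': 'decimal degrees North'}
lon = {'data': np.ma.masked_array([24.738611, 16.766666, 15.942222]),
       'units': 'decimal degrees East'}

# Pair them into (lat, lon) tuples, turning any masked entries into NaN.
stations = list(zip(lat['data'].filled(np.nan), lon['data'].filled(np.nan)))
print(stations[0])  # (41.695833, 24.738611)
```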
- " 1.15333330e+01, 3.00333330e+01, 5.20000000e+00,\n", - " 1.10781420e+01, 2.53510000e+00, 9.51666700e+00,\n", - " 1.74870804e+02, 1.66660004e+02, 1.69684006e+02,\n", - " 2.19724190e+01, 1.57395000e+01, 1.75342640e+01,\n", - " 2.20666670e+01, 2.19500000e+01, 1.28918823e+02,\n", - " 1.53333330e+01, 2.10666670e+01, 1.19140000e+01,\n", - " 1.47825000e+01, 1.24030000e+01, 1.31480000e+01,\n", - " 1.75052800e+01, 1.55666670e+01, 1.97666670e+01,\n", - " 1.54720000e+01, 1.48666670e+01, 1.50033330e+01,\n", - " 1.45386110e+01, 1.95833330e+01, 2.02833330e+01,\n", - " 2.22666670e+01, 1.78605560e+01, -1.56611465e+02,\n", - " -1.05236800e+02, -1.55576157e+02, -2.47999992e+01,\n", - " -1.24151001e+02, 1.03515700e+02, 1.84896800e+01],\n", - " mask=False,\n", - " fill_value=1e+20),\n", - " 'dimensions': ('station',),\n", - " 'standard_name': 'longitude',\n", - " 'long_name': 'longitude',\n", - " 'units': 'decimal degrees East',\n", - " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", - " 'axis': 'X'}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_1.lon" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Loading ASTER_v3_altitude var (1/175)\n", - "Rank 000: Loaded ASTER_v3_altitude var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((168,))\n", - "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", - "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((168,))\n", - "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (12/175)\n", - "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((168,))\n", - "Rank 000: Loading ESDAC_Meybeck_landform_classification var (13/175)\n", - "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((168,))\n", - "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", - "Rank 000: Loaded 
ESDAC_modal_Iwahashi_landform_classification_25km var ((168,))\n", - "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", - "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((168,))\n", - "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", - "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((168,))\n", - "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", - "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((168,))\n", - "Rank 000: Loading ETOPO1_altitude var (18/175)\n", - "Rank 000: Loaded ETOPO1_altitude var ((168,))\n", - "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (19/175)\n", - "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((168,))\n", - "Rank 000: Loading GHOST_version var (20/175)\n", - "Rank 000: Loaded GHOST_version var ((168,))\n", - "Rank 000: Loading GHSL_average_built_up_area_density_25km var (21/175)\n", - "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((168,))\n", - "Rank 000: Loading GHSL_average_built_up_area_density_5km var (22/175)\n", - "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((168,))\n", - "Rank 000: Loading GHSL_average_population_density_25km var (23/175)\n", - "Rank 000: Loaded GHSL_average_population_density_25km var ((168,))\n", - "Rank 000: Loading GHSL_average_population_density_5km var (24/175)\n", - "Rank 000: Loaded GHSL_average_population_density_5km var ((168,))\n", - "Rank 000: Loading GHSL_built_up_area_density var (25/175)\n", - "Rank 000: Loaded GHSL_built_up_area_density var ((168,))\n", - "Rank 000: Loading GHSL_max_built_up_area_density_25km var (26/175)\n", - "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((168,))\n", - "Rank 000: Loading GHSL_max_built_up_area_density_5km var (27/175)\n", - "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((168,))\n", - "Rank 000: Loading GHSL_max_population_density_25km var (28/175)\n", - "Rank 000: Loaded GHSL_max_population_density_25km var ((168,))\n", - "Rank 000: Loading GHSL_max_population_density_5km var (29/175)\n", - "Rank 000: Loaded GHSL_max_population_density_5km var ((168,))\n", - "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (30/175)\n", - "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((168,))\n", - "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (31/175)\n", - "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((168,))\n", - "Rank 000: Loading GHSL_population_density var (32/175)\n", - "Rank 000: Loaded GHSL_population_density var ((168,))\n", - "Rank 000: Loading GHSL_settlement_model_classification var (33/175)\n", - "Rank 000: Loaded GHSL_settlement_model_classification var ((168,))\n", - "Rank 000: Loading GPW_average_population_density_25km var (34/175)\n", - "Rank 000: Loaded GPW_average_population_density_25km var ((168,))\n", - "Rank 000: Loading GPW_average_population_density_5km var (35/175)\n", - "Rank 000: Loaded GPW_average_population_density_5km var ((168,))\n", - "Rank 000: Loading GPW_max_population_density_25km var (36/175)\n", - "Rank 000: Loaded GPW_max_population_density_25km var ((168,))\n", - "Rank 000: Loading GPW_max_population_density_5km var (37/175)\n", - "Rank 000: Loaded GPW_max_population_density_5km var ((168,))\n", - "Rank 000: Loading GPW_population_density var (38/175)\n", - "Rank 000: Loaded GPW_population_density var ((168,))\n", - "Rank 
000: Loading GSFC_coastline_proximity var (39/175)\n", - "Rank 000: Loaded GSFC_coastline_proximity var ((168,))\n", - "Rank 000: Loading Joly-Peuch_classification_code var (40/175)\n", - "Rank 000: Loaded Joly-Peuch_classification_code var ((168,))\n", - "Rank 000: Loading Koppen-Geiger_classification var (41/175)\n", - "Rank 000: Loaded Koppen-Geiger_classification var ((168,))\n", - "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (42/175)\n", - "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((168,))\n", - "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (43/175)\n", - "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (45/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((168,))\n", - "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", - "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((168,))\n", - "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", - "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((168,))\n", - "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", - "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((168,))\n", - "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", - "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((168,))\n", - "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", - "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((168,))\n", - "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", - "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((168,))\n", - "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (58/175)\n", - "Rank 000: Loaded OMI_level3_column_annual_average_NO2 var ((168,))\n", - "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", - "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((168,))\n", - "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", - "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((168,))\n", - "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", - "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var 
((168,))\n", - "Rank 000: Loading UMBC_anthrome_classification var (62/175)\n", - "Rank 000: Loaded UMBC_anthrome_classification var ((168,))\n", - "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (63/175)\n", - "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((168,))\n", - "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (64/175)\n", - "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((168,))\n", - "Rank 000: Loading WMO_region var (65/175)\n", - "Rank 000: Loaded WMO_region var ((168,))\n", - "Rank 000: Loading WWF_TEOW_biogeographical_realm var (66/175)\n", - "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((168,))\n", - "Rank 000: Loading WWF_TEOW_biome var (67/175)\n", - "Rank 000: Loaded WWF_TEOW_biome var ((168,))\n", - "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (68/175)\n", - "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((168,))\n", - "Rank 000: Loading administrative_country_division_1 var (69/175)\n", - "Rank 000: Loaded administrative_country_division_1 var ((168,))\n", - "Rank 000: Loading administrative_country_division_2 var (70/175)\n", - "Rank 000: Loaded administrative_country_division_2 var ((168,))\n", - "Rank 000: Loading altitude var (71/175)\n", - "Rank 000: Loaded altitude var ((168,))\n", - "Rank 000: Loading annual_native_max_gap_percent var (72/175)\n", - "Rank 000: Loaded annual_native_max_gap_percent var ((168, 720))\n", - "Rank 000: Loading annual_native_representativity_percent var (73/175)\n", - "Rank 000: Loaded annual_native_representativity_percent var ((168, 720))\n", - "Rank 000: Loading area_classification var (74/175)\n", - "Rank 000: Loaded area_classification var ((168,))\n", - "Rank 000: Loading associated_networks var (75/175)\n", - "Rank 000: Loaded associated_networks var ((168,))\n", - "Rank 000: Loading city var (76/175)\n", - "Rank 000: Loaded city var ((168,))\n", - "Rank 000: Loading climatology var (77/175)\n", - "Rank 000: Loaded climatology var ((168,))\n", - "Rank 000: Loading contact_email_address var (78/175)\n", - "Rank 000: Loaded contact_email_address var ((168,))\n", - "Rank 000: Loading contact_institution var (79/175)\n", - "Rank 000: Loaded contact_institution var ((168,))\n", - "Rank 000: Loading contact_name var (80/175)\n", - "Rank 000: Loaded contact_name var ((168,))\n", - "Rank 000: Loading country var (81/175)\n", - "Rank 000: Loaded country var ((168,))\n", - "Rank 000: Loading daily_native_max_gap_percent var (82/175)\n", - "Rank 000: Loaded daily_native_max_gap_percent var ((168, 720))\n", - "Rank 000: Loading daily_native_representativity_percent var (83/175)\n", - "Rank 000: Loaded daily_native_representativity_percent var ((168, 720))\n", - "Rank 000: Loading daily_passing_vehicles var (84/175)\n", - "Rank 000: Loaded daily_passing_vehicles var ((168,))\n", - "Rank 000: Loading data_level var (85/175)\n", - "Rank 000: Loaded data_level var ((168,))\n", - "Rank 000: Loading data_licence var (86/175)\n", - "Rank 000: Loaded data_licence var ((168,))\n", - "Rank 000: Loading day_night_code var (87/175)\n", - "Rank 000: Loaded day_night_code var ((168, 720))\n", - "Rank 000: Loading daytime_traffic_speed var (88/175)\n", - "Rank 000: Loaded daytime_traffic_speed var ((168,))\n", - "Rank 000: Loading derived_uncertainty_per_measurement var (89/175)\n", - "Rank 000: Loaded derived_uncertainty_per_measurement var ((168, 720))\n", - "Rank 000: Loading distance_to_building var (90/175)\n", - "Rank 000: Loaded distance_to_building var ((168,))\n", - 
"Rank 000: Loading distance_to_junction var (91/175)\n", - "Rank 000: Loaded distance_to_junction var ((168,))\n", - "Rank 000: Loading distance_to_kerb var (92/175)\n", - "Rank 000: Loaded distance_to_kerb var ((168,))\n", - "Rank 000: Loading distance_to_source var (93/175)\n", - "Rank 000: Loaded distance_to_source var ((168,))\n", - "Rank 000: Loading ellipsoid var (94/175)\n", - "Rank 000: Loaded ellipsoid var ((168,))\n", - "Rank 000: Loading horizontal_datum var (95/175)\n", - "Rank 000: Loaded horizontal_datum var ((168,))\n", - "Rank 000: Loading hourly_native_max_gap_percent var (96/175)\n", - "Rank 000: Loaded hourly_native_max_gap_percent var ((168, 720))\n", - "Rank 000: Loading hourly_native_representativity_percent var (97/175)\n", - "Rank 000: Loaded hourly_native_representativity_percent var ((168, 720))\n", - "Rank 000: Loading land_use var (98/175)\n", - "Rank 000: Loaded land_use var ((168,))\n", - "Rank 000: Loading local_time var (99/175)\n", - "Rank 000: Loaded local_time var ((168, 720))\n", - "Rank 000: Loading main_emission_source var (100/175)\n", - "Rank 000: Loaded main_emission_source var ((168,))\n", - "Rank 000: Loading mean_solar_time var (101/175)\n", - "Rank 000: Loaded mean_solar_time var ((168, 720))\n", - "Rank 000: Loading measurement_altitude var (102/175)\n", - "Rank 000: Loaded measurement_altitude var ((168,))\n", - "Rank 000: Loading measurement_methodology var (103/175)\n", - "Rank 000: Loaded measurement_methodology var ((168,))\n", - "Rank 000: Loading measurement_scale var (104/175)\n", - "Rank 000: Loaded measurement_scale var ((168,))\n", - "Rank 000: Loading measuring_instrument_calibration_scale var (105/175)\n", - "Rank 000: Loaded measuring_instrument_calibration_scale var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (106/175)\n", - "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_accuracy var (107/175)\n", - "Rank 000: Loaded measuring_instrument_documented_accuracy var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_flow_rate var (108/175)\n", - "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", - "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (110/175)\n", - "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_precision var (111/175)\n", - "Rank 000: Loaded measuring_instrument_documented_precision var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_span_drift var (112/175)\n", - "Rank 000: Loaded measuring_instrument_documented_span_drift var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_uncertainty var (113/175)\n", - "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", - "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((168,))\n", - "Rank 000: Loading measuring_instrument_documented_zero_drift var (115/175)\n", - "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((168,))\n", - "Rank 000: Loading 
measuring_instrument_documented_zonal_drift var (116/175)\n", - "Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((168,))\n", - "Rank 000: Loading measuring_instrument_further_details var (117/175)\n", - "Rank 000: Loaded measuring_instrument_further_details var ((168,))\n", - "Rank 000: Loading measuring_instrument_inlet_information var (118/175)\n", - "Rank 000: Loaded measuring_instrument_inlet_information var ((168,))\n", - "Rank 000: Loading measuring_instrument_manual_name var (119/175)\n", - "Rank 000: Loaded measuring_instrument_manual_name var ((168,))\n", - "Rank 000: Loading measuring_instrument_name var (120/175)\n", - "Rank 000: Loaded measuring_instrument_name var ((168,))\n", - "Rank 000: Loading measuring_instrument_process_details var (121/175)\n", - "Rank 000: Loaded measuring_instrument_process_details var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (122/175)\n", - "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_accuracy var (123/175)\n", - "Rank 000: Loaded measuring_instrument_reported_accuracy var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_flow_rate var (124/175)\n", - "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", - "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (126/175)\n", - "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_precision var (127/175)\n", - "Rank 000: Loaded measuring_instrument_reported_precision var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_span_drift var (128/175)\n", - "Rank 000: Loaded measuring_instrument_reported_span_drift var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_uncertainty var (129/175)\n", - "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_units var (130/175)\n", - "Rank 000: Loaded measuring_instrument_reported_units var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", - "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_zero_drift var (132/175)\n", - "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((168,))\n", - "Rank 000: Loading measuring_instrument_reported_zonal_drift var (133/175)\n", - "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((168,))\n", - "Rank 000: Loading measuring_instrument_sampling_type var (134/175)\n", - "Rank 000: Loaded measuring_instrument_sampling_type var ((168,))\n", - "Rank 000: Loading monthly_native_max_gap_percent var (135/175)\n", - "Rank 000: Loaded monthly_native_max_gap_percent var ((168, 720))\n", - "Rank 000: Loading monthly_native_representativity_percent var (136/175)\n", - "Rank 000: Loaded monthly_native_representativity_percent var ((168, 720))\n", - "Rank 000: Loading network var (137/175)\n", - "Rank 000: Loaded network var ((168,))\n", - "Rank 000: Loading network_maintenance_details var (138/175)\n", - "Rank 000: Loaded network_maintenance_details var ((168,))\n", - "Rank 000: Loading 
network_miscellaneous_details var (139/175)\n", - "Rank 000: Loaded network_miscellaneous_details var ((168,))\n", - "Rank 000: Loading network_provided_volume_standard_pressure var (140/175)\n", - "Rank 000: Loaded network_provided_volume_standard_pressure var ((168,))\n", - "Rank 000: Loading network_provided_volume_standard_temperature var (141/175)\n", - "Rank 000: Loaded network_provided_volume_standard_temperature var ((168,))\n", - "Rank 000: Loading network_qa_details var (142/175)\n", - "Rank 000: Loaded network_qa_details var ((168,))\n", - "Rank 000: Loading network_sampling_details var (143/175)\n", - "Rank 000: Loaded network_sampling_details var ((168,))\n", - "Rank 000: Loading network_uncertainty_details var (144/175)\n", - "Rank 000: Loaded network_uncertainty_details var ((168,))\n", - "Rank 000: Loading population var (145/175)\n", - "Rank 000: Loaded population var ((168,))\n", - "Rank 000: Loading primary_sampling_further_details var (146/175)\n", - "Rank 000: Loaded primary_sampling_further_details var ((168,))\n", - "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (147/175)\n", - "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((168,))\n", - "Rank 000: Loading primary_sampling_instrument_manual_name var (148/175)\n", - "Rank 000: Loaded primary_sampling_instrument_manual_name var ((168,))\n", - "Rank 000: Loading primary_sampling_instrument_name var (149/175)\n", - "Rank 000: Loaded primary_sampling_instrument_name var ((168,))\n", - "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (150/175)\n", - "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((168,))\n", - "Rank 000: Loading primary_sampling_process_details var (151/175)\n", - "Rank 000: Loaded primary_sampling_process_details var ((168,))\n", - "Rank 000: Loading primary_sampling_type var (152/175)\n", - "Rank 000: Loaded primary_sampling_type var ((168,))\n", - "Rank 000: Loading principal_investigator_email_address var (153/175)\n", - "Rank 000: Loaded principal_investigator_email_address var ((168,))\n", - "Rank 000: Loading principal_investigator_institution var (154/175)\n", - "Rank 000: Loaded principal_investigator_institution var ((168,))\n", - "Rank 000: Loading principal_investigator_name var (155/175)\n", - "Rank 000: Loaded principal_investigator_name var ((168,))\n", - "Rank 000: Loading process_warnings var (156/175)\n", - "Rank 000: Loaded process_warnings var ((168,))\n", - "Rank 000: Loading projection var (157/175)\n", - "Rank 000: Loaded projection var ((168,))\n", - "Rank 000: Loading reported_uncertainty_per_measurement var (158/175)\n", - "Rank 000: Loaded reported_uncertainty_per_measurement var ((168, 720))\n", - "Rank 000: Loading representative_radius var (159/175)\n", - "Rank 000: Loaded representative_radius var ((168,))\n", - "Rank 000: Loading sample_preparation_further_details var (160/175)\n", - "Rank 000: Loaded sample_preparation_further_details var ((168,))\n", - "Rank 000: Loading sample_preparation_process_details var (161/175)\n", - "Rank 000: Loaded sample_preparation_process_details var ((168,))\n", - "Rank 000: Loading sample_preparation_techniques var (162/175)\n", - "Rank 000: Loaded sample_preparation_techniques var ((168,))\n", - "Rank 000: Loading sample_preparation_types var (163/175)\n", - "Rank 000: Loaded sample_preparation_types var ((168,))\n", - "Rank 000: Loading sampling_height var (164/175)\n", - "Rank 000: Loaded sampling_height var ((168,))\n", - "Rank 000: Loading 
sconco3 var (165/175)\n", - "Rank 000: Loaded sconco3 var ((168, 720))\n", - "Rank 000: Loading season_code var (166/175)\n", - "Rank 000: Loaded season_code var ((168, 720))\n", - "Rank 000: Loading station_classification var (167/175)\n", - "Rank 000: Loaded station_classification var ((168,))\n", - "Rank 000: Loading station_name var (168/175)\n", - "Rank 000: Loaded station_name var ((168,))\n", - "Rank 000: Loading station_reference var (169/175)\n", - "Rank 000: Loaded station_reference var ((168,))\n", - "Rank 000: Loading station_timezone var (170/175)\n", - "Rank 000: Loaded station_timezone var ((168,))\n", - "Rank 000: Loading street_type var (171/175)\n", - "Rank 000: Loaded street_type var ((168,))\n", - "Rank 000: Loading street_width var (172/175)\n", - "Rank 000: Loaded street_width var ((168,))\n", - "Rank 000: Loading terrain var (173/175)\n", - "Rank 000: Loaded terrain var ((168,))\n", - "Rank 000: Loading vertical_datum var (174/175)\n", - "Rank 000: Loaded vertical_datum var ((168,))\n", - "Rank 000: Loading weekday_weekend_code var (175/175)\n", - "Rank 000: Loaded weekday_weekend_code var ((168, 720))\n" - ] - } - ], - "source": [ - "nessy_1.load()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Write" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating providentia_obs_file.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n", - "Rank 000: Writing ASTER_v3_altitude var (1/175)\n", - "Rank 000: Var ASTER_v3_altitude created (1/175)\n", - "Rank 000: Var ASTER_v3_altitude data (1/175)\n", - "Rank 000: Var ASTER_v3_altitude completed (1/175)\n", - "Rank 000: Writing EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions created (2/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions data (2/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions completed (2/175)\n", - "Rank 000: Writing EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions created (3/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions data (3/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions completed (3/175)\n", - "Rank 000: Writing EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions created (4/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions data (4/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions completed (4/175)\n", - "Rank 000: Writing EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions created (5/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions data (5/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions completed (5/175)\n", - "Rank 000: Writing EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions created (6/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions data (6/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions completed (6/175)\n", - "Rank 000: Writing EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", - "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions created (7/175)\n", - "Rank 000: Var 
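Metadata such as `time`, `lat`, and `lon` are available as soon as the file is opened, while `load()` is what reads the variable payloads themselves; afterwards each entry of the `variables` mapping carries its array under `'data'`, mirroring the `lat`/`lon` dictionaries above. A condensed sketch of that pattern, assuming the file was opened with NES's `open_netcdf` as earlier in this notebook (the file name here is hypothetical):

```python
from nes import open_netcdf

# Sketch of the read pattern, not verbatim notebook code.
nessy = open_netcdf(path='sconco3_201804.nc', info=True)  # hypothetical path

nessy.load()  # pulls all 175 variables into memory (log shown above)

# After load(), each variable dict exposes its array under 'data':
o3 = nessy.variables['sconco3']['data']
print(o3.shape)  # (168, 720): 168 stations x 720 hourly steps
```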
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Write"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Rank 000: Creating providentia_obs_file.nc\n",
-      "Rank 000: NetCDF ready to write\n",
-      "Rank 000: Dimensions done\n",
-      "Rank 000: Writing ASTER_v3_altitude var (1/175)\n",
-      "Rank 000: Var ASTER_v3_altitude created (1/175)\n",
-      "Rank 000: Var ASTER_v3_altitude data (1/175)\n",
-      "Rank 000: Var ASTER_v3_altitude completed (1/175)\n",
-      [... analogous Writing/created/data/completed quartets for vars 2-79 omitted ...]
-      "Rank 000: Writing contact_name var (80/175)\n",
-      "Rank 000: Var contact_name created (80/175)\n",
-      "Rank 000: Var contact_name data (80/175)\n",
-      "Rank 000: Var contact_name completed (80/175)\n",
-
"Rank 000: Writing country var (81/175)\n", - "Rank 000: Var country created (81/175)\n", - "Rank 000: Var country data (81/175)\n", - "Rank 000: Var country completed (81/175)\n", - "Rank 000: Writing daily_native_max_gap_percent var (82/175)\n", - "Rank 000: Var daily_native_max_gap_percent created (82/175)\n", - "Rank 000: Var daily_native_max_gap_percent data (82/175)\n", - "Rank 000: Var daily_native_max_gap_percent completed (82/175)\n", - "Rank 000: Writing daily_native_representativity_percent var (83/175)\n", - "Rank 000: Var daily_native_representativity_percent created (83/175)\n", - "Rank 000: Var daily_native_representativity_percent data (83/175)\n", - "Rank 000: Var daily_native_representativity_percent completed (83/175)\n", - "Rank 000: Writing daily_passing_vehicles var (84/175)\n", - "Rank 000: Var daily_passing_vehicles created (84/175)\n", - "Rank 000: Var daily_passing_vehicles data (84/175)\n", - "Rank 000: Var daily_passing_vehicles completed (84/175)\n", - "Rank 000: Writing data_level var (85/175)\n", - "Rank 000: Var data_level created (85/175)\n", - "Rank 000: Var data_level data (85/175)\n", - "Rank 000: Var data_level completed (85/175)\n", - "Rank 000: Writing data_licence var (86/175)\n", - "Rank 000: Var data_licence created (86/175)\n", - "Rank 000: Var data_licence data (86/175)\n", - "Rank 000: Var data_licence completed (86/175)\n", - "Rank 000: Writing day_night_code var (87/175)\n", - "Rank 000: Var day_night_code created (87/175)\n", - "Rank 000: Var day_night_code data (87/175)\n", - "Rank 000: Var day_night_code completed (87/175)\n", - "Rank 000: Writing daytime_traffic_speed var (88/175)\n", - "Rank 000: Var daytime_traffic_speed created (88/175)\n", - "Rank 000: Var daytime_traffic_speed data (88/175)\n", - "Rank 000: Var daytime_traffic_speed completed (88/175)\n", - "Rank 000: Writing derived_uncertainty_per_measurement var (89/175)\n", - "Rank 000: Var derived_uncertainty_per_measurement created (89/175)\n", - "Rank 000: Var derived_uncertainty_per_measurement data (89/175)\n", - "Rank 000: Var derived_uncertainty_per_measurement completed (89/175)\n", - "Rank 000: Writing distance_to_building var (90/175)\n", - "Rank 000: Var distance_to_building created (90/175)\n", - "Rank 000: Var distance_to_building data (90/175)\n", - "Rank 000: Var distance_to_building completed (90/175)\n", - "Rank 000: Writing distance_to_junction var (91/175)\n", - "Rank 000: Var distance_to_junction created (91/175)\n", - "Rank 000: Var distance_to_junction data (91/175)\n", - "Rank 000: Var distance_to_junction completed (91/175)\n", - "Rank 000: Writing distance_to_kerb var (92/175)\n", - "Rank 000: Var distance_to_kerb created (92/175)\n", - "Rank 000: Var distance_to_kerb data (92/175)\n", - "Rank 000: Var distance_to_kerb completed (92/175)\n", - "Rank 000: Writing distance_to_source var (93/175)\n", - "Rank 000: Var distance_to_source created (93/175)\n", - "Rank 000: Var distance_to_source data (93/175)\n", - "Rank 000: Var distance_to_source completed (93/175)\n", - "Rank 000: Writing ellipsoid var (94/175)\n", - "Rank 000: Var ellipsoid created (94/175)\n", - "Rank 000: Var ellipsoid data (94/175)\n", - "Rank 000: Var ellipsoid completed (94/175)\n", - "Rank 000: Writing horizontal_datum var (95/175)\n", - "Rank 000: Var horizontal_datum created (95/175)\n", - "Rank 000: Var horizontal_datum data (95/175)\n", - "Rank 000: Var horizontal_datum completed (95/175)\n", - "Rank 000: Writing hourly_native_max_gap_percent var (96/175)\n", - "Rank 000: Var 
hourly_native_max_gap_percent created (96/175)\n", - "Rank 000: Var hourly_native_max_gap_percent data (96/175)\n", - "Rank 000: Var hourly_native_max_gap_percent completed (96/175)\n", - "Rank 000: Writing hourly_native_representativity_percent var (97/175)\n", - "Rank 000: Var hourly_native_representativity_percent created (97/175)\n", - "Rank 000: Var hourly_native_representativity_percent data (97/175)\n", - "Rank 000: Var hourly_native_representativity_percent completed (97/175)\n", - "Rank 000: Writing land_use var (98/175)\n", - "Rank 000: Var land_use created (98/175)\n", - "Rank 000: Var land_use data (98/175)\n", - "Rank 000: Var land_use completed (98/175)\n", - "Rank 000: Writing local_time var (99/175)\n", - "Rank 000: Var local_time created (99/175)\n", - "Rank 000: Var local_time data (99/175)\n", - "Rank 000: Var local_time completed (99/175)\n", - "Rank 000: Writing main_emission_source var (100/175)\n", - "Rank 000: Var main_emission_source created (100/175)\n", - "Rank 000: Var main_emission_source data (100/175)\n", - "Rank 000: Var main_emission_source completed (100/175)\n", - "Rank 000: Writing mean_solar_time var (101/175)\n", - "Rank 000: Var mean_solar_time created (101/175)\n", - "Rank 000: Var mean_solar_time data (101/175)\n", - "Rank 000: Var mean_solar_time completed (101/175)\n", - "Rank 000: Writing measurement_altitude var (102/175)\n", - "Rank 000: Var measurement_altitude created (102/175)\n", - "Rank 000: Var measurement_altitude data (102/175)\n", - "Rank 000: Var measurement_altitude completed (102/175)\n", - "Rank 000: Writing measurement_methodology var (103/175)\n", - "Rank 000: Var measurement_methodology created (103/175)\n", - "Rank 000: Var measurement_methodology data (103/175)\n", - "Rank 000: Var measurement_methodology completed (103/175)\n", - "Rank 000: Writing measurement_scale var (104/175)\n", - "Rank 000: Var measurement_scale created (104/175)\n", - "Rank 000: Var measurement_scale data (104/175)\n", - "Rank 000: Var measurement_scale completed (104/175)\n", - "Rank 000: Writing measuring_instrument_calibration_scale var (105/175)\n", - "Rank 000: Var measuring_instrument_calibration_scale created (105/175)\n", - "Rank 000: Var measuring_instrument_calibration_scale data (105/175)\n", - "Rank 000: Var measuring_instrument_calibration_scale completed (105/175)\n", - "Rank 000: Writing measuring_instrument_documented_absorption_cross_section var (106/175)\n", - "Rank 000: Var measuring_instrument_documented_absorption_cross_section created (106/175)\n", - "Rank 000: Var measuring_instrument_documented_absorption_cross_section data (106/175)\n", - "Rank 000: Var measuring_instrument_documented_absorption_cross_section completed (106/175)\n", - "Rank 000: Writing measuring_instrument_documented_accuracy var (107/175)\n", - "Rank 000: Var measuring_instrument_documented_accuracy created (107/175)\n", - "Rank 000: Var measuring_instrument_documented_accuracy data (107/175)\n", - "Rank 000: Var measuring_instrument_documented_accuracy completed (107/175)\n", - "Rank 000: Writing measuring_instrument_documented_flow_rate var (108/175)\n", - "Rank 000: Var measuring_instrument_documented_flow_rate created (108/175)\n", - "Rank 000: Var measuring_instrument_documented_flow_rate data (108/175)\n", - "Rank 000: Var measuring_instrument_documented_flow_rate completed (108/175)\n", - "Rank 000: Writing measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", - "Rank 000: Var 
measuring_instrument_documented_lower_limit_of_detection created (109/175)\n", - "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection data (109/175)\n", - "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection completed (109/175)\n", - "Rank 000: Writing measuring_instrument_documented_measurement_resolution var (110/175)\n", - "Rank 000: Var measuring_instrument_documented_measurement_resolution created (110/175)\n", - "Rank 000: Var measuring_instrument_documented_measurement_resolution data (110/175)\n", - "Rank 000: Var measuring_instrument_documented_measurement_resolution completed (110/175)\n", - "Rank 000: Writing measuring_instrument_documented_precision var (111/175)\n", - "Rank 000: Var measuring_instrument_documented_precision created (111/175)\n", - "Rank 000: Var measuring_instrument_documented_precision data (111/175)\n", - "Rank 000: Var measuring_instrument_documented_precision completed (111/175)\n", - "Rank 000: Writing measuring_instrument_documented_span_drift var (112/175)\n", - "Rank 000: Var measuring_instrument_documented_span_drift created (112/175)\n", - "Rank 000: Var measuring_instrument_documented_span_drift data (112/175)\n", - "Rank 000: Var measuring_instrument_documented_span_drift completed (112/175)\n", - "Rank 000: Writing measuring_instrument_documented_uncertainty var (113/175)\n", - "Rank 000: Var measuring_instrument_documented_uncertainty created (113/175)\n", - "Rank 000: Var measuring_instrument_documented_uncertainty data (113/175)\n", - "Rank 000: Var measuring_instrument_documented_uncertainty completed (113/175)\n", - "Rank 000: Writing measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", - "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection created (114/175)\n", - "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection data (114/175)\n", - "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection completed (114/175)\n", - "Rank 000: Writing measuring_instrument_documented_zero_drift var (115/175)\n", - "Rank 000: Var measuring_instrument_documented_zero_drift created (115/175)\n", - "Rank 000: Var measuring_instrument_documented_zero_drift data (115/175)\n", - "Rank 000: Var measuring_instrument_documented_zero_drift completed (115/175)\n", - "Rank 000: Writing measuring_instrument_documented_zonal_drift var (116/175)\n", - "Rank 000: Var measuring_instrument_documented_zonal_drift created (116/175)\n", - "Rank 000: Var measuring_instrument_documented_zonal_drift data (116/175)\n", - "Rank 000: Var measuring_instrument_documented_zonal_drift completed (116/175)\n", - "Rank 000: Writing measuring_instrument_further_details var (117/175)\n", - "Rank 000: Var measuring_instrument_further_details created (117/175)\n", - "Rank 000: Var measuring_instrument_further_details data (117/175)\n", - "Rank 000: Var measuring_instrument_further_details completed (117/175)\n", - "Rank 000: Writing measuring_instrument_inlet_information var (118/175)\n", - "Rank 000: Var measuring_instrument_inlet_information created (118/175)\n", - "Rank 000: Var measuring_instrument_inlet_information data (118/175)\n", - "Rank 000: Var measuring_instrument_inlet_information completed (118/175)\n", - "Rank 000: Writing measuring_instrument_manual_name var (119/175)\n", - "Rank 000: Var measuring_instrument_manual_name created (119/175)\n", - "Rank 000: Var measuring_instrument_manual_name data (119/175)\n", - "Rank 000: Var measuring_instrument_manual_name completed 
(119/175)\n", - "Rank 000: Writing measuring_instrument_name var (120/175)\n", - "Rank 000: Var measuring_instrument_name created (120/175)\n", - "Rank 000: Var measuring_instrument_name data (120/175)\n", - "Rank 000: Var measuring_instrument_name completed (120/175)\n", - "Rank 000: Writing measuring_instrument_process_details var (121/175)\n", - "Rank 000: Var measuring_instrument_process_details created (121/175)\n", - "Rank 000: Var measuring_instrument_process_details data (121/175)\n", - "Rank 000: Var measuring_instrument_process_details completed (121/175)\n", - "Rank 000: Writing measuring_instrument_reported_absorption_cross_section var (122/175)\n", - "Rank 000: Var measuring_instrument_reported_absorption_cross_section created (122/175)\n", - "Rank 000: Var measuring_instrument_reported_absorption_cross_section data (122/175)\n", - "Rank 000: Var measuring_instrument_reported_absorption_cross_section completed (122/175)\n", - "Rank 000: Writing measuring_instrument_reported_accuracy var (123/175)\n", - "Rank 000: Var measuring_instrument_reported_accuracy created (123/175)\n", - "Rank 000: Var measuring_instrument_reported_accuracy data (123/175)\n", - "Rank 000: Var measuring_instrument_reported_accuracy completed (123/175)\n", - "Rank 000: Writing measuring_instrument_reported_flow_rate var (124/175)\n", - "Rank 000: Var measuring_instrument_reported_flow_rate created (124/175)\n", - "Rank 000: Var measuring_instrument_reported_flow_rate data (124/175)\n", - "Rank 000: Var measuring_instrument_reported_flow_rate completed (124/175)\n", - "Rank 000: Writing measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", - "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection created (125/175)\n", - "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection data (125/175)\n", - "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection completed (125/175)\n", - "Rank 000: Writing measuring_instrument_reported_measurement_resolution var (126/175)\n", - "Rank 000: Var measuring_instrument_reported_measurement_resolution created (126/175)\n", - "Rank 000: Var measuring_instrument_reported_measurement_resolution data (126/175)\n", - "Rank 000: Var measuring_instrument_reported_measurement_resolution completed (126/175)\n", - "Rank 000: Writing measuring_instrument_reported_precision var (127/175)\n", - "Rank 000: Var measuring_instrument_reported_precision created (127/175)\n", - "Rank 000: Var measuring_instrument_reported_precision data (127/175)\n", - "Rank 000: Var measuring_instrument_reported_precision completed (127/175)\n", - "Rank 000: Writing measuring_instrument_reported_span_drift var (128/175)\n", - "Rank 000: Var measuring_instrument_reported_span_drift created (128/175)\n", - "Rank 000: Var measuring_instrument_reported_span_drift data (128/175)\n", - "Rank 000: Var measuring_instrument_reported_span_drift completed (128/175)\n", - "Rank 000: Writing measuring_instrument_reported_uncertainty var (129/175)\n", - "Rank 000: Var measuring_instrument_reported_uncertainty created (129/175)\n", - "Rank 000: Var measuring_instrument_reported_uncertainty data (129/175)\n", - "Rank 000: Var measuring_instrument_reported_uncertainty completed (129/175)\n", - "Rank 000: Writing measuring_instrument_reported_units var (130/175)\n", - "Rank 000: Var measuring_instrument_reported_units created (130/175)\n", - "Rank 000: Var measuring_instrument_reported_units data (130/175)\n", - "Rank 000: Var measuring_instrument_reported_units 
completed (130/175)\n", - "Rank 000: Writing measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", - "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection created (131/175)\n", - "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection data (131/175)\n", - "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection completed (131/175)\n", - "Rank 000: Writing measuring_instrument_reported_zero_drift var (132/175)\n", - "Rank 000: Var measuring_instrument_reported_zero_drift created (132/175)\n", - "Rank 000: Var measuring_instrument_reported_zero_drift data (132/175)\n", - "Rank 000: Var measuring_instrument_reported_zero_drift completed (132/175)\n", - "Rank 000: Writing measuring_instrument_reported_zonal_drift var (133/175)\n", - "Rank 000: Var measuring_instrument_reported_zonal_drift created (133/175)\n", - "Rank 000: Var measuring_instrument_reported_zonal_drift data (133/175)\n", - "Rank 000: Var measuring_instrument_reported_zonal_drift completed (133/175)\n", - "Rank 000: Writing measuring_instrument_sampling_type var (134/175)\n", - "Rank 000: Var measuring_instrument_sampling_type created (134/175)\n", - "Rank 000: Var measuring_instrument_sampling_type data (134/175)\n", - "Rank 000: Var measuring_instrument_sampling_type completed (134/175)\n", - "Rank 000: Writing monthly_native_max_gap_percent var (135/175)\n", - "Rank 000: Var monthly_native_max_gap_percent created (135/175)\n", - "Rank 000: Var monthly_native_max_gap_percent data (135/175)\n", - "Rank 000: Var monthly_native_max_gap_percent completed (135/175)\n", - "Rank 000: Writing monthly_native_representativity_percent var (136/175)\n", - "Rank 000: Var monthly_native_representativity_percent created (136/175)\n", - "Rank 000: Var monthly_native_representativity_percent data (136/175)\n", - "Rank 000: Var monthly_native_representativity_percent completed (136/175)\n", - "Rank 000: Writing network var (137/175)\n", - "Rank 000: Var network created (137/175)\n", - "Rank 000: Var network data (137/175)\n", - "Rank 000: Var network completed (137/175)\n", - "Rank 000: Writing network_maintenance_details var (138/175)\n", - "Rank 000: Var network_maintenance_details created (138/175)\n", - "Rank 000: Var network_maintenance_details data (138/175)\n", - "Rank 000: Var network_maintenance_details completed (138/175)\n", - "Rank 000: Writing network_miscellaneous_details var (139/175)\n", - "Rank 000: Var network_miscellaneous_details created (139/175)\n", - "Rank 000: Var network_miscellaneous_details data (139/175)\n", - "Rank 000: Var network_miscellaneous_details completed (139/175)\n", - "Rank 000: Writing network_provided_volume_standard_pressure var (140/175)\n", - "Rank 000: Var network_provided_volume_standard_pressure created (140/175)\n", - "Rank 000: Var network_provided_volume_standard_pressure data (140/175)\n", - "Rank 000: Var network_provided_volume_standard_pressure completed (140/175)\n", - "Rank 000: Writing network_provided_volume_standard_temperature var (141/175)\n", - "Rank 000: Var network_provided_volume_standard_temperature created (141/175)\n", - "Rank 000: Var network_provided_volume_standard_temperature data (141/175)\n", - "Rank 000: Var network_provided_volume_standard_temperature completed (141/175)\n", - "Rank 000: Writing network_qa_details var (142/175)\n", - "Rank 000: Var network_qa_details created (142/175)\n", - "Rank 000: Var network_qa_details data (142/175)\n", - "Rank 000: Var network_qa_details completed (142/175)\n", - 
"Rank 000: Writing network_sampling_details var (143/175)\n", - "Rank 000: Var network_sampling_details created (143/175)\n", - "Rank 000: Var network_sampling_details data (143/175)\n", - "Rank 000: Var network_sampling_details completed (143/175)\n", - "Rank 000: Writing network_uncertainty_details var (144/175)\n", - "Rank 000: Var network_uncertainty_details created (144/175)\n", - "Rank 000: Var network_uncertainty_details data (144/175)\n", - "Rank 000: Var network_uncertainty_details completed (144/175)\n", - "Rank 000: Writing population var (145/175)\n", - "Rank 000: Var population created (145/175)\n", - "Rank 000: Var population data (145/175)\n", - "Rank 000: Var population completed (145/175)\n", - "Rank 000: Writing primary_sampling_further_details var (146/175)\n", - "Rank 000: Var primary_sampling_further_details created (146/175)\n", - "Rank 000: Var primary_sampling_further_details data (146/175)\n", - "Rank 000: Var primary_sampling_further_details completed (146/175)\n", - "Rank 000: Writing primary_sampling_instrument_documented_flow_rate var (147/175)\n", - "Rank 000: Var primary_sampling_instrument_documented_flow_rate created (147/175)\n", - "Rank 000: Var primary_sampling_instrument_documented_flow_rate data (147/175)\n", - "Rank 000: Var primary_sampling_instrument_documented_flow_rate completed (147/175)\n", - "Rank 000: Writing primary_sampling_instrument_manual_name var (148/175)\n", - "Rank 000: Var primary_sampling_instrument_manual_name created (148/175)\n", - "Rank 000: Var primary_sampling_instrument_manual_name data (148/175)\n", - "Rank 000: Var primary_sampling_instrument_manual_name completed (148/175)\n", - "Rank 000: Writing primary_sampling_instrument_name var (149/175)\n", - "Rank 000: Var primary_sampling_instrument_name created (149/175)\n", - "Rank 000: Var primary_sampling_instrument_name data (149/175)\n", - "Rank 000: Var primary_sampling_instrument_name completed (149/175)\n", - "Rank 000: Writing primary_sampling_instrument_reported_flow_rate var (150/175)\n", - "Rank 000: Var primary_sampling_instrument_reported_flow_rate created (150/175)\n", - "Rank 000: Var primary_sampling_instrument_reported_flow_rate data (150/175)\n", - "Rank 000: Var primary_sampling_instrument_reported_flow_rate completed (150/175)\n", - "Rank 000: Writing primary_sampling_process_details var (151/175)\n", - "Rank 000: Var primary_sampling_process_details created (151/175)\n", - "Rank 000: Var primary_sampling_process_details data (151/175)\n", - "Rank 000: Var primary_sampling_process_details completed (151/175)\n", - "Rank 000: Writing primary_sampling_type var (152/175)\n", - "Rank 000: Var primary_sampling_type created (152/175)\n", - "Rank 000: Var primary_sampling_type data (152/175)\n", - "Rank 000: Var primary_sampling_type completed (152/175)\n", - "Rank 000: Writing principal_investigator_email_address var (153/175)\n", - "Rank 000: Var principal_investigator_email_address created (153/175)\n", - "Rank 000: Var principal_investigator_email_address data (153/175)\n", - "Rank 000: Var principal_investigator_email_address completed (153/175)\n", - "Rank 000: Writing principal_investigator_institution var (154/175)\n", - "Rank 000: Var principal_investigator_institution created (154/175)\n", - "Rank 000: Var principal_investigator_institution data (154/175)\n", - "Rank 000: Var principal_investigator_institution completed (154/175)\n", - "Rank 000: Writing principal_investigator_name var (155/175)\n", - "Rank 000: Var principal_investigator_name created 
(155/175)\n", - "Rank 000: Var principal_investigator_name data (155/175)\n", - "Rank 000: Var principal_investigator_name completed (155/175)\n", - "Rank 000: Writing process_warnings var (156/175)\n", - "Rank 000: Var process_warnings created (156/175)\n", - "Rank 000: Var process_warnings data (156/175)\n", - "Rank 000: Var process_warnings completed (156/175)\n", - "Rank 000: Writing projection var (157/175)\n", - "Rank 000: Var projection created (157/175)\n", - "Rank 000: Var projection data (157/175)\n", - "Rank 000: Var projection completed (157/175)\n", - "Rank 000: Writing reported_uncertainty_per_measurement var (158/175)\n", - "Rank 000: Var reported_uncertainty_per_measurement created (158/175)\n", - "Rank 000: Var reported_uncertainty_per_measurement data (158/175)\n", - "Rank 000: Var reported_uncertainty_per_measurement completed (158/175)\n", - "Rank 000: Writing representative_radius var (159/175)\n", - "Rank 000: Var representative_radius created (159/175)\n", - "Rank 000: Var representative_radius data (159/175)\n", - "Rank 000: Var representative_radius completed (159/175)\n", - "Rank 000: Writing sample_preparation_further_details var (160/175)\n", - "Rank 000: Var sample_preparation_further_details created (160/175)\n", - "Rank 000: Var sample_preparation_further_details data (160/175)\n", - "Rank 000: Var sample_preparation_further_details completed (160/175)\n", - "Rank 000: Writing sample_preparation_process_details var (161/175)\n", - "Rank 000: Var sample_preparation_process_details created (161/175)\n", - "Rank 000: Var sample_preparation_process_details data (161/175)\n", - "Rank 000: Var sample_preparation_process_details completed (161/175)\n", - "Rank 000: Writing sample_preparation_techniques var (162/175)\n", - "Rank 000: Var sample_preparation_techniques created (162/175)\n", - "Rank 000: Var sample_preparation_techniques data (162/175)\n", - "Rank 000: Var sample_preparation_techniques completed (162/175)\n", - "Rank 000: Writing sample_preparation_types var (163/175)\n", - "Rank 000: Var sample_preparation_types created (163/175)\n", - "Rank 000: Var sample_preparation_types data (163/175)\n", - "Rank 000: Var sample_preparation_types completed (163/175)\n", - "Rank 000: Writing sampling_height var (164/175)\n", - "Rank 000: Var sampling_height created (164/175)\n", - "Rank 000: Var sampling_height data (164/175)\n", - "Rank 000: Var sampling_height completed (164/175)\n", - "Rank 000: Writing sconco3 var (165/175)\n", - "Rank 000: Var sconco3 created (165/175)\n", - "Rank 000: Var sconco3 data (165/175)\n", - "Rank 000: Var sconco3 completed (165/175)\n", - "Rank 000: Writing season_code var (166/175)\n", - "Rank 000: Var season_code created (166/175)\n", - "Rank 000: Var season_code data (166/175)\n", - "Rank 000: Var season_code completed (166/175)\n", - "Rank 000: Writing station_classification var (167/175)\n", - "Rank 000: Var station_classification created (167/175)\n", - "Rank 000: Var station_classification data (167/175)\n", - "Rank 000: Var station_classification completed (167/175)\n", - "Rank 000: Writing station_name var (168/175)\n", - "Rank 000: Var station_name created (168/175)\n", - "Rank 000: Var station_name data (168/175)\n", - "Rank 000: Var station_name completed (168/175)\n", - "Rank 000: Writing station_reference var (169/175)\n", - "Rank 000: Var station_reference created (169/175)\n", - "Rank 000: Var station_reference data (169/175)\n", - "Rank 000: Var station_reference completed (169/175)\n", - "Rank 000: Writing 
station_timezone var (170/175)\n", - "Rank 000: Var station_timezone created (170/175)\n", - "Rank 000: Var station_timezone data (170/175)\n", - "Rank 000: Var station_timezone completed (170/175)\n", - "Rank 000: Writing street_type var (171/175)\n", - "Rank 000: Var street_type created (171/175)\n", - "Rank 000: Var street_type data (171/175)\n", - "Rank 000: Var street_type completed (171/175)\n", - "Rank 000: Writing street_width var (172/175)\n", - "Rank 000: Var street_width created (172/175)\n", - "Rank 000: Var street_width data (172/175)\n", - "Rank 000: Var street_width completed (172/175)\n", - "Rank 000: Writing terrain var (173/175)\n", - "Rank 000: Var terrain created (173/175)\n", - "Rank 000: Var terrain data (173/175)\n", - "Rank 000: Var terrain completed (173/175)\n", - "Rank 000: Writing vertical_datum var (174/175)\n", - "Rank 000: Var vertical_datum created (174/175)\n", - "Rank 000: Var vertical_datum data (174/175)\n", - "Rank 000: Var vertical_datum completed (174/175)\n", - "Rank 000: Writing weekday_weekend_code var (175/175)\n", - "Rank 000: Var weekday_weekend_code created (175/175)\n", - "Rank 000: Var weekday_weekend_code data (175/175)\n", - "Rank 000: Var weekday_weekend_code completed (175/175)\n" - ] - } - ], - "source": [ - "nessy_1.to_netcdf('providentia_obs_file.nc', info=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Experiments dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "exp_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Read" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
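-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This notebook runs on a single process, so `parallel_method='X'` has no visible effect here; under MPI the same call distributes the read across ranks. Below is a minimal sketch (not part of the original workflow) of how such a run could look when launched with `mpirun`. The script name, the copy filename and the rank print are illustrative assumptions; only `open_netcdf` and `to_netcdf` are taken from the calls shown in this notebook."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Sketch: save as read_exp_parallel.py and launch with, e.g.:\n",
-    "#     mpirun -n 4 python read_exp_parallel.py\n",
-    "from mpi4py import MPI  # assumption: NES picks up MPI when run under mpirun\n",
-    "from nes import open_netcdf\n",
-    "\n",
-    "exp_path = ('/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/'\n",
-    "            'cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc')\n",
-    "\n",
-    "# Each rank opens the same file; parallel_method='X' splits the data\n",
-    "# along the X axis so every rank only keeps its own chunk in memory.\n",
-    "nessy = open_netcdf(path=exp_path, info=True, parallel_method='X')\n",
-    "print('Rank {0:03d} ready'.format(MPI.COMM_WORLD.Get_rank()))\n",
-    "\n",
-    "# to_netcdf gathers the distributed chunks into one output file.\n",
-    "nessy.to_netcdf('sconco3_201804_copy.nc', info=True)"
-   ]
-  },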
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset>\n",
-       "Dimensions:                 (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n",
-       "Coordinates:\n",
-       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
-       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
-       "Data variables:\n",
-       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
-       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
-       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
-       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
-       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
-       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
-       "    sconco3                 (station, time) float32 ...\n",
-       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
-       "Attributes:\n",
-       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
-       "    institution:    Barcelona Supercomputing Center\n",
-       "    source:         Experiment cams61_chimere_ph2\n",
-       "    creator_name:   Dene R. Bowdalo\n",
-       "    creator_email:  dene.bowdalo@bsc.es\n",
-       "    conventions:    CF-1.7\n",
-       "    data_version:   1.0\n",
-       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
-       "    NCO:            4.7.2
" - ], - "text/plain": [ - "\n", - "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", - "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", - "Data variables:\n", - " grid_edge_latitude (grid_edge) float64 ...\n", - " grid_edge_longitude (grid_edge) float64 ...\n", - " latitude (station) float64 ...\n", - " longitude (station) float64 ...\n", - " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", - " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", - " sconco3 (station, time) float32 ...\n", - " station_reference (station) object ...\n", - "Attributes:\n", - " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", - " institution: Barcelona Supercomputing Center\n", - " source: Experiment cams61_chimere_ph2\n", - " creator_name: Dene R. Bowdalo\n", - " creator_email: dene.bowdalo@bsc.es\n", - " conventions: CF-1.7\n", - " data_version: 1.0\n", - " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", - " NCO: 4.7.2" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset(exp_path)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_2 = open_netcdf(path=exp_path, info=True, parallel_method='X')\n", - "nessy_2" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[datetime.datetime(2018, 4, 1, 0, 0),\n", - " datetime.datetime(2018, 4, 1, 1, 0),\n", - " datetime.datetime(2018, 4, 1, 2, 0),\n", - " datetime.datetime(2018, 4, 1, 3, 0),\n", - " datetime.datetime(2018, 4, 1, 4, 0),\n", - " datetime.datetime(2018, 4, 1, 5, 0),\n", - " datetime.datetime(2018, 4, 1, 6, 0),\n", - " datetime.datetime(2018, 4, 1, 7, 0),\n", - " datetime.datetime(2018, 4, 1, 8, 0),\n", - " datetime.datetime(2018, 4, 1, 9, 0),\n", - " datetime.datetime(2018, 4, 1, 10, 0),\n", - " datetime.datetime(2018, 4, 1, 11, 0),\n", - " datetime.datetime(2018, 4, 1, 12, 0),\n", - " datetime.datetime(2018, 4, 1, 13, 0),\n", - " datetime.datetime(2018, 4, 1, 14, 0),\n", - " datetime.datetime(2018, 4, 1, 15, 0),\n", - " datetime.datetime(2018, 4, 1, 16, 0),\n", - " datetime.datetime(2018, 4, 1, 17, 0),\n", - " datetime.datetime(2018, 4, 1, 18, 0),\n", - " datetime.datetime(2018, 4, 1, 19, 0),\n", - " datetime.datetime(2018, 4, 1, 20, 0),\n", - " datetime.datetime(2018, 4, 1, 21, 0),\n", - " datetime.datetime(2018, 4, 1, 22, 0),\n", - " datetime.datetime(2018, 4, 1, 23, 0),\n", - " datetime.datetime(2018, 4, 2, 0, 0),\n", - " datetime.datetime(2018, 4, 2, 1, 0),\n", - " datetime.datetime(2018, 4, 2, 2, 0),\n", - " datetime.datetime(2018, 4, 2, 3, 0),\n", - " datetime.datetime(2018, 4, 2, 4, 0),\n", - " datetime.datetime(2018, 4, 2, 5, 0),\n", - " datetime.datetime(2018, 4, 2, 6, 0),\n", - " datetime.datetime(2018, 4, 2, 7, 0),\n", - " datetime.datetime(2018, 4, 2, 8, 0),\n", - " datetime.datetime(2018, 4, 2, 9, 0),\n", - " datetime.datetime(2018, 4, 2, 10, 0),\n", - " datetime.datetime(2018, 4, 2, 11, 0),\n", - " datetime.datetime(2018, 4, 2, 12, 0),\n", - " datetime.datetime(2018, 4, 2, 13, 0),\n", - " datetime.datetime(2018, 4, 2, 14, 
0),\n", - " datetime.datetime(2018, 4, 2, 15, 0),\n", - " datetime.datetime(2018, 4, 2, 16, 0),\n", - " datetime.datetime(2018, 4, 2, 17, 0),\n", - " datetime.datetime(2018, 4, 2, 18, 0),\n", - " datetime.datetime(2018, 4, 2, 19, 0),\n", - " datetime.datetime(2018, 4, 2, 20, 0),\n", - " datetime.datetime(2018, 4, 2, 21, 0),\n", - " datetime.datetime(2018, 4, 2, 22, 0),\n", - " datetime.datetime(2018, 4, 2, 23, 0),\n", - " datetime.datetime(2018, 4, 3, 0, 0),\n", - " datetime.datetime(2018, 4, 3, 1, 0),\n", - " datetime.datetime(2018, 4, 3, 2, 0),\n", - " datetime.datetime(2018, 4, 3, 3, 0),\n", - " datetime.datetime(2018, 4, 3, 4, 0),\n", - " datetime.datetime(2018, 4, 3, 5, 0),\n", - " datetime.datetime(2018, 4, 3, 6, 0),\n", - " datetime.datetime(2018, 4, 3, 7, 0),\n", - " datetime.datetime(2018, 4, 3, 8, 0),\n", - " datetime.datetime(2018, 4, 3, 9, 0),\n", - " datetime.datetime(2018, 4, 3, 10, 0),\n", - " datetime.datetime(2018, 4, 3, 11, 0),\n", - " datetime.datetime(2018, 4, 3, 12, 0),\n", - " datetime.datetime(2018, 4, 3, 13, 0),\n", - " datetime.datetime(2018, 4, 3, 14, 0),\n", - " datetime.datetime(2018, 4, 3, 15, 0),\n", - " datetime.datetime(2018, 4, 3, 16, 0),\n", - " datetime.datetime(2018, 4, 3, 17, 0),\n", - " datetime.datetime(2018, 4, 3, 18, 0),\n", - " datetime.datetime(2018, 4, 3, 19, 0),\n", - " datetime.datetime(2018, 4, 3, 20, 0),\n", - " datetime.datetime(2018, 4, 3, 21, 0),\n", - " datetime.datetime(2018, 4, 3, 22, 0),\n", - " datetime.datetime(2018, 4, 3, 23, 0),\n", - " datetime.datetime(2018, 4, 4, 0, 0),\n", - " datetime.datetime(2018, 4, 4, 1, 0),\n", - " datetime.datetime(2018, 4, 4, 2, 0),\n", - " datetime.datetime(2018, 4, 4, 3, 0),\n", - " datetime.datetime(2018, 4, 4, 4, 0),\n", - " datetime.datetime(2018, 4, 4, 5, 0),\n", - " datetime.datetime(2018, 4, 4, 6, 0),\n", - " datetime.datetime(2018, 4, 4, 7, 0),\n", - " datetime.datetime(2018, 4, 4, 8, 0),\n", - " datetime.datetime(2018, 4, 4, 9, 0),\n", - " datetime.datetime(2018, 4, 4, 10, 0),\n", - " datetime.datetime(2018, 4, 4, 11, 0),\n", - " datetime.datetime(2018, 4, 4, 12, 0),\n", - " datetime.datetime(2018, 4, 4, 13, 0),\n", - " datetime.datetime(2018, 4, 4, 14, 0),\n", - " datetime.datetime(2018, 4, 4, 15, 0),\n", - " datetime.datetime(2018, 4, 4, 16, 0),\n", - " datetime.datetime(2018, 4, 4, 17, 0),\n", - " datetime.datetime(2018, 4, 4, 18, 0),\n", - " datetime.datetime(2018, 4, 4, 19, 0),\n", - " datetime.datetime(2018, 4, 4, 20, 0),\n", - " datetime.datetime(2018, 4, 4, 21, 0),\n", - " datetime.datetime(2018, 4, 4, 22, 0),\n", - " datetime.datetime(2018, 4, 4, 23, 0),\n", - " datetime.datetime(2018, 4, 5, 0, 0),\n", - " datetime.datetime(2018, 4, 5, 1, 0),\n", - " datetime.datetime(2018, 4, 5, 2, 0),\n", - " datetime.datetime(2018, 4, 5, 3, 0),\n", - " datetime.datetime(2018, 4, 5, 4, 0),\n", - " datetime.datetime(2018, 4, 5, 5, 0),\n", - " datetime.datetime(2018, 4, 5, 6, 0),\n", - " datetime.datetime(2018, 4, 5, 7, 0),\n", - " datetime.datetime(2018, 4, 5, 8, 0),\n", - " datetime.datetime(2018, 4, 5, 9, 0),\n", - " datetime.datetime(2018, 4, 5, 10, 0),\n", - " datetime.datetime(2018, 4, 5, 11, 0),\n", - " datetime.datetime(2018, 4, 5, 12, 0),\n", - " datetime.datetime(2018, 4, 5, 13, 0),\n", - " datetime.datetime(2018, 4, 5, 14, 0),\n", - " datetime.datetime(2018, 4, 5, 15, 0),\n", - " datetime.datetime(2018, 4, 5, 16, 0),\n", - " datetime.datetime(2018, 4, 5, 17, 0),\n", - " datetime.datetime(2018, 4, 5, 18, 0),\n", - " datetime.datetime(2018, 4, 5, 19, 0),\n", - " datetime.datetime(2018, 4, 5, 
20, 0),\n", - " datetime.datetime(2018, 4, 5, 21, 0),\n", - " datetime.datetime(2018, 4, 5, 22, 0),\n", - " datetime.datetime(2018, 4, 5, 23, 0),\n", - " datetime.datetime(2018, 4, 6, 0, 0),\n", - " datetime.datetime(2018, 4, 6, 1, 0),\n", - " datetime.datetime(2018, 4, 6, 2, 0),\n", - " datetime.datetime(2018, 4, 6, 3, 0),\n", - " datetime.datetime(2018, 4, 6, 4, 0),\n", - " datetime.datetime(2018, 4, 6, 5, 0),\n", - " datetime.datetime(2018, 4, 6, 6, 0),\n", - " datetime.datetime(2018, 4, 6, 7, 0),\n", - " datetime.datetime(2018, 4, 6, 8, 0),\n", - " datetime.datetime(2018, 4, 6, 9, 0),\n", - " datetime.datetime(2018, 4, 6, 10, 0),\n", - " datetime.datetime(2018, 4, 6, 11, 0),\n", - " datetime.datetime(2018, 4, 6, 12, 0),\n", - " datetime.datetime(2018, 4, 6, 13, 0),\n", - " datetime.datetime(2018, 4, 6, 14, 0),\n", - " datetime.datetime(2018, 4, 6, 15, 0),\n", - " datetime.datetime(2018, 4, 6, 16, 0),\n", - " datetime.datetime(2018, 4, 6, 17, 0),\n", - " datetime.datetime(2018, 4, 6, 18, 0),\n", - " datetime.datetime(2018, 4, 6, 19, 0),\n", - " datetime.datetime(2018, 4, 6, 20, 0),\n", - " datetime.datetime(2018, 4, 6, 21, 0),\n", - " datetime.datetime(2018, 4, 6, 22, 0),\n", - " datetime.datetime(2018, 4, 6, 23, 0),\n", - " datetime.datetime(2018, 4, 7, 0, 0),\n", - " datetime.datetime(2018, 4, 7, 1, 0),\n", - " datetime.datetime(2018, 4, 7, 2, 0),\n", - " datetime.datetime(2018, 4, 7, 3, 0),\n", - " datetime.datetime(2018, 4, 7, 4, 0),\n", - " datetime.datetime(2018, 4, 7, 5, 0),\n", - " datetime.datetime(2018, 4, 7, 6, 0),\n", - " datetime.datetime(2018, 4, 7, 7, 0),\n", - " datetime.datetime(2018, 4, 7, 8, 0),\n", - " datetime.datetime(2018, 4, 7, 9, 0),\n", - " datetime.datetime(2018, 4, 7, 10, 0),\n", - " datetime.datetime(2018, 4, 7, 11, 0),\n", - " datetime.datetime(2018, 4, 7, 12, 0),\n", - " datetime.datetime(2018, 4, 7, 13, 0),\n", - " datetime.datetime(2018, 4, 7, 14, 0),\n", - " datetime.datetime(2018, 4, 7, 15, 0),\n", - " datetime.datetime(2018, 4, 7, 16, 0),\n", - " datetime.datetime(2018, 4, 7, 17, 0),\n", - " datetime.datetime(2018, 4, 7, 18, 0),\n", - " datetime.datetime(2018, 4, 7, 19, 0),\n", - " datetime.datetime(2018, 4, 7, 20, 0),\n", - " datetime.datetime(2018, 4, 7, 21, 0),\n", - " datetime.datetime(2018, 4, 7, 22, 0),\n", - " datetime.datetime(2018, 4, 7, 23, 0),\n", - " datetime.datetime(2018, 4, 8, 0, 0),\n", - " datetime.datetime(2018, 4, 8, 1, 0),\n", - " datetime.datetime(2018, 4, 8, 2, 0),\n", - " datetime.datetime(2018, 4, 8, 3, 0),\n", - " datetime.datetime(2018, 4, 8, 4, 0),\n", - " datetime.datetime(2018, 4, 8, 5, 0),\n", - " datetime.datetime(2018, 4, 8, 6, 0),\n", - " datetime.datetime(2018, 4, 8, 7, 0),\n", - " datetime.datetime(2018, 4, 8, 8, 0),\n", - " datetime.datetime(2018, 4, 8, 9, 0),\n", - " datetime.datetime(2018, 4, 8, 10, 0),\n", - " datetime.datetime(2018, 4, 8, 11, 0),\n", - " datetime.datetime(2018, 4, 8, 12, 0),\n", - " datetime.datetime(2018, 4, 8, 13, 0),\n", - " datetime.datetime(2018, 4, 8, 14, 0),\n", - " datetime.datetime(2018, 4, 8, 15, 0),\n", - " datetime.datetime(2018, 4, 8, 16, 0),\n", - " datetime.datetime(2018, 4, 8, 17, 0),\n", - " datetime.datetime(2018, 4, 8, 18, 0),\n", - " datetime.datetime(2018, 4, 8, 19, 0),\n", - " datetime.datetime(2018, 4, 8, 20, 0),\n", - " datetime.datetime(2018, 4, 8, 21, 0),\n", - " datetime.datetime(2018, 4, 8, 22, 0),\n", - " datetime.datetime(2018, 4, 8, 23, 0),\n", - " datetime.datetime(2018, 4, 9, 0, 0),\n", - " datetime.datetime(2018, 4, 9, 1, 0),\n", - " datetime.datetime(2018, 4, 
9, 2, 0),\n", - " datetime.datetime(2018, 4, 9, 3, 0),\n", - " datetime.datetime(2018, 4, 9, 4, 0),\n", - " datetime.datetime(2018, 4, 9, 5, 0),\n", - " datetime.datetime(2018, 4, 9, 6, 0),\n", - " datetime.datetime(2018, 4, 9, 7, 0),\n", - " datetime.datetime(2018, 4, 9, 8, 0),\n", - " datetime.datetime(2018, 4, 9, 9, 0),\n", - " datetime.datetime(2018, 4, 9, 10, 0),\n", - " datetime.datetime(2018, 4, 9, 11, 0),\n", - " datetime.datetime(2018, 4, 9, 12, 0),\n", - " datetime.datetime(2018, 4, 9, 13, 0),\n", - " datetime.datetime(2018, 4, 9, 14, 0),\n", - " datetime.datetime(2018, 4, 9, 15, 0),\n", - " datetime.datetime(2018, 4, 9, 16, 0),\n", - " datetime.datetime(2018, 4, 9, 17, 0),\n", - " datetime.datetime(2018, 4, 9, 18, 0),\n", - " datetime.datetime(2018, 4, 9, 19, 0),\n", - " datetime.datetime(2018, 4, 9, 20, 0),\n", - " datetime.datetime(2018, 4, 9, 21, 0),\n", - " datetime.datetime(2018, 4, 9, 22, 0),\n", - " datetime.datetime(2018, 4, 9, 23, 0),\n", - " datetime.datetime(2018, 4, 10, 0, 0),\n", - " datetime.datetime(2018, 4, 10, 1, 0),\n", - " datetime.datetime(2018, 4, 10, 2, 0),\n", - " datetime.datetime(2018, 4, 10, 3, 0),\n", - " datetime.datetime(2018, 4, 10, 4, 0),\n", - " datetime.datetime(2018, 4, 10, 5, 0),\n", - " datetime.datetime(2018, 4, 10, 6, 0),\n", - " datetime.datetime(2018, 4, 10, 7, 0),\n", - " datetime.datetime(2018, 4, 10, 8, 0),\n", - " datetime.datetime(2018, 4, 10, 9, 0),\n", - " datetime.datetime(2018, 4, 10, 10, 0),\n", - " datetime.datetime(2018, 4, 10, 11, 0),\n", - " datetime.datetime(2018, 4, 10, 12, 0),\n", - " datetime.datetime(2018, 4, 10, 13, 0),\n", - " datetime.datetime(2018, 4, 10, 14, 0),\n", - " datetime.datetime(2018, 4, 10, 15, 0),\n", - " datetime.datetime(2018, 4, 10, 16, 0),\n", - " datetime.datetime(2018, 4, 10, 17, 0),\n", - " datetime.datetime(2018, 4, 10, 18, 0),\n", - " datetime.datetime(2018, 4, 10, 19, 0),\n", - " datetime.datetime(2018, 4, 10, 20, 0),\n", - " datetime.datetime(2018, 4, 10, 21, 0),\n", - " datetime.datetime(2018, 4, 10, 22, 0),\n", - " datetime.datetime(2018, 4, 10, 23, 0),\n", - " datetime.datetime(2018, 4, 11, 0, 0),\n", - " datetime.datetime(2018, 4, 11, 1, 0),\n", - " datetime.datetime(2018, 4, 11, 2, 0),\n", - " datetime.datetime(2018, 4, 11, 3, 0),\n", - " datetime.datetime(2018, 4, 11, 4, 0),\n", - " datetime.datetime(2018, 4, 11, 5, 0),\n", - " datetime.datetime(2018, 4, 11, 6, 0),\n", - " datetime.datetime(2018, 4, 11, 7, 0),\n", - " datetime.datetime(2018, 4, 11, 8, 0),\n", - " datetime.datetime(2018, 4, 11, 9, 0),\n", - " datetime.datetime(2018, 4, 11, 10, 0),\n", - " datetime.datetime(2018, 4, 11, 11, 0),\n", - " datetime.datetime(2018, 4, 11, 12, 0),\n", - " datetime.datetime(2018, 4, 11, 13, 0),\n", - " datetime.datetime(2018, 4, 11, 14, 0),\n", - " datetime.datetime(2018, 4, 11, 15, 0),\n", - " datetime.datetime(2018, 4, 11, 16, 0),\n", - " datetime.datetime(2018, 4, 11, 17, 0),\n", - " datetime.datetime(2018, 4, 11, 18, 0),\n", - " datetime.datetime(2018, 4, 11, 19, 0),\n", - " datetime.datetime(2018, 4, 11, 20, 0),\n", - " datetime.datetime(2018, 4, 11, 21, 0),\n", - " datetime.datetime(2018, 4, 11, 22, 0),\n", - " datetime.datetime(2018, 4, 11, 23, 0),\n", - " datetime.datetime(2018, 4, 12, 0, 0),\n", - " datetime.datetime(2018, 4, 12, 1, 0),\n", - " datetime.datetime(2018, 4, 12, 2, 0),\n", - " datetime.datetime(2018, 4, 12, 3, 0),\n", - " datetime.datetime(2018, 4, 12, 4, 0),\n", - " datetime.datetime(2018, 4, 12, 5, 0),\n", - " datetime.datetime(2018, 4, 12, 6, 0),\n", - " 
datetime.datetime(2018, 4, 12, 7, 0),\n", - " datetime.datetime(2018, 4, 12, 8, 0),\n", - " datetime.datetime(2018, 4, 12, 9, 0),\n", - " datetime.datetime(2018, 4, 12, 10, 0),\n", - " datetime.datetime(2018, 4, 12, 11, 0),\n", - " datetime.datetime(2018, 4, 12, 12, 0),\n", - " datetime.datetime(2018, 4, 12, 13, 0),\n", - " datetime.datetime(2018, 4, 12, 14, 0),\n", - " datetime.datetime(2018, 4, 12, 15, 0),\n", - " datetime.datetime(2018, 4, 12, 16, 0),\n", - " datetime.datetime(2018, 4, 12, 17, 0),\n", - " datetime.datetime(2018, 4, 12, 18, 0),\n", - " datetime.datetime(2018, 4, 12, 19, 0),\n", - " datetime.datetime(2018, 4, 12, 20, 0),\n", - " datetime.datetime(2018, 4, 12, 21, 0),\n", - " datetime.datetime(2018, 4, 12, 22, 0),\n", - " datetime.datetime(2018, 4, 12, 23, 0),\n", - " datetime.datetime(2018, 4, 13, 0, 0),\n", - " datetime.datetime(2018, 4, 13, 1, 0),\n", - " datetime.datetime(2018, 4, 13, 2, 0),\n", - " datetime.datetime(2018, 4, 13, 3, 0),\n", - " datetime.datetime(2018, 4, 13, 4, 0),\n", - " datetime.datetime(2018, 4, 13, 5, 0),\n", - " datetime.datetime(2018, 4, 13, 6, 0),\n", - " datetime.datetime(2018, 4, 13, 7, 0),\n", - " datetime.datetime(2018, 4, 13, 8, 0),\n", - " datetime.datetime(2018, 4, 13, 9, 0),\n", - " datetime.datetime(2018, 4, 13, 10, 0),\n", - " datetime.datetime(2018, 4, 13, 11, 0),\n", - " datetime.datetime(2018, 4, 13, 12, 0),\n", - " datetime.datetime(2018, 4, 13, 13, 0),\n", - " datetime.datetime(2018, 4, 13, 14, 0),\n", - " datetime.datetime(2018, 4, 13, 15, 0),\n", - " datetime.datetime(2018, 4, 13, 16, 0),\n", - " datetime.datetime(2018, 4, 13, 17, 0),\n", - " datetime.datetime(2018, 4, 13, 18, 0),\n", - " datetime.datetime(2018, 4, 13, 19, 0),\n", - " datetime.datetime(2018, 4, 13, 20, 0),\n", - " datetime.datetime(2018, 4, 13, 21, 0),\n", - " datetime.datetime(2018, 4, 13, 22, 0),\n", - " datetime.datetime(2018, 4, 13, 23, 0),\n", - " datetime.datetime(2018, 4, 14, 0, 0),\n", - " datetime.datetime(2018, 4, 14, 1, 0),\n", - " datetime.datetime(2018, 4, 14, 2, 0),\n", - " datetime.datetime(2018, 4, 14, 3, 0),\n", - " datetime.datetime(2018, 4, 14, 4, 0),\n", - " datetime.datetime(2018, 4, 14, 5, 0),\n", - " datetime.datetime(2018, 4, 14, 6, 0),\n", - " datetime.datetime(2018, 4, 14, 7, 0),\n", - " datetime.datetime(2018, 4, 14, 8, 0),\n", - " datetime.datetime(2018, 4, 14, 9, 0),\n", - " datetime.datetime(2018, 4, 14, 10, 0),\n", - " datetime.datetime(2018, 4, 14, 11, 0),\n", - " datetime.datetime(2018, 4, 14, 12, 0),\n", - " datetime.datetime(2018, 4, 14, 13, 0),\n", - " datetime.datetime(2018, 4, 14, 14, 0),\n", - " datetime.datetime(2018, 4, 14, 15, 0),\n", - " datetime.datetime(2018, 4, 14, 16, 0),\n", - " datetime.datetime(2018, 4, 14, 17, 0),\n", - " datetime.datetime(2018, 4, 14, 18, 0),\n", - " datetime.datetime(2018, 4, 14, 19, 0),\n", - " datetime.datetime(2018, 4, 14, 20, 0),\n", - " datetime.datetime(2018, 4, 14, 21, 0),\n", - " datetime.datetime(2018, 4, 14, 22, 0),\n", - " datetime.datetime(2018, 4, 14, 23, 0),\n", - " datetime.datetime(2018, 4, 15, 0, 0),\n", - " datetime.datetime(2018, 4, 15, 1, 0),\n", - " datetime.datetime(2018, 4, 15, 2, 0),\n", - " datetime.datetime(2018, 4, 15, 3, 0),\n", - " datetime.datetime(2018, 4, 15, 4, 0),\n", - " datetime.datetime(2018, 4, 15, 5, 0),\n", - " datetime.datetime(2018, 4, 15, 6, 0),\n", - " datetime.datetime(2018, 4, 15, 7, 0),\n", - " datetime.datetime(2018, 4, 15, 8, 0),\n", - " datetime.datetime(2018, 4, 15, 9, 0),\n", - " datetime.datetime(2018, 4, 15, 10, 0),\n", - " 
datetime.datetime(2018, 4, 15, 11, 0),\n", - " datetime.datetime(2018, 4, 15, 12, 0),\n", - " datetime.datetime(2018, 4, 15, 13, 0),\n", - " datetime.datetime(2018, 4, 15, 14, 0),\n", - " datetime.datetime(2018, 4, 15, 15, 0),\n", - " datetime.datetime(2018, 4, 15, 16, 0),\n", - " datetime.datetime(2018, 4, 15, 17, 0),\n", - " datetime.datetime(2018, 4, 15, 18, 0),\n", - " datetime.datetime(2018, 4, 15, 19, 0),\n", - " datetime.datetime(2018, 4, 15, 20, 0),\n", - " datetime.datetime(2018, 4, 15, 21, 0),\n", - " datetime.datetime(2018, 4, 15, 22, 0),\n", - " datetime.datetime(2018, 4, 15, 23, 0),\n", - " datetime.datetime(2018, 4, 16, 0, 0),\n", - " datetime.datetime(2018, 4, 16, 1, 0),\n", - " datetime.datetime(2018, 4, 16, 2, 0),\n", - " datetime.datetime(2018, 4, 16, 3, 0),\n", - " datetime.datetime(2018, 4, 16, 4, 0),\n", - " datetime.datetime(2018, 4, 16, 5, 0),\n", - " datetime.datetime(2018, 4, 16, 6, 0),\n", - " datetime.datetime(2018, 4, 16, 7, 0),\n", - " datetime.datetime(2018, 4, 16, 8, 0),\n", - " datetime.datetime(2018, 4, 16, 9, 0),\n", - " datetime.datetime(2018, 4, 16, 10, 0),\n", - " datetime.datetime(2018, 4, 16, 11, 0),\n", - " datetime.datetime(2018, 4, 16, 12, 0),\n", - " datetime.datetime(2018, 4, 16, 13, 0),\n", - " datetime.datetime(2018, 4, 16, 14, 0),\n", - " datetime.datetime(2018, 4, 16, 15, 0),\n", - " datetime.datetime(2018, 4, 16, 16, 0),\n", - " datetime.datetime(2018, 4, 16, 17, 0),\n", - " datetime.datetime(2018, 4, 16, 18, 0),\n", - " datetime.datetime(2018, 4, 16, 19, 0),\n", - " datetime.datetime(2018, 4, 16, 20, 0),\n", - " datetime.datetime(2018, 4, 16, 21, 0),\n", - " datetime.datetime(2018, 4, 16, 22, 0),\n", - " datetime.datetime(2018, 4, 16, 23, 0),\n", - " datetime.datetime(2018, 4, 17, 0, 0),\n", - " datetime.datetime(2018, 4, 17, 1, 0),\n", - " datetime.datetime(2018, 4, 17, 2, 0),\n", - " datetime.datetime(2018, 4, 17, 3, 0),\n", - " datetime.datetime(2018, 4, 17, 4, 0),\n", - " datetime.datetime(2018, 4, 17, 5, 0),\n", - " datetime.datetime(2018, 4, 17, 6, 0),\n", - " datetime.datetime(2018, 4, 17, 7, 0),\n", - " datetime.datetime(2018, 4, 17, 8, 0),\n", - " datetime.datetime(2018, 4, 17, 9, 0),\n", - " datetime.datetime(2018, 4, 17, 10, 0),\n", - " datetime.datetime(2018, 4, 17, 11, 0),\n", - " datetime.datetime(2018, 4, 17, 12, 0),\n", - " datetime.datetime(2018, 4, 17, 13, 0),\n", - " datetime.datetime(2018, 4, 17, 14, 0),\n", - " datetime.datetime(2018, 4, 17, 15, 0),\n", - " datetime.datetime(2018, 4, 17, 16, 0),\n", - " datetime.datetime(2018, 4, 17, 17, 0),\n", - " datetime.datetime(2018, 4, 17, 18, 0),\n", - " datetime.datetime(2018, 4, 17, 19, 0),\n", - " datetime.datetime(2018, 4, 17, 20, 0),\n", - " datetime.datetime(2018, 4, 17, 21, 0),\n", - " datetime.datetime(2018, 4, 17, 22, 0),\n", - " datetime.datetime(2018, 4, 17, 23, 0),\n", - " datetime.datetime(2018, 4, 18, 0, 0),\n", - " datetime.datetime(2018, 4, 18, 1, 0),\n", - " datetime.datetime(2018, 4, 18, 2, 0),\n", - " datetime.datetime(2018, 4, 18, 3, 0),\n", - " datetime.datetime(2018, 4, 18, 4, 0),\n", - " datetime.datetime(2018, 4, 18, 5, 0),\n", - " datetime.datetime(2018, 4, 18, 6, 0),\n", - " datetime.datetime(2018, 4, 18, 7, 0),\n", - " datetime.datetime(2018, 4, 18, 8, 0),\n", - " datetime.datetime(2018, 4, 18, 9, 0),\n", - " datetime.datetime(2018, 4, 18, 10, 0),\n", - " datetime.datetime(2018, 4, 18, 11, 0),\n", - " datetime.datetime(2018, 4, 18, 12, 0),\n", - " datetime.datetime(2018, 4, 18, 13, 0),\n", - " datetime.datetime(2018, 4, 18, 14, 0),\n", - " 
datetime.datetime(2018, 4, 18, 15, 0),\n", - " datetime.datetime(2018, 4, 18, 16, 0),\n", - " datetime.datetime(2018, 4, 18, 17, 0),\n", - " datetime.datetime(2018, 4, 18, 18, 0),\n", - " datetime.datetime(2018, 4, 18, 19, 0),\n", - " datetime.datetime(2018, 4, 18, 20, 0),\n", - " datetime.datetime(2018, 4, 18, 21, 0),\n", - " datetime.datetime(2018, 4, 18, 22, 0),\n", - " datetime.datetime(2018, 4, 18, 23, 0),\n", - " datetime.datetime(2018, 4, 19, 0, 0),\n", - " datetime.datetime(2018, 4, 19, 1, 0),\n", - " datetime.datetime(2018, 4, 19, 2, 0),\n", - " datetime.datetime(2018, 4, 19, 3, 0),\n", - " datetime.datetime(2018, 4, 19, 4, 0),\n", - " datetime.datetime(2018, 4, 19, 5, 0),\n", - " datetime.datetime(2018, 4, 19, 6, 0),\n", - " datetime.datetime(2018, 4, 19, 7, 0),\n", - " datetime.datetime(2018, 4, 19, 8, 0),\n", - " datetime.datetime(2018, 4, 19, 9, 0),\n", - " datetime.datetime(2018, 4, 19, 10, 0),\n", - " datetime.datetime(2018, 4, 19, 11, 0),\n", - " datetime.datetime(2018, 4, 19, 12, 0),\n", - " datetime.datetime(2018, 4, 19, 13, 0),\n", - " datetime.datetime(2018, 4, 19, 14, 0),\n", - " datetime.datetime(2018, 4, 19, 15, 0),\n", - " datetime.datetime(2018, 4, 19, 16, 0),\n", - " datetime.datetime(2018, 4, 19, 17, 0),\n", - " datetime.datetime(2018, 4, 19, 18, 0),\n", - " datetime.datetime(2018, 4, 19, 19, 0),\n", - " datetime.datetime(2018, 4, 19, 20, 0),\n", - " datetime.datetime(2018, 4, 19, 21, 0),\n", - " datetime.datetime(2018, 4, 19, 22, 0),\n", - " datetime.datetime(2018, 4, 19, 23, 0),\n", - " datetime.datetime(2018, 4, 20, 0, 0),\n", - " datetime.datetime(2018, 4, 20, 1, 0),\n", - " datetime.datetime(2018, 4, 20, 2, 0),\n", - " datetime.datetime(2018, 4, 20, 3, 0),\n", - " datetime.datetime(2018, 4, 20, 4, 0),\n", - " datetime.datetime(2018, 4, 20, 5, 0),\n", - " datetime.datetime(2018, 4, 20, 6, 0),\n", - " datetime.datetime(2018, 4, 20, 7, 0),\n", - " datetime.datetime(2018, 4, 20, 8, 0),\n", - " datetime.datetime(2018, 4, 20, 9, 0),\n", - " datetime.datetime(2018, 4, 20, 10, 0),\n", - " datetime.datetime(2018, 4, 20, 11, 0),\n", - " datetime.datetime(2018, 4, 20, 12, 0),\n", - " datetime.datetime(2018, 4, 20, 13, 0),\n", - " datetime.datetime(2018, 4, 20, 14, 0),\n", - " datetime.datetime(2018, 4, 20, 15, 0),\n", - " datetime.datetime(2018, 4, 20, 16, 0),\n", - " datetime.datetime(2018, 4, 20, 17, 0),\n", - " datetime.datetime(2018, 4, 20, 18, 0),\n", - " datetime.datetime(2018, 4, 20, 19, 0),\n", - " datetime.datetime(2018, 4, 20, 20, 0),\n", - " datetime.datetime(2018, 4, 20, 21, 0),\n", - " datetime.datetime(2018, 4, 20, 22, 0),\n", - " datetime.datetime(2018, 4, 20, 23, 0),\n", - " datetime.datetime(2018, 4, 21, 0, 0),\n", - " datetime.datetime(2018, 4, 21, 1, 0),\n", - " datetime.datetime(2018, 4, 21, 2, 0),\n", - " datetime.datetime(2018, 4, 21, 3, 0),\n", - " datetime.datetime(2018, 4, 21, 4, 0),\n", - " datetime.datetime(2018, 4, 21, 5, 0),\n", - " datetime.datetime(2018, 4, 21, 6, 0),\n", - " datetime.datetime(2018, 4, 21, 7, 0),\n", - " datetime.datetime(2018, 4, 21, 8, 0),\n", - " datetime.datetime(2018, 4, 21, 9, 0),\n", - " datetime.datetime(2018, 4, 21, 10, 0),\n", - " datetime.datetime(2018, 4, 21, 11, 0),\n", - " datetime.datetime(2018, 4, 21, 12, 0),\n", - " datetime.datetime(2018, 4, 21, 13, 0),\n", - " datetime.datetime(2018, 4, 21, 14, 0),\n", - " datetime.datetime(2018, 4, 21, 15, 0),\n", - " datetime.datetime(2018, 4, 21, 16, 0),\n", - " datetime.datetime(2018, 4, 21, 17, 0),\n", - " datetime.datetime(2018, 4, 21, 18, 0),\n", - " 
datetime.datetime(2018, 4, 21, 19, 0),\n", - " datetime.datetime(2018, 4, 21, 20, 0),\n", - " datetime.datetime(2018, 4, 21, 21, 0),\n", - " datetime.datetime(2018, 4, 21, 22, 0),\n", - " datetime.datetime(2018, 4, 21, 23, 0),\n", - " datetime.datetime(2018, 4, 22, 0, 0),\n", - " datetime.datetime(2018, 4, 22, 1, 0),\n", - " datetime.datetime(2018, 4, 22, 2, 0),\n", - " datetime.datetime(2018, 4, 22, 3, 0),\n", - " datetime.datetime(2018, 4, 22, 4, 0),\n", - " datetime.datetime(2018, 4, 22, 5, 0),\n", - " datetime.datetime(2018, 4, 22, 6, 0),\n", - " datetime.datetime(2018, 4, 22, 7, 0),\n", - " datetime.datetime(2018, 4, 22, 8, 0),\n", - " datetime.datetime(2018, 4, 22, 9, 0),\n", - " datetime.datetime(2018, 4, 22, 10, 0),\n", - " datetime.datetime(2018, 4, 22, 11, 0),\n", - " datetime.datetime(2018, 4, 22, 12, 0),\n", - " datetime.datetime(2018, 4, 22, 13, 0),\n", - " datetime.datetime(2018, 4, 22, 14, 0),\n", - " datetime.datetime(2018, 4, 22, 15, 0),\n", - " datetime.datetime(2018, 4, 22, 16, 0),\n", - " datetime.datetime(2018, 4, 22, 17, 0),\n", - " datetime.datetime(2018, 4, 22, 18, 0),\n", - " datetime.datetime(2018, 4, 22, 19, 0),\n", - " datetime.datetime(2018, 4, 22, 20, 0),\n", - " datetime.datetime(2018, 4, 22, 21, 0),\n", - " datetime.datetime(2018, 4, 22, 22, 0),\n", - " datetime.datetime(2018, 4, 22, 23, 0),\n", - " datetime.datetime(2018, 4, 23, 0, 0),\n", - " datetime.datetime(2018, 4, 23, 1, 0),\n", - " datetime.datetime(2018, 4, 23, 2, 0),\n", - " datetime.datetime(2018, 4, 23, 3, 0),\n", - " datetime.datetime(2018, 4, 23, 4, 0),\n", - " datetime.datetime(2018, 4, 23, 5, 0),\n", - " datetime.datetime(2018, 4, 23, 6, 0),\n", - " datetime.datetime(2018, 4, 23, 7, 0),\n", - " datetime.datetime(2018, 4, 23, 8, 0),\n", - " datetime.datetime(2018, 4, 23, 9, 0),\n", - " datetime.datetime(2018, 4, 23, 10, 0),\n", - " datetime.datetime(2018, 4, 23, 11, 0),\n", - " datetime.datetime(2018, 4, 23, 12, 0),\n", - " datetime.datetime(2018, 4, 23, 13, 0),\n", - " datetime.datetime(2018, 4, 23, 14, 0),\n", - " datetime.datetime(2018, 4, 23, 15, 0),\n", - " datetime.datetime(2018, 4, 23, 16, 0),\n", - " datetime.datetime(2018, 4, 23, 17, 0),\n", - " datetime.datetime(2018, 4, 23, 18, 0),\n", - " datetime.datetime(2018, 4, 23, 19, 0),\n", - " datetime.datetime(2018, 4, 23, 20, 0),\n", - " datetime.datetime(2018, 4, 23, 21, 0),\n", - " datetime.datetime(2018, 4, 23, 22, 0),\n", - " datetime.datetime(2018, 4, 23, 23, 0),\n", - " datetime.datetime(2018, 4, 24, 0, 0),\n", - " datetime.datetime(2018, 4, 24, 1, 0),\n", - " datetime.datetime(2018, 4, 24, 2, 0),\n", - " datetime.datetime(2018, 4, 24, 3, 0),\n", - " datetime.datetime(2018, 4, 24, 4, 0),\n", - " datetime.datetime(2018, 4, 24, 5, 0),\n", - " datetime.datetime(2018, 4, 24, 6, 0),\n", - " datetime.datetime(2018, 4, 24, 7, 0),\n", - " datetime.datetime(2018, 4, 24, 8, 0),\n", - " datetime.datetime(2018, 4, 24, 9, 0),\n", - " datetime.datetime(2018, 4, 24, 10, 0),\n", - " datetime.datetime(2018, 4, 24, 11, 0),\n", - " datetime.datetime(2018, 4, 24, 12, 0),\n", - " datetime.datetime(2018, 4, 24, 13, 0),\n", - " datetime.datetime(2018, 4, 24, 14, 0),\n", - " datetime.datetime(2018, 4, 24, 15, 0),\n", - " datetime.datetime(2018, 4, 24, 16, 0),\n", - " datetime.datetime(2018, 4, 24, 17, 0),\n", - " datetime.datetime(2018, 4, 24, 18, 0),\n", - " datetime.datetime(2018, 4, 24, 19, 0),\n", - " datetime.datetime(2018, 4, 24, 20, 0),\n", - " datetime.datetime(2018, 4, 24, 21, 0),\n", - " datetime.datetime(2018, 4, 24, 22, 0),\n", - " 
datetime.datetime(2018, 4, 24, 23, 0),\n", - " datetime.datetime(2018, 4, 25, 0, 0),\n", - " datetime.datetime(2018, 4, 25, 1, 0),\n", - " datetime.datetime(2018, 4, 25, 2, 0),\n", - " datetime.datetime(2018, 4, 25, 3, 0),\n", - " datetime.datetime(2018, 4, 25, 4, 0),\n", - " datetime.datetime(2018, 4, 25, 5, 0),\n", - " datetime.datetime(2018, 4, 25, 6, 0),\n", - " datetime.datetime(2018, 4, 25, 7, 0),\n", - " datetime.datetime(2018, 4, 25, 8, 0),\n", - " datetime.datetime(2018, 4, 25, 9, 0),\n", - " datetime.datetime(2018, 4, 25, 10, 0),\n", - " datetime.datetime(2018, 4, 25, 11, 0),\n", - " datetime.datetime(2018, 4, 25, 12, 0),\n", - " datetime.datetime(2018, 4, 25, 13, 0),\n", - " datetime.datetime(2018, 4, 25, 14, 0),\n", - " datetime.datetime(2018, 4, 25, 15, 0),\n", - " datetime.datetime(2018, 4, 25, 16, 0),\n", - " datetime.datetime(2018, 4, 25, 17, 0),\n", - " datetime.datetime(2018, 4, 25, 18, 0),\n", - " datetime.datetime(2018, 4, 25, 19, 0),\n", - " datetime.datetime(2018, 4, 25, 20, 0),\n", - " datetime.datetime(2018, 4, 25, 21, 0),\n", - " datetime.datetime(2018, 4, 25, 22, 0),\n", - " datetime.datetime(2018, 4, 25, 23, 0),\n", - " datetime.datetime(2018, 4, 26, 0, 0),\n", - " datetime.datetime(2018, 4, 26, 1, 0),\n", - " datetime.datetime(2018, 4, 26, 2, 0),\n", - " datetime.datetime(2018, 4, 26, 3, 0),\n", - " datetime.datetime(2018, 4, 26, 4, 0),\n", - " datetime.datetime(2018, 4, 26, 5, 0),\n", - " datetime.datetime(2018, 4, 26, 6, 0),\n", - " datetime.datetime(2018, 4, 26, 7, 0),\n", - " datetime.datetime(2018, 4, 26, 8, 0),\n", - " datetime.datetime(2018, 4, 26, 9, 0),\n", - " datetime.datetime(2018, 4, 26, 10, 0),\n", - " datetime.datetime(2018, 4, 26, 11, 0),\n", - " datetime.datetime(2018, 4, 26, 12, 0),\n", - " datetime.datetime(2018, 4, 26, 13, 0),\n", - " datetime.datetime(2018, 4, 26, 14, 0),\n", - " datetime.datetime(2018, 4, 26, 15, 0),\n", - " datetime.datetime(2018, 4, 26, 16, 0),\n", - " datetime.datetime(2018, 4, 26, 17, 0),\n", - " datetime.datetime(2018, 4, 26, 18, 0),\n", - " datetime.datetime(2018, 4, 26, 19, 0),\n", - " datetime.datetime(2018, 4, 26, 20, 0),\n", - " datetime.datetime(2018, 4, 26, 21, 0),\n", - " datetime.datetime(2018, 4, 26, 22, 0),\n", - " datetime.datetime(2018, 4, 26, 23, 0),\n", - " datetime.datetime(2018, 4, 27, 0, 0),\n", - " datetime.datetime(2018, 4, 27, 1, 0),\n", - " datetime.datetime(2018, 4, 27, 2, 0),\n", - " datetime.datetime(2018, 4, 27, 3, 0),\n", - " datetime.datetime(2018, 4, 27, 4, 0),\n", - " datetime.datetime(2018, 4, 27, 5, 0),\n", - " datetime.datetime(2018, 4, 27, 6, 0),\n", - " datetime.datetime(2018, 4, 27, 7, 0),\n", - " datetime.datetime(2018, 4, 27, 8, 0),\n", - " datetime.datetime(2018, 4, 27, 9, 0),\n", - " datetime.datetime(2018, 4, 27, 10, 0),\n", - " datetime.datetime(2018, 4, 27, 11, 0),\n", - " datetime.datetime(2018, 4, 27, 12, 0),\n", - " datetime.datetime(2018, 4, 27, 13, 0),\n", - " datetime.datetime(2018, 4, 27, 14, 0),\n", - " datetime.datetime(2018, 4, 27, 15, 0),\n", - " datetime.datetime(2018, 4, 27, 16, 0),\n", - " datetime.datetime(2018, 4, 27, 17, 0),\n", - " datetime.datetime(2018, 4, 27, 18, 0),\n", - " datetime.datetime(2018, 4, 27, 19, 0),\n", - " datetime.datetime(2018, 4, 27, 20, 0),\n", - " datetime.datetime(2018, 4, 27, 21, 0),\n", - " datetime.datetime(2018, 4, 27, 22, 0),\n", - " datetime.datetime(2018, 4, 27, 23, 0),\n", - " datetime.datetime(2018, 4, 28, 0, 0),\n", - " datetime.datetime(2018, 4, 28, 1, 0),\n", - " datetime.datetime(2018, 4, 28, 2, 0),\n", - " 
datetime.datetime(2018, 4, 28, 3, 0),\n", - " datetime.datetime(2018, 4, 28, 4, 0),\n", - " datetime.datetime(2018, 4, 28, 5, 0),\n", - " datetime.datetime(2018, 4, 28, 6, 0),\n", - " datetime.datetime(2018, 4, 28, 7, 0),\n", - " datetime.datetime(2018, 4, 28, 8, 0),\n", - " datetime.datetime(2018, 4, 28, 9, 0),\n", - " datetime.datetime(2018, 4, 28, 10, 0),\n", - " datetime.datetime(2018, 4, 28, 11, 0),\n", - " datetime.datetime(2018, 4, 28, 12, 0),\n", - " datetime.datetime(2018, 4, 28, 13, 0),\n", - " datetime.datetime(2018, 4, 28, 14, 0),\n", - " datetime.datetime(2018, 4, 28, 15, 0),\n", - " datetime.datetime(2018, 4, 28, 16, 0),\n", - " datetime.datetime(2018, 4, 28, 17, 0),\n", - " datetime.datetime(2018, 4, 28, 18, 0),\n", - " datetime.datetime(2018, 4, 28, 19, 0),\n", - " datetime.datetime(2018, 4, 28, 20, 0),\n", - " datetime.datetime(2018, 4, 28, 21, 0),\n", - " datetime.datetime(2018, 4, 28, 22, 0),\n", - " datetime.datetime(2018, 4, 28, 23, 0),\n", - " datetime.datetime(2018, 4, 29, 0, 0),\n", - " datetime.datetime(2018, 4, 29, 1, 0),\n", - " datetime.datetime(2018, 4, 29, 2, 0),\n", - " datetime.datetime(2018, 4, 29, 3, 0),\n", - " datetime.datetime(2018, 4, 29, 4, 0),\n", - " datetime.datetime(2018, 4, 29, 5, 0),\n", - " datetime.datetime(2018, 4, 29, 6, 0),\n", - " datetime.datetime(2018, 4, 29, 7, 0),\n", - " datetime.datetime(2018, 4, 29, 8, 0),\n", - " datetime.datetime(2018, 4, 29, 9, 0),\n", - " datetime.datetime(2018, 4, 29, 10, 0),\n", - " datetime.datetime(2018, 4, 29, 11, 0),\n", - " datetime.datetime(2018, 4, 29, 12, 0),\n", - " datetime.datetime(2018, 4, 29, 13, 0),\n", - " datetime.datetime(2018, 4, 29, 14, 0),\n", - " datetime.datetime(2018, 4, 29, 15, 0),\n", - " datetime.datetime(2018, 4, 29, 16, 0),\n", - " datetime.datetime(2018, 4, 29, 17, 0),\n", - " datetime.datetime(2018, 4, 29, 18, 0),\n", - " datetime.datetime(2018, 4, 29, 19, 0),\n", - " datetime.datetime(2018, 4, 29, 20, 0),\n", - " datetime.datetime(2018, 4, 29, 21, 0),\n", - " datetime.datetime(2018, 4, 29, 22, 0),\n", - " datetime.datetime(2018, 4, 29, 23, 0),\n", - " datetime.datetime(2018, 4, 30, 0, 0),\n", - " datetime.datetime(2018, 4, 30, 1, 0),\n", - " datetime.datetime(2018, 4, 30, 2, 0),\n", - " datetime.datetime(2018, 4, 30, 3, 0),\n", - " datetime.datetime(2018, 4, 30, 4, 0),\n", - " datetime.datetime(2018, 4, 30, 5, 0),\n", - " datetime.datetime(2018, 4, 30, 6, 0),\n", - " datetime.datetime(2018, 4, 30, 7, 0),\n", - " datetime.datetime(2018, 4, 30, 8, 0),\n", - " datetime.datetime(2018, 4, 30, 9, 0),\n", - " datetime.datetime(2018, 4, 30, 10, 0),\n", - " datetime.datetime(2018, 4, 30, 11, 0),\n", - " datetime.datetime(2018, 4, 30, 12, 0),\n", - " datetime.datetime(2018, 4, 30, 13, 0),\n", - " datetime.datetime(2018, 4, 30, 14, 0),\n", - " datetime.datetime(2018, 4, 30, 15, 0),\n", - " datetime.datetime(2018, 4, 30, 16, 0),\n", - " datetime.datetime(2018, 4, 30, 17, 0),\n", - " datetime.datetime(2018, 4, 30, 18, 0),\n", - " datetime.datetime(2018, 4, 30, 19, 0),\n", - " datetime.datetime(2018, 4, 30, 20, 0),\n", - " datetime.datetime(2018, 4, 30, 21, 0),\n", - " datetime.datetime(2018, 4, 30, 22, 0),\n", - " datetime.datetime(2018, 4, 30, 23, 0)]" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_2.time" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': array([0]), 'units': ''}" - ] - }, - "execution_count": 13, - "metadata": {}, - 
"output_type": "execute_result" - } - ], - "source": [ - "nessy_2.lev" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': masked_array(data=[-64.24006 , -54.84846497, -22.10333333, -31.66861111,\n", - " 47.76666641, 46.677778 , 48.721111 , 47.529167 ,\n", - " 47.05407 , 46.693611 , 47.348056 , 47.973056 ,\n", - " 48.878611 , 48.106111 , 48.371111 , 48.334722 ,\n", - " 48.050833 , 47.838611 , 47.040277 , 47.06694444,\n", - " 49.877778 , 50.629421 , 50.503333 , 41.695833 ,\n", - " 32.27000046, 80.05000305, 46.5475 , 46.813056 ,\n", - " 47.479722 , 47.049722 , 47.0675 , 47.18961391,\n", - " -30.17254 , 16.86403 , 35.0381 , 49.73508444,\n", - " 49.573394 , 49.066667 , 54.925556 , 52.802222 ,\n", - " 47.914722 , 53.166667 , 50.65 , 54.4368 ,\n", - " 47.80149841, 47.4165 , -70.666 , 54.746495 ,\n", - " 81.6 , 55.693588 , 72.58000183, 56.290424 ,\n", - " 59.5 , 58.383333 , 39.54694 , 42.72056 ,\n", - " 39.87528 , 37.23722 , 43.43917 , 41.27417 ,\n", - " 42.31917 , 38.47278 , 39.08278 , 41.23889 ,\n", - " 41.39389 , 42.63472 , 37.05194 , 28.309 ,\n", - " 59.779167 , 60.53002 , 66.320278 , 67.97333333,\n", - " 48.5 , 49.9 , 47.266667 , 43.616667 ,\n", - " 47.3 , 46.65 , 45. , 45.8 ,\n", - " 48.633333 , 42.936667 , 48.70861111, 44.56944444,\n", - " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", - " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", - " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", - " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", - " 51.778056 , 60.13922 , -75.62 , 51.149617 ,\n", - " 38.366667 , 35.316667 , 46.966667 , 46.91 ,\n", - " -0.20194 , 51.939722 , 53.32583 , 45.8 ,\n", - " 44.183333 , 37.571111 , 35.5182 , 42.805462 ,\n", - " -69.005 , 39.0319 , 24.2883 , 24.466941 ,\n", - " 36.53833389, 33.293917 , 55.37611111, 56.161944 ,\n", - " 57.135278 , 41.536111 , 36.0722 , 52.083333 ,\n", - " 53.333889 , 51.541111 , 52.3 , 51.974444 ,\n", - " 58.38853 , 65.833333 , 62.783333 , 78.90715 ,\n", - " 59. 
, 69.45 , 59.2 , 60.372386 ,\n", - " -72.0117 , 59.2 , -41.40819168, -77.83200073,\n", - " -45.0379982 , 51.814408 , 50.736444 , 54.753894 ,\n", - " 54.15 , 43.4 , 71.58616638, 63.85 ,\n", - " 67.883333 , 57.394 , 57.1645 , 57.9525 ,\n", - " 56.0429 , 60.0858 , 57.816667 , 64.25 ,\n", - " 59.728 , 45.566667 , 46.428611 , 46.299444 ,\n", - " 48.933333 , 49.15 , 49.05 , 47.96 ,\n", - " 71.32301331, 40.12498 , 19.53623009, -89.99694824,\n", - " 41.05410004, 21.5731 , -34.35348 ],\n", - " mask=False,\n", - " fill_value=1e+20),\n", - " 'dimensions': ('station',),\n", - " 'standard_name': 'latitude',\n", - " 'units': 'decimal degrees North',\n", - " 'long_name': 'latitude',\n", - " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", - " 'axis': 'Y'}" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_2.lat" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, -6.56008333e+01,\n", - " -6.38819444e+01, 1.67666664e+01, 1.29722220e+01,\n", - " 1.59422220e+01, 9.92666700e+00, 1.29579400e+01,\n", - " 1.39150000e+01, 1.58822220e+01, 1.30161110e+01,\n", - " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", - " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", - " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", - " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", - " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", - " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", - " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", - " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", - " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", - " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", - " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", - " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", - " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", - " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", - " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", - " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", - " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", - " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", - " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", - " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", - " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", - " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", - " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", - " 1.41944000e-01, 2.15888889e+00, 5.27897222e+00,\n", - " 2.61000833e+00, 2.96488600e+00, -3.20416700e+00,\n", - " -7.87000000e+00, -3.71305600e+00, -8.07500000e-01,\n", - " -4.77444400e+00, -3.03305600e+00, -3.20500000e+00,\n", - " -1.75333300e+00, 1.79444000e-01, 1.46305600e+00,\n", - " -4.69146200e+00, 2.92778000e-01, -3.24290000e+00,\n", - " 1.12194400e+00, 1.08223000e+00, -1.18531900e+00,\n", - " -2.61800000e+01, -1.43822800e+00, 2.30833330e+01,\n", - " 2.56666670e+01, 1.95833330e+01, 1.63200000e+01,\n", - " 1.00318100e+02, -1.02444440e+01, -9.89944000e+00,\n", - " 8.63333300e+00, 1.07000000e+01, 1.26597220e+01,\n", - " 1.26305000e+01, 1.25656450e+01, 3.95905556e+01,\n", - " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", - " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", - " 2.11730560e+01, 2.59055560e+01, 2.06938900e+01,\n", - " 1.42184000e+01, 6.56666700e+00, 
6.27722200e+00,\n", - " 5.85361100e+00, 4.50000000e+00, 4.92361100e+00,\n", - " 8.25200000e+00, 1.39166670e+01, 8.88333300e+00,\n", - " 1.18866800e+01, 1.15333330e+01, 3.00333330e+01,\n", - " 5.20000000e+00, 1.10781420e+01, 2.53510000e+00,\n", - " 9.51666700e+00, 1.74870804e+02, 1.66660004e+02,\n", - " 1.69684006e+02, 2.19724190e+01, 1.57395000e+01,\n", - " 1.75342640e+01, 2.20666670e+01, 2.19500000e+01,\n", - " 1.28918823e+02, 1.53333330e+01, 2.10666670e+01,\n", - " 1.19140000e+01, 1.47825000e+01, 1.24030000e+01,\n", - " 1.31480000e+01, 1.75052800e+01, 1.55666670e+01,\n", - " 1.97666670e+01, 1.54720000e+01, 1.48666670e+01,\n", - " 1.50033330e+01, 1.45386110e+01, 1.95833330e+01,\n", - " 2.02833330e+01, 2.22666670e+01, 1.78605560e+01,\n", - " -1.56611465e+02, -1.05236800e+02, -1.55576157e+02,\n", - " -2.47999992e+01, -1.24151001e+02, 1.03515700e+02,\n", - " 1.84896800e+01],\n", - " mask=False,\n", - " fill_value=1e+20),\n", - " 'dimensions': ('station',),\n", - " 'standard_name': 'longitude',\n", - " 'units': 'decimal degrees East',\n", - " 'long_name': 'longitude',\n", - " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", - " 'axis': 'X'}" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy_2.lon" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Loading grid_edge_latitude var (1/6)\n", - "Rank 000: Loaded grid_edge_latitude var ((1125,))\n", - "Rank 000: Loading grid_edge_longitude var (2/6)\n", - "Rank 000: Loaded grid_edge_longitude var ((1125,))\n", - "Rank 000: Loading model_centre_latitude var (3/6)\n", - "Rank 000: Loaded model_centre_latitude var ((211, 351))\n", - "Rank 000: Loading model_centre_longitude var (4/6)\n", - "Rank 000: Loaded model_centre_longitude var ((211, 351))\n", - "Rank 000: Loading sconco3 var (5/6)\n", - "Rank 000: Loaded sconco3 var ((175, 720))\n", - "Rank 000: Loading station_reference var (6/6)\n", - "Rank 000: Loaded station_reference var ((175,))\n" - ] - } - ], - "source": [ - "nessy_2.load()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Write" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating providentia_exp_file.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n", - "Rank 000: Writing grid_edge_latitude var (1/6)\n", - "**ERROR** an error has occurred while writing the 'grid_edge_latitude' variable\n" - ] - }, - { - "ename": "ValueError", - "evalue": "cannot find dimension grid_edge in this group or parent groups", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0mdim\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdimensions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mdimname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'dimensions'", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 52\u001b[0;31m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'parent'", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mnessy_2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_netcdf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'providentia_exp_file.nc'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36mto_netcdf\u001b[0;34m(self, path, compression_level, serial, info, chunking)\u001b[0m\n\u001b[1;32m 1431\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1432\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1433\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__to_netcdf_py\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1434\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1435\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_info\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mold_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36m__to_netcdf_py\u001b[0;34m(self, path, chunking)\u001b[0m\n\u001b[1;32m 1381\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1382\u001b[0m \u001b[0;31m# Create variables\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1383\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_create_variables\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetcdf\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1384\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1385\u001b[0m \u001b[0;31m# Create metadata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 439\u001b[0m \u001b[0;31m# print(\"**ERROR** an error hase occurred while writing the '{0}' variable\".format(var_name),\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 440\u001b[0m \u001b[0;31m# file=sys.stderr)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 442\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 443\u001b[0m \u001b[0mmsg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'WARNING!!! '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 379\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 380\u001b[0m var = netcdf.createVariable(var_name, var_dtype, var_dims,\n\u001b[0;32m--> 381\u001b[0;31m zlib=self.zip_lvl > 0, complevel=self.zip_lvl)\n\u001b[0m\u001b[1;32m 382\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmaster\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Dataset.createVariable\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Variable.__init__\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"cannot find dimension %s in this group or parent groups\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mdimname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdim\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mValueError\u001b[0m: cannot find dimension grid_edge in this 
group or parent groups"
 - ]
 - }
 - ],
 - "source": [
 - "nessy_2.to_netcdf('providentia_exp_file.nc', info=True)"
 - ]
 - },
 - {
 - "cell_type": "code",
 - "execution_count": null,
 - "metadata": {},
 - "outputs": [],
 - "source": []
 - }
 - ],
 - "metadata": {
 - "kernelspec": {
 - "display_name": "Python 3",
 - "language": "python",
 - "name": "python3"
 - },
 - "language_info": {
 - "codemirror_mode": {
 - "name": "ipython",
 - "version": 3
 - },
 - "file_extension": ".py",
 - "mimetype": "text/x-python",
 - "name": "python",
 - "nbconvert_exporter": "python",
 - "pygments_lexer": "ipython3",
 - "version": "3.7.4"
 - }
 - },
 - "nbformat": 4,
 - "nbformat_minor": 4
-}
diff --git a/Jupyter_notebooks/input/mercator_grid_example.nc b/Jupyter_notebooks/input/mercator_grid_example.nc
deleted file mode 100644
index f3c3b0866b358adc9bb15f72f10d31a3c56d96e4..0000000000000000000000000000000000000000
Binary files a/Jupyter_notebooks/input/mercator_grid_example.nc and /dev/null differ
diff --git a/README.md b/README.md
index 95ad6c17febf3a5bd178894d65f3ee51bcaac208..cd76560edb7d0dd536901d95c62fc50d0d03c7a5 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,30 @@
-# NES
+# About

-NetCDF for Earth Science
+NES (NetCDF for Earth Science) is the Python I/O library that SNES, the framework implementing the data post-processing pipelines at the Earth Sciences department, uses to read and write netCDF files.

-test local
\ No newline at end of file
+# How to clone
+
+Use the following command to get a copy of the repository:
+
+```
+git clone https://earth.bsc.es/gitlab/es/NES.git
+```
+
+You can use the latest stable version of NES
+by accessing the production branch:
+
+```
+git checkout production
+```
+
+You can also access the master branch to test new features
+that will be included in the upcoming release:
+
+```
+git checkout master
+```
+
+# How to run
+
+To run NES, please follow the instructions in
+the [wiki](https://earth.bsc.es/gitlab/es/NES/-/wikis/home).
\ No newline at end of file
diff --git a/nes/__init__.py b/nes/__init__.py
index a597aeec8aea8391855767a4cc79976e867c3ec5..a15e5c1dc0bd4acf3e6df7384aabce7c0b78dcb2 100644
--- a/nes/__init__.py
+++ b/nes/__init__.py
@@ -1,5 +1,5 @@
-__date__ = "2022-08-12"
-__version__ = "0.9.0"
+__date__ = "2022-11-24"
+__version__ = "1.0.0"

 from .load_nes import open_netcdf, concatenate_netcdfs
 from .create_nes import create_nes
diff --git a/nes/create_nes.py b/nes/create_nes.py
index 6c321a36a074179925118e9df300f39147db19de..41c42884f08a54b551a790b637ae58af0b55da1d 100644
--- a/nes/create_nes.py
+++ b/nes/create_nes.py
@@ -7,8 +7,35 @@ from .nc_projections import *

 def create_nes(comm=None, info=False, projection=None, parallel_method='Y', balanced=False,
-               strlen=75, times=None, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, **kwargs):
-
+               strlen=75, times=None, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None,
+               **kwargs):
+    """
+    Create a Nes class from scratch.
+
+    Parameters
+    ----------
+    comm: MPI.Communicator
+        MPI Communicator.
+    info: bool
+        Indicates if you want to get reading/writing info.
+    parallel_method : str
+        Indicates the parallelization method that you want. Default: 'Y'.
+        Accepted values: ['X', 'Y', 'T'].
+    balanced : bool
+        Indicates if you want a balanced parallelization or not.
+        Balanced dataset cannot be written in chunking mode.
+    avoid_first_hours : int
+        Number of hours to remove from first time steps.
+    avoid_last_hours : int
+        Number of hours to remove from last time steps.
+ first_level : int + Index of the first level to use. + last_level : int, None + Index of the last level to use. None if it is the last. + kwargs : + Projection dependent parameters to create it from scratch. + """ + if comm is None: comm = MPI.COMM_WORLD else: diff --git a/nes/interpolation/__init__.py b/nes/interpolation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..22c351a85dec0724f58fe1b70502c171b5286c7a --- /dev/null +++ b/nes/interpolation/__init__.py @@ -0,0 +1,3 @@ +from .vertical_interpolation import add_4d_vertical_info +from .vertical_interpolation import interpolate_vertical +from .horizontal_interpolation import interpolate_horizontal diff --git a/nes/interpolation/horizontal_interpolation.py b/nes/interpolation/horizontal_interpolation.py index e2aff54c1589fa1d7e1fedae54ca68dba7bb11cf..74dadd918532cfc0b205b13da10361380816b95e 100644 --- a/nes/interpolation/horizontal_interpolation.py +++ b/nes/interpolation/horizontal_interpolation.py @@ -1,6 +1,7 @@ #!/usr/bin/env python import sys +import warnings import numpy as np import os import nes @@ -12,25 +13,28 @@ from warnings import warn def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='NearestNeighbour', n_neighbours=4, - info=False): + info=False, to_providentia=False): """ Horizontal interpolation from one grid to another one. Parameters ---------- self : nes.Nes - Source projection Nes Object + Source projection Nes Object. dst_grid : nes.Nes - Final projection Nes object + Final projection Nes object. weight_matrix_path : str, None - Path to the weight matrix to read/create + Path to the weight matrix to read/create. kind : str - Kind of horizontal interpolation. choices = ['NearestNeighbour'] + Kind of horizontal interpolation. Accepted values: ['NearestNeighbour']. n_neighbours : int - Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4 - info: bool + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4. + info : bool Indicates if you want to print extra info during the interpolation process. + to_providentia : bool + Indicates if we want the interpolated grid in Providentia format. """ + # Obtain weight matrix if self.parallel_method == 'T': weights, idx = get_weights_idx_t_axis(self, dst_grid, weight_matrix_path, kind, n_neighbours) @@ -78,16 +82,25 @@ def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='Neares raise IndexError("Data with vertical levels cannot be interpolated to points") final_dst.variables[var_name]['data'] = final_dst.variables[var_name]['data'].reshape( (src_shape[0], idx.shape[-1])) - if isinstance(dst_grid, nes.PointsNesGHOST): - final_dst.erase_flags() - print('pre final shape:', final_dst.variables[var_name]['data'].shape) - final_dst.variables[var_name]['data'] = final_dst.variables[var_name]['data'].T - # final_dst.variables[var_name]['dtype'] = final_dst.variables[var_name]['data'].dtype - final_dst.variables[var_name]['dimensions'] = ('station', 'time') - print('final shape:', final_dst.variables[var_name]['data'].shape) - + if isinstance(dst_grid, nes.PointsNesGHOST) and not to_providentia: + final_dst = final_dst.to_points() + final_dst.global_attrs = self.global_attrs + if to_providentia: + # self = experiment to interpolate (regular, rotated, etc.) 
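+        #   e.g. (illustrative names only, assuming the function is bound as a Nes
+        #   method): self = open_netcdf('experiment.nc'), then called as
+        #   self.interpolate_horizontal(points_grid, to_providentia=True)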
+        # final_dst = interpolated experiment (points)
+        if isinstance(final_dst, nes.PointsNes):
+            model_centre_lat, model_centre_lon = self.create_providentia_exp_centre_coordinates()
+            grid_edge_lat, grid_edge_lon = self.create_providentia_exp_grid_edge_coordinates()
+            final_dst = final_dst.to_providentia(model_centre_lon=model_centre_lon,
+                                                 model_centre_lat=model_centre_lat,
+                                                 grid_edge_lon=grid_edge_lon,
+                                                 grid_edge_lat=grid_edge_lat)
+        else:
+            msg = "The final projection must be points to interpolate an experiment and get it in Providentia format."
+            warnings.warn(msg)
+
     return final_dst
@@ -97,20 +110,20 @@ def get_src_data(comm, var_data, idx, parallel_method):
     """
     Parameters
     ----------
-    comm : MPI.Communicator
-
+    comm : MPI.Communicator
+        MPI Communicator.
     var_data : np.array
         Rank source data.
     idx : np.array
-        Index of the needed data in a 2D flatten way
+        Index of the needed data in a 2D flattened way.
     parallel_method: str
-        Source parallel method
+        Source parallel method.

     Returns
     -------
     np.array
-        Flatten source needed data
+        Flattened source data.
     """
+
     if parallel_method == 'T':
         var_data = var_data.flatten()
     else:
@@ -128,6 +141,7 @@ def get_src_data(comm, var_data, idx, parallel_method):
             var_data = comm.bcast(var_data)

     var_data = np.take(var_data, idx)
+
     return var_data
@@ -139,22 +153,22 @@ def get_weights_idx_t_axis(self, dst_grid, weight_matrix_path, kind, n_neighbour
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     weight_matrix_path : str, None
-        Path to the weight matrix to read/create
+        Path to the weight matrix to read/create.
     kind : str
-        Kind of horizontal interpolation. choices = ['NearestNeighbour']
+        Kind of horizontal interpolation. Accepted values: ['NearestNeighbour'].
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.

     Returns
     -------
     tuple
-        weights and source data index
-
+        Weights and source data index.
     """
+
     if weight_matrix_path is not None:
         with FileLock(weight_matrix_path.replace('.nc', '.lock')):
             if self.master:
@@ -209,22 +223,22 @@ def get_weights_idx_xy_axis(self, dst_grid, weight_matrix_path, kind, n_neighbou
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     weight_matrix_path : str, None
-        Path to the weight matrix to read/create
+        Path to the weight matrix to read/create.
     kind : str
-        Kind of horizontal interpolation. choices = ['NearestNeighbour']
+        Kind of horizontal interpolation. Accepted values: ['NearestNeighbour'].
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.

     Returns
     -------
     tuple
-        weights and source data index
-
+        Weights and source data index.
     """
+
     if isinstance(dst_grid, nes.PointsNes) and weight_matrix_path is not None:
         if self.master:
             warn("To point weight matrix cannot be saved.")
@@ -290,21 +304,23 @@ def get_weights_idx_xy_axis(self, dst_grid, weight_matrix_path, kind, n_neighbou

 def read_weight_matrix(weight_matrix_path, comm=None, parallel_method='T'):
     """
+    Read weight matrix.

     Parameters
     ----------
     weight_matrix_path : str
-        Path of the weight matrix
+        Path of the weight matrix.
     comm : MPI.Communicator
-        Communicator to read the weight matrix
+        Communicator to read the weight matrix.
     parallel_method : str
         Nes parallel method to read the weight matrix.

     Returns
     -------
     nes.Nes
-        Weight matrix
+        Weight matrix.
     """
+
     weight_matrix = nes.open_netcdf(path=weight_matrix_path, comm=comm, parallel_method=parallel_method,
                                     balanced=True)
     weight_matrix.load()
@@ -318,20 +334,20 @@ def create_nn_weight_matrix(self, dst_grid, n_neighbours=4, info=False):
     """
     Parameters
     ----------
     self : nes.Nes
-        Source projection Nes Object
+        Source projection Nes Object.
     dst_grid : nes.Nes
-        Final projection Nes object
+        Final projection Nes object.
     n_neighbours : int
-        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4
+        Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4.
     info: bool
         Indicates if you want to print extra info during the interpolation process.

     Returns
     -------
     nes.Nes
-        Weight Matrix
-
+        Weight matrix.
     """
+
     if info and self.master:
         print("\tCreating Nearest Neighbour Weight Matrix with {0} neighbours".format(n_neighbours))
         sys.stdout.flush()
@@ -392,22 +408,24 @@ def create_nn_weight_matrix(self, dst_grid, n_neighbours=4, info=False):
     weight_matrix.variables['idx'] = {'data': idx_transf, 'units': ''}
     weight_matrix.lev = {'data': np.arange(inverse_dists_transf.shape[1]), 'units': ''}
     weight_matrix._lev = {'data': np.arange(inverse_dists_transf.shape[1]), 'units': ''}
+
     return weight_matrix


 def lon_lat_to_cartesian(lon, lat, radius=1.0):
     """
-    To calculate lon, lat coordinates of a point on a sphere
+    Convert the lon, lat coordinates of a point on a sphere to Cartesian coordinates.

     Parameters
     ----------
     lon : np.array
-        Longitude values
+        Longitude values.
     lat : np.array
-        Latitude values
+        Latitude values.
     radius : float
         Radius of the sphere to get the distances.
     """
+
     lon_r = np.radians(lon)
     lat_r = np.radians(lat)

diff --git a/nes/interpolation/vertical_interpolation.py b/nes/interpolation/vertical_interpolation.py
index 30eff96dd75ecd92ba2a2f6279a668ff04a72b8c..461dbcba90441444aa5ae779c4b35947006caeb1 100644
--- a/nes/interpolation/vertical_interpolation.py
+++ b/nes/interpolation/vertical_interpolation.py
@@ -10,15 +10,16 @@ from copy import copy
 def add_4d_vertical_info(self, info_to_add):
     """
     To add the vertical information from another source.
-
     Parameters
     ----------
     self : nes.Nes
-        Source Nes object
+        Source Nes object.
     info_to_add : nes.Nes, str
-        Nes object with the vertical information as variable or str with the path to the NetCDF file that contains
+        Nes object with the vertical information as variable or str with the path to the NetCDF file that contains
         the vertical data.
     """
+
     vertical_var = list(self.concatenate(info_to_add))
     self.vertical_var_name = vertical_var[0]

@@ -27,174 +28,157 @@ def add_4d_vertical_info(self, info_to_add):

 def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', extrapolate=None, info=None):
     """
-    Vertical interpolation method
+    Vertical interpolation method.

     Parameters
     ----------
     self : Nes
-        Source Nes object
+        Source Nes object.
     new_levels : list
-        List of new vertical levels
+        List of new vertical levels.
     new_src_vertical
+        Nes object or path with the vertical information to use as source (passed to add_4d_vertical_info).
     kind : str
         Vertical interpolation type.
     extrapolate : None, tuple, str
-        Extrapolate method (for non linear operations)
+        Extrapolate method (for non-linear operations).
     info: None, bool
-        Indicates if you want to print extra information
+        Indicates if you want to print extra information.
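+
+    Example
+    -------
+    Illustrative call (the path and level values are placeholders; assumes the function
+    is exposed as a Nes method, as its `self` parameter suggests)::
+
+        nessy.add_4d_vertical_info('vertical_data.nc')
+        nessy.interpolate_vertical([0., 50., 100., 250., 500.], kind='linear')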
""" + if info is None: - info = self.print_info + info = self.info if new_src_vertical is not None: self.add_4d_vertical_info(new_src_vertical) + if new_levels[0] > new_levels[-1]: + ascendant = False + else: + ascendant = True nz_new = len(new_levels) if self.vertical_var_name is None: # To use current level data - for var_name, var_info in self.variables.items(): - if var_info['data'] is None: - self.load(var_name) - if var_name != self.vertical_var_name: - if info and self.master: - print("\t{var} vertical interpolation".format(var=var_name)) - sys.stdout.flush() - src_data = np.flip(var_info['data'], axis=1) - nt, nz, ny, nx = src_data.shape - dst_data = np.ma.masked_all((nt, nz_new, ny, nx)) - - for t in range(nt): - if info and self.master: - print('\t\tTime step: {0} ({1}/{2}))'.format(self.time[t], t + 1, nt)) - sys.stdout.flush() - for j in range(ny): - for i in range(nx): - if extrapolate is None: - fill_value = (np.float64(src_data[t, 0, j, i]), np.float64(src_data[t, -1, j, i])) - else: - fill_value = extrapolate - try: - # f = interp1d(np.array(self.lev['data'], dtype=np.float64), - # np.array(src_data[t, :, j, i], dtype=np.float64), - # kind=kind, - # bounds_error=False, - # fill_value=fill_value) - # dst_data[t, :, j, i] = np.array(f(new_levels), dtype=np.float32) - if kind == 'linear': - dst_data[t, :, j, i] = np.array( - np.interp(new_levels, - np.array(self.lev['data'], dtype=np.float64), - np.array(src_data[t, :, j, i], dtype=np.float64)), - dtype=src_data.dtype) - else: - dst_data[t, :, j, i] = np.array( - interp1d(np.array(self.lev['data'], dtype=np.float64), - np.array(src_data[t, :, j, i], dtype=np.float64), - kind=kind, - bounds_error=False, - fill_value=fill_value)(new_levels), - dtype=src_data.dtype) - except Exception as e: - print("time lat lon", t, j, i) - print("***********************") - print("LEVELS", self.lev['data']) - print("VAR", src_data[t, :, j, i]) - print("+++++++++++++++++++++++") - raise Exception(str(e)) - self.variables[var_name]['data'] = copy(dst_data) - # Updating level information - self.lev['data'] = new_levels - self._lev['data'] = new_levels - # raise NotImplementedError('Vertical information with no 4D vertical data is not implemented') + current_level = True + # Checking old order + src_levels = self.lev['data'] + if src_levels[0] > src_levels[-1]: + if not ascendant: + flip = False + else: + flip = True + src_levels = np.flip(src_levels) + else: + if ascendant: + flip = False + else: + flip = True + src_levels = np.flip(src_levels) else: + current_level = False src_levels = self.variables[self.vertical_var_name]['data'] if self.vertical_var_name == 'layer_thickness': - src_levels = np.cumsum(np.flip(src_levels, axis=1), axis=1) + src_levels = np.flip(np.cumsum(np.flip(src_levels, axis=1), axis=1)) else: - src_levels = np.flip(src_levels, axis=1) - for var_name, var_info in self.variables.items(): - if var_info['data'] is None: - self.load(var_name) - if var_name != self.vertical_var_name: - if info and self.master: - print("\t{var} vertical interpolation".format(var=var_name)) + # src_levels = np.flip(src_levels, axis=1) + pass + # Checking old order + if np.nanmean(src_levels[:, 0, :, :]) > np.nanmean(src_levels[:, -1, :, :]): + if not ascendant: + flip = False + else: + flip = True + src_levels = np.flip(src_levels, axis=1) + else: + if ascendant: + flip = False + else: + flip = True + src_levels = np.flip(src_levels, axis=1) + + # Loop over variables + for var_name in self.variables.keys(): + if self.variables[var_name]['data'] is 
None: + # Loading data if it is not loaded yet + self.load(var_name) + + if var_name != self.vertical_var_name: + if flip: + self.variables[var_name]['data'] = np.flip(self.variables[var_name]['data'], axis=1) + if info and self.master: + print("\t{var} vertical interpolation".format(var=var_name)) + sys.stdout.flush() + nt, nz, ny, nx = self.variables[var_name]['data'].shape + dst_data = np.empty((nt, nz_new, ny, nx), dtype=self.variables[var_name]['data'].dtype) + for t in range(nt): + # if info and self.rank == self.size - 1: + if self.info and self.master: + print('\t\t{3} time step {0} ({1}/{2}))'.format(self.time[t], t + 1, nt, var_name)) sys.stdout.flush() - src_data = np.flip(var_info['data'], axis=1) - nt, nz, ny, nx = src_data.shape - dst_data = np.empty((nt, nz_new, ny, nx), dtype=np.float32) - - for t in range(nt): - # if info and self.rank == self.size - 1: - if self.print_info and self.master: - print('\t\t{3} time step {0} ({1}/{2}))'.format(self.time[t], t+1, nt, var_name)) - sys.stdout.flush() - for j in range(ny): - for i in range(nx): - # level_array = None - # nl = src_levels[t, 0, j, i] - src_levels[t, 1, j, i] - # if nl > 0: - # level_max = np.max(src_levels[t, :, j, i]) - # level_array = np.asarray(new_levels) - # level_array = level_array.astype('float32') - # level_array = np.where(level_array > level_max, level_max, level_array) - # if nl < 0: - # level_min = np.min(src_levels[t, :, j, i]) - # level_array = np.asarray(new_levels) - # level_array = level_array.astype('float32') - # level_array = np.where(level_array < level_min, level_min, level_array) - curr_level_values = src_levels[t, :, j, i] - try: - # check if all values are identical or masked - if ((isinstance(curr_level_values, np.ndarray) and - (curr_level_values == curr_level_values[0]).all()) or - (isinstance(curr_level_values, np.ma.core.MaskedArray) and - curr_level_values.mask.all())): - kind = 'slinear' - else: - kind = kind # 'cubic' - if extrapolate is None: - fill_value = (np.float64(src_data[t, 0, j, i]), np.float64(src_data[t, -1, j, i])) - else: - fill_value = extrapolate - - if kind == 'linear': - dst_data[t, :, j, i] = np.array( - np.interp(new_levels, - np.array(src_levels[t, :, j, i], dtype=np.float64), - np.array(src_data[t, :, j, i], dtype=np.float64)), - dtype=src_data.dtype) - else: - dst_data[t, :, j, i] = np.array( - interp1d(np.array(src_levels[t, :, j, i], dtype=np.float64), - np.array(src_data[t, :, j, i], dtype=np.float64), - kind=kind, - bounds_error=False, - fill_value=fill_value)(new_levels), - dtype=src_data.dtype) - except Exception as e: - print("time lat lon", t, j, i) - print("***********************") - print("LEVELS", np.array(src_levels[t, :, j, i], dtype=np.float64)) - print("DATA", np.array(src_data[t, :, j, i], dtype=np.float64)) - print("METHOD", kind) - print("FILL_VALUE", fill_value) - print("+++++++++++++++++++++++") - raise Exception(str(e)) - # if level_array is not None: - # dst_data[t, :, j, i] = np.array(f(level_array), dtype=np.float32) - - self.variables[var_name]['data'] = copy(dst_data) - # print(self.variables[var_name]['data']) - - # Updating level information - new_lev_info = {'data': new_levels} - for var_attr, attr_info in self.variables[self.vertical_var_name].items(): - if var_attr not in ['data', 'dimensions', 'crs', 'grid_mapping']: - new_lev_info[var_attr] = copy(attr_info) - self.lev = new_lev_info - self._lev = new_lev_info - self.free_vars(self.vertical_var_name) - self.vertical_var_name = None + for j in range(ny): + for i in range(nx): + 
curr_level_values = src_levels[t, :, j, i]
+                        try:
+                            # check if all values are identical or masked
+                            if ((isinstance(curr_level_values, np.ndarray) and
+                                 (curr_level_values == curr_level_values[0]).all()) or
+                                (isinstance(curr_level_values, np.ma.core.MaskedArray) and
+                                 curr_level_values.mask.all())):
+                                kind = 'slinear'
+                            else:
+                                kind = kind  # 'cubic'
+                            if extrapolate is None:
+                                fill_value = (np.float64(self.variables[var_name]['data'][t, 0, j, i]),
+                                              np.float64(self.variables[var_name]['data'][t, -1, j, i]))
+                            else:
+                                fill_value = extrapolate
-    return None
+                            # We force the interpolation to use float64 to avoid negative values;
+                            # we have not identified why the negative values appear with float32.
+                            if current_level:
+                                # 1D vertical component
+                                src_levels_aux = src_levels
+                            else:
+                                # 4D vertical component
+                                src_levels_aux = src_levels[t, :, j, i]
+
+                            if kind == 'linear':
+                                dst_data[t, :, j, i] = np.array(
+                                    np.interp(new_levels,
+                                              np.array(src_levels_aux, dtype=np.float64),
+                                              np.array(self.variables[var_name]['data'][t, :, j, i], dtype=np.float64)),
+                                    dtype=self.variables[var_name]['data'].dtype)
+                            else:
+                                dst_data[t, :, j, i] = np.array(
+                                    interp1d(np.array(src_levels_aux, dtype=np.float64),
+                                             np.array(self.variables[var_name]['data'][t, :, j, i], dtype=np.float64),
+                                             kind=kind,
+                                             bounds_error=False,
+                                             fill_value=fill_value)(new_levels),
+                                    dtype=self.variables[var_name]['data'].dtype)
+                        except Exception as e:
+                            print("time lat lon", t, j, i)
+                            print("***********************")
+                            print("LEVELS", np.array(src_levels[t, :, j, i], dtype=np.float64))
+                            print("DATA", np.array(self.variables[var_name]['data'][t, :, j, i], dtype=np.float64))
+                            print("METHOD", kind)
+                            print("FILL_VALUE", fill_value)
+                            print("+++++++++++++++++++++++")
+                            raise Exception(str(e))
+                        # if level_array is not None:
+                        #     dst_data[t, :, j, i] = np.array(f(level_array), dtype=np.float32)
+
+            self.variables[var_name]['data'] = copy(dst_data)
+            # print(self.variables[var_name]['data'])
+
+    # Updating level information
+    new_lev_info = {'data': new_levels}
+    for var_attr, attr_info in self.variables[self.vertical_var_name].items():
+        if var_attr not in ['data', 'dimensions', 'crs', 'grid_mapping']:
+            new_lev_info[var_attr] = copy(attr_info)
+    self.lev = new_lev_info
+    self._lev = new_lev_info
+    self.free_vars(self.vertical_var_name)
+    self.vertical_var_name = None
+
+    return self
diff --git a/nes/load_nes.py b/nes/load_nes.py
index 6d43b4d0841581d3d602a52274d86d37cd636c10..525e7267f45428bef4563acea7031071da9c0197 100644
--- a/nes/load_nes.py
+++ b/nes/load_nes.py
@@ -11,37 +11,38 @@ from .nc_projections import *
 def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y',
                 avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, balanced=False):
     """
-    Open a netCDF file
+    Open a netCDF file.

     Parameters
     ----------
     path : str
-        Path to the NetCDF file to read
+        Path to the NetCDF file to read.
     comm : MPI.COMM
-        MPI communicator to use in that netCDF. Default: MPI.COMM_WORLD
+        MPI communicator to use in that netCDF. Default: MPI.COMM_WORLD.
     xarray : bool
-        (Not working) Indicates if you want to use xarray. Default: False
+        (Not working) Indicates if you want to use xarray. Default: False.
     info : bool
-        Indicates if you want to print (stdout) the reading/writing steps
+        Indicates if you want to print (stdout) the reading/writing steps.
     avoid_first_hours : int
         Number of hours to remove from first time steps.
     avoid_last_hours : int
        Number of hours to remove from last time steps.
     parallel_method : str
-        Indicates the parallelization method that you want. Default: 'Y' (over Y axis)
-        accepted values: ['X', 'Y', 'T']
+        Indicates the parallelization method that you want. Default: 'Y'.
+        Accepted values: ['X', 'Y', 'T'].
     balanced : bool
-        Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode
+        Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode.
     first_level : int
-        Index of the first level to use
+        Index of the first level to use.
     last_level : int, None
         Index of the last level to use. None if it is the last.

     Returns
     -------
     Nes
-        Nes object. Variables read in lazy mode (only metadata)
+        Nes object. Variables read in lazy mode (only metadata).
     """
+
     if comm is None:
         comm = MPI.COMM_WORLD
     else:
@@ -57,6 +58,7 @@ def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y',
         dataset = Dataset(path, format="NETCDF4", mode='r', parallel=False)
     else:
         dataset = Dataset(path, format="NETCDF4", mode='r', parallel=True, comm=comm, info=MPI.Info())
+
     if __is_rotated(dataset):
         # Rotated grids
         nessy = RotatedNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method,
@@ -72,6 +74,13 @@ def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y',
                          parallel_method=parallel_method,
                          avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours,
                          first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,)
+    elif __is_points_providentia(dataset):
+        # Points - Providentia
+        nessy = PointsNesProvidentia(comm=comm, dataset=dataset, xarray=xarray, info=info,
+                                     parallel_method=parallel_method,
+                                     avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours,
+                                     first_level=first_level, last_level=last_level, create_nes=False,
+                                     balanced=balanced,)
     else:
         # Points - non-GHOST
         nessy = PointsNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method,
@@ -103,13 +112,14 @@
     Parameters
     ----------
     dataset : Dataset
-        netcdf4-python opened dataset object
+        netcdf4-python opened dataset object.

     Returns
     -------
     value : bool
-        Indicated if the netCDF is a rotated one
+        Indicates if the netCDF is a rotated one.
     """
+
     if 'rotated_pole' in dataset.variables.keys():
         return True
     else:
@@ -123,13 +133,14 @@ def __is_points(dataset):
     Parameters
     ----------
     dataset : Dataset
-        netcdf4-python opened dataset object
+        netcdf4-python opened dataset object.

     Returns
     -------
     value : bool
-        Indicated if the netCDF is a points non-GHOST one
+        Indicates if the netCDF is a points non-GHOST one.
     """
+
     if 'station' in dataset.dimensions:
         return True
     else:
@@ -143,19 +154,42 @@ def __is_points_ghost(dataset):
     Parameters
     ----------
     dataset : Dataset
-        netcdf4-python opened dataset object
+        netcdf4-python opened dataset object.

     Returns
     -------
     value : bool
-        Indicated if the netCDF is a points GHOST one
+        Indicates if the netCDF is a points GHOST one.
     """
+
     if 'N_flag_codes' in dataset.dimensions and 'N_qa_codes' in dataset.dimensions:
         return True
     else:
         return False


+def __is_points_providentia(dataset):
+    """
+    Check if the netCDF is a points dataset in Providentia format or not.
+
+    Parameters
+    ----------
+    dataset : Dataset
+        netcdf4-python opened dataset object.
+
+    Returns
+    -------
+    value : bool
+        Indicates if the netCDF is a points Providentia one.
+    """
+
+    if (('grid_edge' in dataset.dimensions) and ('model_latitude' in dataset.dimensions)
+            and ('model_longitude' in dataset.dimensions)):
+        return True
+    else:
+        return False
+
+
 def __is_lcc(dataset):
     """
     Check if the netCDF is in Lambert Conformal Conic (LCC) projection or not.
@@ -163,13 +197,14 @@
     Parameters
     ----------
     dataset : Dataset
-        netcdf4-python opened dataset object
+        netcdf4-python opened dataset object.

     Returns
     -------
     value : bool
-        Indicated if the netCDF is a LCC one
+        Indicates if the netCDF is an LCC one.
     """
+
     if 'Lambert_conformal' in dataset.variables.keys():
         return True
     else:
@@ -183,13 +218,14 @@ def __is_mercator(dataset):
     Parameters
     ----------
     dataset : Dataset
-        netcdf4-python opened dataset object
+        netcdf4-python opened dataset object.

     Returns
     -------
     value : bool
-        Indicated if the netCDF is a Mercator one
+        Indicates if the netCDF is a Mercator one.
     """
+
     if 'mercator' in dataset.variables.keys():
         return True
     else:
@@ -199,33 +235,19 @@ def __is_mercator(dataset):
 def concatenate_netcdfs(nessy_list, comm=None, info=False, parallel_method='Y', avoid_first_hours=0,
                         avoid_last_hours=0, first_level=0, last_level=None, balanced=False):
     """
-    Concatenate variables form different sources
+    Concatenate variables from different sources.

     Parameters
     ----------
     nessy_list : list
-        List of Nes objects or list of paths to concatenate
+        List of Nes objects or list of paths to concatenate.
     comm : MPI.Communicator
-    info : bool
-        Indicates if you want to print (stdout) the reading/writing steps
-    avoid_first_hours : int
-        Number of hours to remove from first time steps.
-    avoid_last_hours : int
-        Number of hours to remove from last time steps.
-    parallel_method : str
-        Indicates the parallelization method that you want. Default: 'Y' (over Y axis)
-        accepted values: ['X', 'Y', 'T']
-    balanced : bool
-        Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode
-    first_level : int
-        Index of the first level to use
-    last_level : int, None
-        Index of the last level to use. None if it is the last.
+        MPI Communicator.

     Returns
     -------
     Nes
-        Nes object with all the variables
+        Nes object with all the variables.
""" if not isinstance(nessy_list, list): raise AttributeError("You must pass a list of NES objects or paths.") @@ -256,4 +278,5 @@ def concatenate_netcdfs(nessy_list, comm=None, info=False, parallel_method='Y', balanced=balanced ) nessy_first.concatenate(aux_nessy) + return nessy_first diff --git a/nes/nc_projections/__init__.py b/nes/nc_projections/__init__.py index 384265791dea5ec6fb4dcada082558f858e5628c..fc6bc15a527b2aa0489f7eaa6294b6f175b0ddb5 100644 --- a/nes/nc_projections/__init__.py +++ b/nes/nc_projections/__init__.py @@ -3,5 +3,6 @@ from .latlon_nes import LatLonNes from .rotated_nes import RotatedNes from .points_nes import PointsNes from .points_nes_ghost import PointsNesGHOST +from .points_nes_providentia import PointsNesProvidentia from .lcc_nes import LCCNes from .mercator_nes import MercatorNes diff --git a/nes/nc_projections/default_nes.py b/nes/nc_projections/default_nes.py index 25280237464d9ede43933dddff8f7dcb5a01599e..8f03f5041cd3145b8cbd1783bf104f31b82ba374 100644 --- a/nes/nc_projections/default_nes.py +++ b/nes/nc_projections/default_nes.py @@ -1,19 +1,24 @@ #!/usr/bin/env python import sys +import gc import os import warnings import numpy as np +import pandas as pd import datetime from xarray import open_dataset from netCDF4 import Dataset, num2date, date2num from mpi4py import MPI from cfunits import Units from numpy.ma.core import MaskError -from copy import deepcopy +import geopandas as gpd +from shapely.geometry import Polygon +from copy import deepcopy, copy import datetime from ..interpolation import vertical_interpolation from ..interpolation import horizontal_interpolation +from ..nes_formats import to_netcdf_cams_ra class Nes(object): @@ -21,28 +26,27 @@ class Nes(object): Attributes ---------- - comm : MPI.Communicator + comm : MPI.Communicator. rank : int - MPI rank + MPI rank. master : bool - True when rank == 0 + True when rank == 0. size : int - Size of the communicator - + Size of the communicator. print_info : bool - Indicates if you want to print reading/writing info + Indicates if you want to print reading/writing info. is_xarray : bool - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. __ini_path : str - Path to the original file to read when open_netcdf is called + Path to the original file to read when open_netcdf is called. hours_start : int - Number of hours to avoid from the first original values + Number of hours to avoid from the first original values. hours_end : int - Number of hours to avoid from the last original values + Number of hours to avoid from the last original values. dataset : xr.Dataset - (not working) xArray Dataset + (not working) xArray Dataset. netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. variables : dict Variables information. The variables are stored in a dictionary with the var_name as key and another dictionary with the information. @@ -58,13 +62,13 @@ class Nes(object): Longitudes dictionary with the complete 'data' key for all the values and the rest of the attributes. parallel_method : str Parallel method to read/write. - Can be chosen any of the following axis to parallelize: 'T', 'Y' or 'X' + Can be chosen any of the following axis to parallelize: 'T', 'Y' or 'X'. read_axis_limits : dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. 
write_axis_limits : dict Dictionary with the 4D limits of the rank data to write. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. time : list List of time steps of the rank data. lev : dict @@ -76,11 +80,11 @@ class Nes(object): global_attrs : dict Global attributes with the attribute name as key and data as values. _var_dim : None, tuple - Tuple with the name of the Y and X dimensions for the variables + Tuple with the name of the Y and X dimensions for the variables. _lat_dim : None, tuple - Tuple with the name of the dimensions of the Latitude values + Tuple with the name of the dimensions of the Latitude values. _lon_dim : None, tuple - Tuple with the name of the dimensions of the Longitude values + Tuple with the name of the dimensions of the Longitude values. """ def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, @@ -91,30 +95,31 @@ class Nes(object): Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + accepted values: ['X', 'Y', 'T']. balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. first_level : int - Index of the first level to use + Index of the first level to use. last_level : int, None Index of the last level to use. None if it is the last. create_nes : bool - Indicates if ypu want to create the object from scratch (True) or trough an existen file. + Indicates if you want to create the object from scratch (True) or through an existing file. times : list, None List of times to substitute the current ones while creation. 
kwargs : @@ -131,15 +136,20 @@ class Nes(object): self.size = self.comm.Get_size() # General info - self.print_info = info + self.info = info self.is_xarray = xarray self.__ini_path = path + self.shapefile = None # Selecting info self.hours_start = avoid_first_hours self.hours_end = avoid_last_hours self.first_level = first_level self.last_level = last_level + self.lat_min = None + self.lat_max = None + self.lon_min = None + self.lon_max = None self.balanced = balanced # Define parallel method @@ -157,10 +167,11 @@ class Nes(object): # Complete dimensions self._time = times self._time_bnds = self.__get_time_bnds(create_nes) + self._lat_bnds, self._lon_bnds = self.__get_coordinates_bnds(create_nes) self._lev = {'data': np.array([0]), 'units': '', 'positive': 'up'} - self._lat, self._lon = self._create_centroids(**kwargs) + self._lat, self._lon = self._create_centre_coordinates(**kwargs) # Set axis limits for parallel reading self.read_axis_limits = self.get_read_axis_limits() @@ -169,6 +180,7 @@ class Nes(object): self.time = self._time[self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] self.time_bnds = self._time_bnds self.lev = deepcopy(self._lev) + self.lat_bnds, self.lon_bnds = self._lat_bnds, self._lon_bnds # Set NetCDF attributes self.global_attrs = self.__get_global_attributes(create_nes) @@ -199,6 +211,7 @@ class Nes(object): self._lev = self._get_coordinate_dimension(['lev', 'level', 'lm', 'plev']) self._lat = self._get_coordinate_dimension(['lat', 'latitude']) self._lon = self._get_coordinate_dimension(['lon', 'longitude']) + self._lat_bnds, self._lon_bnds = self.__get_coordinates_bnds() # Set axis limits for parallel reading self.read_axis_limits = self.get_read_axis_limits() @@ -209,6 +222,8 @@ class Nes(object): self.lev = self._get_coordinate_values(self._lev, 'Z') self.lat = self._get_coordinate_values(self._lat, 'Y') self.lon = self._get_coordinate_values(self._lon, 'X') + self.lat_bnds = self._get_coordinate_values(self._lat_bnds, 'Y', bounds=True) + self.lon_bnds = self._get_coordinate_values(self._lon_bnds, 'X', bounds=True) # Set axis limits for parallel writing self.write_axis_limits = self.get_write_axis_limits() @@ -230,48 +245,51 @@ class Nes(object): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + accepted values: ['X', 'Y', 'T']. balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. 
+        Balanced dataset cannot be written in chunking mode.
    first_level : int
-        Index of the first level to use
+        Index of the first level to use.
    last_level : int, None
        Index of the last level to use. None if it is the last.
    create_nes : bool
-        Indicates if ypu want to create the object from scratch (True) or trough an existen file.
+        Indicates if you want to create the object from scratch (True) or through an existing file.
    """
+
    new = Nes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method,
              avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours,
              first_level=first_level, last_level=last_level, create=create_nes, balanced=balanced)
+
    return new

    def __del__(self):
        """
-        To delete the Nes object and close all the opened Datasets
+        To delete the Nes object and close all the opened Datasets.
        """
+
        self.close()
-        for var_name, var_info in self.variables.items():
-            del var_info['data']
+        self.free_vars(list(self.variables.keys()))
        del self.variables
        try:
            del self.time
@@ -284,10 +302,15 @@ class Nes(object):
            del self._lat
            del self.lon
            del self._lon
+            del self._lat_bnds
+            del self.lat_bnds
+            del self._lon_bnds
+            del self.lon_bnds
        except AttributeError:
            pass

        del self
+        gc.collect()

        return None

@@ -298,8 +321,9 @@ class Nes(object):
        Returns
        -------
        state : dict
-            Dictionary with the class parameters
+            Dictionary with the class parameters.
        """
+
        d = self.__dict__
        state = {k: d[k] for k in d if k not in ['comm', 'variables', 'netcdf']}

@@ -307,13 +331,14 @@ class Nes(object):

    def __setstate__(self, state):
        """
-        Set the state of the class
+        Set the state of the class.

        Parameters
        ----------
        state: dict
-            Dictionary with the class parameters
+            Dictionary with the class parameters.
        """
+
        self.__dict__ = state

        return None

@@ -321,19 +346,19 @@ class Nes(object):
    def copy(self, copy_vars=False):
        """
        Copy the Nes object.
-
-        The copy will avoid to copy the communicator, dataset and variables by default
+        By default, the copy does not include the communicator, dataset or variables.

        Parameters
        ----------
        copy_vars: bool
-            Indicates if you want to copy the variables (in lazy mode)
+            Indicates if you want to copy the variables (in lazy mode).

        Returns
        -------
        nessy : Nes
-            Copy of the Nes object
+            Copy of the Nes object.
        """
+
        nessy = deepcopy(self)
        nessy.netcdf = None
        if copy_vars:
@@ -353,6 +378,7 @@ class Nes(object):
        """
        Erase the communicator and the parallelization indexes.
        """
+
        self.comm = None
        self.rank = 0
        self.master = 0
@@ -362,13 +388,14 @@ class Nes(object):

    def set_communicator(self, comm):
        """
-        Set a new communicator and the correspondent parallelization indexes
+        Set a new communicator and the corresponding parallelization indexes.

        Parameters
        ----------
        comm: MPI.COMM
-            Communicator to be set
+            Communicator to be set.
        """
+
        self.comm = comm
        self.rank = self.comm.Get_rank()
        self.master = self.rank == 0
@@ -385,8 +412,9 @@ class Nes(object):
        Parameters
        ----------
        levels : dict
-            Dictionary with the new level information to be set
+            Dictionary with the new level information to be set.
        """
+
        self._lev = deepcopy(levels)
        self.lev = deepcopy(levels)

@@ -399,8 +427,9 @@ class Nes(object):
        Parameters
        ----------
        time_bnds : list
-            List with the new time bounds information to be set
+            List with the new time bounds information to be set.
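+
+        Example
+        -------
+        A minimal sketch; the dates are illustrative::
+
+            from datetime import datetime
+            nessy.set_time_bnds([[datetime(2022, 11, 24, 0), datetime(2022, 11, 24, 1)]])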
""" + correct_format = True for time_bnd in np.array(time_bnds).flatten(): if not isinstance(time_bnd, datetime.datetime): @@ -423,6 +452,70 @@ class Nes(object): return None + def create_single_spatial_bounds(self, coordinates, inc, spatial_nv=2, inverse=False): + """ + Calculate the vertices coordinates. + + Parameters + ---------- + coordinates : np.array + Coordinates in degrees (latitude or longitude). + inc : float + Increment between centre values. + spatial_nv : int + Non mandatory parameter that informs the number of vertices that must have the + boundaries. Default: 2. + inverse : bool + For some grid latitudes. + + Returns + ---------- + bounds : np.array + Array with as many elements as vertices for each value of coords. + """ + + # Create new arrays moving the centres half increment less and more. + coords_left = coordinates - inc / 2 + coords_right = coordinates + inc / 2 + + # Defining the number of corners needed. 2 to regular grids and 4 for irregular ones. + if spatial_nv == 2: + # Create an array of N arrays of 2 elements to store the floor and the ceil values for each cell + bounds = np.dstack((coords_left, coords_right)) + bounds = bounds.reshape((len(coordinates), spatial_nv)) + elif spatial_nv == 4: + # Create an array of N arrays of 4 elements to store the corner values for each cell + # It can be stored in clockwise starting form the left-top element, or in inverse mode. + if inverse: + bounds = np.dstack((coords_left, coords_left, coords_right, coords_right)) + else: + bounds = np.dstack((coords_left, coords_right, coords_right, coords_left)) + else: + raise ValueError('The number of vertices of the boundaries must be 2 or 4.') + + return bounds + + def create_spatial_bounds(self): + """ + Calculate longitude and latitude bounds and set them. + """ + + inc_lat = np.abs(np.mean(np.diff(self._lat['data']))) + lat_bnds = self.create_single_spatial_bounds(self._lat['data'], inc_lat, spatial_nv=2) + + self._lat_bnds = deepcopy(lat_bnds) + self.lat_bnds = lat_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + :] + + inc_lon = np.abs(np.mean(np.diff(self._lon['data']))) + lon_bnds = self.create_single_spatial_bounds(self._lon['data'], inc_lon, spatial_nv=2) + + self._lon_bnds = deepcopy(lon_bnds) + self.lon_bnds = lon_bnds[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] + + return None + def free_vars(self, var_list): """ Erase the selected variables from the variables information. @@ -430,8 +523,9 @@ class Nes(object): Parameters ---------- var_list : list, str - List (or single string) of the variables to be loaded + List (or single string) of the variables to be loaded. """ + if isinstance(var_list, str): var_list = [var_list] @@ -439,11 +533,13 @@ class Nes(object): self.dataset = self.dataset.drop_vars(var_list) self.variables = self._get_lazy_variables() else: - for var_name in var_list: - if self.variables is not None: + if self.variables is not None: + for var_name in var_list: if var_name in self.variables: + if 'data' in self.variables[var_name].keys(): + del self.variables[var_name]['data'] del self.variables[var_name] - + gc.collect() return None def keep_vars(self, var_list): @@ -453,8 +549,9 @@ class Nes(object): Parameters ---------- var_list : list, str - List (or single string) of the variables to be loaded + List (or single string) of the variables to be loaded. 
""" + if isinstance(var_list, str): var_list = [var_list] @@ -466,13 +563,14 @@ class Nes(object): def get_time_interval(self): """ - Calculate the interrval of hours between time steps + Calculate the interrval of hours between time steps. Returns ------- int - Number of hours between time steps + Number of hours between time steps. """ + time_interval = self._time[1] - self._time[0] time_interval = int(time_interval.seconds // 3600) @@ -480,20 +578,21 @@ class Nes(object): def sel_time(self, time, copy=False): """ - To select only one time step + To select only one time step. Parameters ---------- time : datetime.datetime - Time stamp to select + Time stamp to select. copy : bool - Indicates if you want a copy with the selected time step (True) or to modify te existen one (False) + Indicates if you want a copy with the selected time step (True) or to modify te existing one (False). Returns ------- Nes - Nes object with the data (and metadata) of the selected time step + Nes object with the data (and metadata) of the selected time step. """ + if copy: aux_nessy = self.copy(copy_vars=False) aux_nessy.comm = self.comm @@ -521,14 +620,146 @@ class Nes(object): aux_nessy.variables[var_name]['data'] = aux_nessy.variables[var_name]['data'][[idx_time]] return aux_nessy + def sel(self, hours_start=None, time_min=None, hours_end=None, time_max=None, lev_min=None, lev_max=None, + lat_min=None, lat_max=None, lon_min=None, lon_max=None): + loaded_vars = False + for var_info in self.variables.values(): + if var_info['data'] is not None: + loaded_vars = True + # var_info['data'] = None + if loaded_vars: + raise ValueError("Some variables have been loaded. Use select function before load.") + + # First time filter + if hours_start is not None: + if time_min is not None: + raise ValueError("Choose to select by hours_start or time_min but not both") + self.hours_start = hours_start + elif time_min is not None: + if time_min <= self._time[0]: + self.hours_start = 0 + else: + self.hours_start = int((time_min - self._time[0]).total_seconds() // 3600) + + # Last time Filter + if hours_end is not None: + if time_max is not None: + raise ValueError("Choose to select by hours_end or time_max but not both") + self.hours_end = hours_end + elif time_max is not None: + if time_max >= self._time[-1]: + self.hours_end = 0 + else: + self.hours_end = int((self._time[-1] - time_max).total_seconds() // 3600) + + # Level filter + self.first_level = lev_min + self.last_level = lev_max + + # Coordinate filter + self.lat_min = lat_min + self.lat_max = lat_max + self.lon_min = lon_min + self.lon_max = lon_max + + # New Axis limits + self.read_axis_limits = self.get_read_axis_limits() + # Dimensions screening + self.time = self._time[self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + self.time_bnds = self._time_bnds + self.lev = self._get_coordinate_values(self._lev, 'Z') + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + + self.lat_bnds = self._get_coordinate_values(self._lat_bnds, 'Y', bounds=True) + self.lon_bnds = self._get_coordinate_values(self._lon_bnds, 'X', bounds=True) + + # Removing complete coordinates + self.write_axis_limits = self.get_write_axis_limits() + + self.filter_coordinates_selection() + + return None + + def filter_coordinates_selection(self): + idx = self.get_idx_intervals() + + self._time = self._time[idx['idx_t_min']:idx['idx_t_max']] + + if len(self._lat['data'].shape) == 1: + # Regular projection + self._lat['data'] = 
self._lat['data'][idx['idx_y_min']:idx['idx_y_max']] + self._lon['data'] = self._lon['data'][idx['idx_x_min']:idx['idx_x_max']] + + if self._lat_bnds is not None: + self._lat_bnds = self._lat_bnds[idx['idx_y_min']:idx['idx_y_max'], :] + if self._lon_bnds is not None: + self._lon_bnds = self._lon_bnds[idx['idx_x_min']:idx['idx_x_max'], :] + else: + # Irregular projections + self._lat['data'] = self._lat['data'][idx['idx_y_min']:idx['idx_y_max'], idx['idx_x_min']:idx['idx_x_max']] + self._lon['data'] = self._lon['data'][idx['idx_y_min']:idx['idx_y_max'], idx['idx_x_min']:idx['idx_x_max']] + + if self._lat_bnds is not None: + self._lat_bnds = self._lat_bnds[idx['idx_y_min']:idx['idx_y_max'], idx['idx_x_min']:idx['idx_x_max'], :] + if self._lon_bnds is not None: + self._lon_bnds = self._lon_bnds[idx['idx_y_min']:idx['idx_y_max'], idx['idx_x_min']:idx['idx_x_max'], :] + + return None + + def get_idx_intervals(self): + idx = {'idx_t_min': self.get_time_id(self.hours_start, first=True), + 'idx_t_max': self.get_time_id(self.hours_end, first=False)} + + # Axis Y + if self.lat_min is None: + idx['idx_y_min'] = 0 + else: + idx['idx_y_min'] = self.get_coordinate_id(self._lat['data'], self.lat_min, axis=0) + if self.lat_max is None: + idx['idx_y_max'] = self._lat['data'].shape[0] + 1 + else: + idx['idx_y_max'] = self.get_coordinate_id(self._lat['data'], self.lat_max, axis=0) + 1 + + if idx['idx_y_min'] > idx['idx_y_max']: + idx_aux = copy(idx['idx_y_min']) + idx['idx_y_min'] = idx['idx_y_max'] + idx['idx_y_max'] = idx_aux + + # Axis X + + if self.lon_min is None: + idx['idx_x_min'] = 0 + else: + if len(self._lon['data'].shape) == 1: + axis = 0 + else: + axis = 1 + idx['idx_x_min'] = self.get_coordinate_id(self._lon['data'], self.lon_min, axis=axis) + if self.lon_max is None: + idx['idx_x_max'] = self._lon['data'].shape[-1] + 1 + else: + if len(self._lon['data'].shape) == 1: + axis = 0 + else: + axis = 1 + idx['idx_x_max'] = self.get_coordinate_id(self._lon['data'], self.lon_max, axis=axis) + 1 + + if idx['idx_x_min'] > idx['idx_x_max']: + idx_aux = copy(idx['idx_x_min']) + idx['idx_x_min'] = idx['idx_x_max'] + idx['idx_x_max'] = idx_aux + return idx + # ================================================================================================================== # Statistics # ================================================================================================================== def last_time_step(self): """ - Modify variables to keep only the last time step + Modify variables to keep only the last time step. """ + if self.parallel_method == 'T': raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") aux_time = self._time[0].replace(hour=0, minute=0, second=0, microsecond=0) @@ -549,19 +780,20 @@ class Nes(object): def daily_statistic(self, op, type_op='calendar'): """ - Calculate daily statistic + Calculate daily statistic. Parameters ---------- op : str - Statistic to perform. Accepted values: "max", "mean" and "min" + Statistic to perform. Accepted values: "max", "mean" and "min". type_op : str - Type of statistic to perform. Accepted values: "calendar", "alltsteps", and "withoutt0" + Type of statistic to perform. Accepted values: "calendar", "alltsteps", and "withoutt0". - "calendar": Calculate the statistic using the time metadata. It will avoid single time step by day calculations - "alltsteps": Calculate a single time statistic with all the time steps. 
- "withoutt0": Calculate a single time statistic with all the time steps avoiding the first one. """ + if self.parallel_method == 'T': raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") time_interval = self.get_time_interval() @@ -665,9 +897,46 @@ class Nes(object): return None + @staticmethod + def _get_axis_index_(axis): + if axis == 'T': + value = 0 + elif axis == 'Z': + value = 1 + elif axis == 'Y': + value = 2 + elif axis == 'X': + value = 3 + else: + raise ValueError("Unknown axis: {0}".format(axis)) + return value + + def sum_axis(self, axis='Z'): + if self.parallel_method == axis: + raise NotImplementedError("It is not possible to sum the axis with it is parallelized '{0}'".format( + self.parallel_method)) + + for var_name, var_info in self.variables.items(): + if var_info['data'] is not None: + self.variables[var_name]['data'] = self.variables[var_name]['data'].sum( + axis=self._get_axis_index_(axis), keepdims=True) + if axis == 'T': + self.variables[var_name]['cell_methods'] = "time: sum (interval: {0}hr)".format( + (self.time[-1] - self.time[0]).total_seconds() // 3600) + + if axis == 'T': + self.set_time_bnds([self.time[0], self.time[-1]]) + self.time = [self.time[0]] + self._time = [self._time[0]] + if axis == 'Z': + self.lev['data'] = [self.lev['data'][0]] + self._lev['data'] = [self._lev['data'][0]] + return None + # ================================================================================================================== # Reading # ================================================================================================================== + def get_read_axis_limits(self): """ Calculate the 4D reading axis limits depending on if them have to balanced or not. @@ -676,8 +945,9 @@ class Nes(object): ------- dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. """ + if self.balanced: return self.get_read_axis_limits_balanced() else: @@ -685,48 +955,73 @@ class Nes(object): def get_read_axis_limits_unbalanced(self): """ - Calculate the 4D reading axis limits + Calculate the 4D reading axis limits. Returns ------- dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. 
""" + axis_limits = {'x_min': None, 'x_max': None, 'y_min': None, 'y_max': None, 'z_min': None, 'z_max': None, 't_min': None, 't_max': None} - + + idx = self.get_idx_intervals() + if self.parallel_method == 'Y': - y_len = self._lat['data'].shape[0] + y_len = idx['idx_y_max'] - idx['idx_y_min'] if y_len < self.size: - raise IndexError('More processors (size={0}) selected than Y elements (size={1})'.format(self.size, y_len)) - axis_limits['y_min'] = (y_len // self.size) * self.rank + raise IndexError('More processors (size={0}) selected than Y elements (size={1})'.format( + self.size, y_len)) + axis_limits['y_min'] = ((y_len // self.size) * self.rank) + idx['idx_y_min'] if self.rank + 1 < self.size: - axis_limits['y_max'] = (y_len // self.size) * (self.rank + 1) - # Spin up - axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) - axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + axis_limits['y_max'] = ((y_len // self.size) * (self.rank + 1)) + idx['idx_y_max'] + else: + axis_limits['y_max'] = idx['idx_y_max'] + + # Non parallel filters + axis_limits['x_min'] = idx['idx_x_min'] + axis_limits['x_max'] = idx['idx_x_max'] + + axis_limits['t_min'] = idx['idx_t_min'] + axis_limits['t_max'] = idx['idx_t_max'] + elif self.parallel_method == 'X': - x_len = self._lon['data'].shape[-1] + x_len = idx['idx_x_max'] - idx['idx_x_min'] if x_len < self.size: - raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format(self.size, x_len)) - axis_limits['x_min'] = (x_len // self.size) * self.rank + raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format( + self.size, x_len)) + axis_limits['x_min'] = ((x_len // self.size) * self.rank) + idx['idx_x_min'] if self.rank + 1 < self.size: - axis_limits['x_max'] = (x_len // self.size) * (self.rank + 1) - # Spin up - axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) - axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + axis_limits['x_max'] = ((x_len // self.size) * (self.rank + 1)) + idx['idx_x_max'] + else: + axis_limits['x_max'] = idx['idx_x_max'] + + # Non parallel filters + axis_limits['y_min'] = idx['idx_y_min'] + axis_limits['y_max'] = idx['idx_y_max'] + + axis_limits['t_min'] = idx['idx_t_min'] + axis_limits['t_max'] = idx['idx_t_max'] + elif self.parallel_method == 'T': - first_time_idx = self.get_time_id(self.hours_start, first=True) - last_time_idx = self.get_time_id(self.hours_end, first=False) - t_len = last_time_idx - first_time_idx + t_len = idx['idx_t_min'] - idx['idx_t_max'] if t_len < self.size: - raise IndexError('More processors (size={0}) selected than T elements (size={1})'.format(self.size, t_len)) - axis_limits['t_min'] = ((t_len // self.size) * self.rank) + first_time_idx + raise IndexError('More processors (size={0}) selected than T elements (size={1})'.format( + self.size, t_len)) + axis_limits['t_min'] = ((t_len // self.size) * self.rank) + idx['idx_t_min'] if self.rank + 1 < self.size: - axis_limits['t_max'] = ((t_len // self.size) * (self.rank + 1)) + first_time_idx + axis_limits['t_max'] = ((t_len // self.size) * (self.rank + 1)) + idx['idx_t_max'] + + # Non parallel filters + axis_limits['y_min'] = idx['idx_y_min'] + axis_limits['y_max'] = idx['idx_y_max'] + + axis_limits['x_min'] = idx['idx_x_min'] + axis_limits['x_max'] = idx['idx_x_max'] else: raise NotImplementedError("Parallel method '{meth}' is not implemented. 
Use one of these: {accept}".format( @@ -736,7 +1031,7 @@ class Nes(object): axis_limits['z_min'] = self.first_level if self.last_level == -1 or self.last_level is None: self.last_level = None - elif self.last_level +1 == len(self._lev['data']): + elif self.last_level + 1 == len(self._lev['data']): self.last_level = None else: self.last_level += 1 @@ -746,40 +1041,41 @@ class Nes(object): def get_read_axis_limits_balanced(self): """ - Calculate the 4D reading balanced axis limits + Calculate the 4D reading balanced axis limits. Returns ------- dict Dictionary with the 4D limits of the rank data to read. - t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max. """ + idx = self.get_idx_intervals() + fid_dist = {} - to_add = None if self.parallel_method == 'Y': - len_to_split = self._lat['data'].shape[0] + len_to_split = idx['idx_y_max'] - idx['idx_y_min'] if len_to_split < self.size: raise IndexError('More processors (size={0}) selected than Y elements (size={1})'.format( self.size, len_to_split)) min_axis = 'y_min' max_axis = 'y_max' + to_add = idx['idx_y_min'] elif self.parallel_method == 'X': - len_to_split = self._lon['data'].shape[-1] + len_to_split = idx['idx_x_max'] - idx['idx_x_min'] if len_to_split < self.size: raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format( self.size, len_to_split)) min_axis = 'x_min' max_axis = 'x_max' + to_add = idx['idx_x_min'] elif self.parallel_method == 'T': - first_time_idx = self.get_time_id(self.hours_start, first=True) - last_time_idx = self.get_time_id(self.hours_end, first=False) - len_to_split = last_time_idx - first_time_idx + len_to_split = idx['idx_t_max'] - idx['idx_t_min'] if len_to_split < self.size: raise IndexError('More processors (size={0}) selected than T elements (size={1})'.format( self.size, len_to_split)) min_axis = 't_min' max_axis = 't_max' - to_add = first_time_idx + to_add = idx['idx_t_min'] else: raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( meth=self.parallel_method, accept=['X', 'Y', 'T'])) @@ -819,10 +1115,16 @@ class Nes(object): axis_limits = fid_dist[self.rank] + # Non parallel filters if self.parallel_method != 'T': - # Spin up - axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) - axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + axis_limits['t_min'] = idx['idx_t_min'] + axis_limits['t_max'] = idx['idx_t_max'] + if self.parallel_method != 'X': + axis_limits['x_min'] = idx['idx_x_min'] + axis_limits['x_max'] = idx['idx_x_max'] + if self.parallel_method != 'Y': + axis_limits['y_min'] = idx['idx_y_min'] + axis_limits['y_max'] = idx['idx_y_max'] # Vertical levels selection: axis_limits['z_min'] = self.first_level @@ -843,16 +1145,17 @@ class Nes(object): Parameters ---------- hours : int - Number of hours to avoid + Number of hours to avoid. first : bool - Indicates if you want to avoid from the first hours (True) or from the last (False) - Default: True + Indicates if you want to avoid from the first hours (True) or from the last (False). + Default: True. Returns ------- int - Index of the time array + Index of the time array. """ + from datetime import timedelta if first: @@ -862,10 +1165,32 @@ class Nes(object): return idx + def get_coordinate_id(self, array, value, axis=0): + """ + Get the index of the corresponding coordinate value. + + Parameters + ---------- + value : float + Coordinate value to search. 
+        axis : int
+            Axis along which to search for the nearest value. Default: 0.
+
+        Returns
+        -------
+        int
+            Index of the coordinate array.
+        """
+        idx = (np.abs(array - value)).argmin(axis=axis).min()
+
+        return idx
+
    def open(self):
        """
-        Open the NetCDF
+        Open the NetCDF.
        """
+
        if self.is_xarray:
            self.dataset = self.__open_dataset()
            self.netcdf = None
@@ -877,13 +1202,14 @@ class Nes(object):

    def __open_dataset(self):
        """
-        Open the NetCDF with xarray
+        Open the NetCDF with xarray.

        Returns
        -------
        dataset : xr.Dataset
-            Opened dataset
+            Opened dataset.
        """
+
        if self.master:
            warnings.filterwarnings('ignore')  # Disabling warnings while reading MONARCH original file
            dataset = open_dataset(self.__ini_path, decode_coords='all')
@@ -897,18 +1223,19 @@ class Nes(object):

    def __open_netcdf4(self, mode='r'):
        """
-        Open the NetCDF with netcdf4-python
+        Open the NetCDF with netcdf4-python.

        Parameters
        ----------
        mode : str
            Inheritance from mode parameter from https://unidata.github.io/netcdf4-python/#Dataset.__init__
-            Default: 'r' (read-only)
+            Default: 'r' (read-only).

        Returns
        -------
        netcdf : Dataset
-            Opened dataset
+            Opened dataset.
        """
+
        if self.size == 1:
            netcdf = Dataset(self.__ini_path, format="NETCDF4", mode=mode, parallel=False)
        else:
@@ -920,8 +1247,9 @@ class Nes(object):

    def close(self):
        """
-        Close the NetCDF with netcdf4-python
+        Close the NetCDF with netcdf4-python.
        """
+
        if self.netcdf is not None:
            self.netcdf.close()
            self.netcdf = None
@@ -933,21 +1261,21 @@ class Nes(object):
        Calculates the number of days since the first date
        in the 'time' list and store in new list:
        This is useful when the units are 'months since',
-        which cannot be transformed to dates using num2date
+        which cannot be transformed to dates using num2date.

        Parameter
        ---------
        time: list
-            Original time
+            Original time.
        units: str
-            CF compliant time units
+            CF compliant time units.
        calendar: str
-            Original calendar
+            Original calendar.

        Returns
        -------
        time: list
-            CF compliant time
+            CF compliant time.
        """

        start_date_str = time.units.split('since')[1].lstrip()
@@ -971,17 +1299,17 @@ class Nes(object):

    def __parse_time(self, time):
        """
-        Parses the time to be CF compliant
+        Parses the time to be CF compliant.

        Parameters
        ----------
        time: str
-            Original time
+            Original time.

        Returns
        -------
        time : str
-            CF compliant time
+            CF compliant time.
        """

        units = time.units
@@ -999,18 +1327,19 @@ class Nes(object):
    @staticmethod
    def __parse_time_unit(t_units):
        """
-        Parses the time units to be CF compliant
+        Parses the time units to be CF compliant.

        Parameters
        ----------
        t_units : str
-            Original time units
+            Original time units.

        Returns
        -------
        t_units : str
-            CF compliant time units
+            CF compliant time units.
        """
+
        if 'h @' in t_units:
            t_units = 'hour since {0}-{1}-{2} {3}:{4}:{5} UTC'.format(
                t_units[4:8], t_units[8:10], t_units[10:12], t_units[13:15], t_units[15:17], t_units[17:-4])
@@ -1019,13 +1348,14 @@ class Nes(object):

    def __get_time(self):
        """
-        Get the NetCDF file time values
+        Get the NetCDF file time values.

        Returns
        -------
        time : list
-            List of times (datetime.datetime) of the NetCDF data
+            List of times (datetime.datetime) of the NetCDF data.
        """
+
        if self.is_xarray:
            time = self.variables['time']
        else:
@@ -1043,18 +1373,19 @@ class Nes(object):

    def __get_time_bnds(self, create_nes=False):
        """
-        Get the NetCDF time bounds values
+        Get the NetCDF time bounds values.
Parameters ---------- create_nes : bool - Indicated if the object is created from scratch or from an existing file + Indicates if you want to create the object from scratch (True) or through an existing file. Returns ------- - time : list - List of time bounds (datetime) of the NetCDF data + time_bnds : list + List of time bounds (datetime) of the NetCDF data. """ + if self.is_xarray: time_bnds = self.variables['time_bnds'] else: @@ -1063,7 +1394,8 @@ class Nes(object): if 'time_bnds' in self.netcdf.variables.keys(): time = self.netcdf.variables['time'] nc_var = self.netcdf.variables['time_bnds'] - time_bnds = num2date(nc_var[:], self.__parse_time_unit(time.units), calendar=time.calendar).tolist() + time_bnds = num2date(nc_var[:], self.__parse_time_unit(time.units), + calendar=time.calendar).tolist() else: time_bnds = None else: @@ -1071,25 +1403,72 @@ class Nes(object): else: time_bnds = None time_bnds = self.comm.bcast(time_bnds, root=0) + self.free_vars('time_bnds') + return time_bnds + def __get_coordinates_bnds(self, create_nes=False): + """ + Get the NetCDF coordinates bounds values. + + Parameters + ---------- + create_nes : bool + Indicates if you want to create the object from scratch (True) or through an existing file. + + Returns + ------- + lat_bnds : list + List of latitude bounds of the NetCDF data. + lon_bnds : list + List of longitude bounds of the NetCDF data. + """ + + if self.is_xarray: + lat_bnds = self.variables['lat_bnds'] + lon_bnds = self.variables['lon_bnds'] + else: + if self.master: + if not create_nes: + if 'lat_bnds' in self.netcdf.variables.keys(): + lat_bnds = self.netcdf.variables['lat_bnds'][:] + else: + lat_bnds = None + if 'lon_bnds' in self.netcdf.variables.keys(): + lon_bnds = self.netcdf.variables['lon_bnds'][:] + else: + lon_bnds = None + else: + lat_bnds = None + lon_bnds = None + else: + lat_bnds = None + lon_bnds = None + lat_bnds = self.comm.bcast(lat_bnds, root=0) + lon_bnds = self.comm.bcast(lon_bnds, root=0) + + self.free_vars(['lat_bnds', 'lon_bnds']) + + return lat_bnds, lon_bnds + def _get_coordinate_dimension(self, possible_names): """ Read the coordinate dimension data. - This will read the complete data of the coordinate + This will read the complete data of the coordinate. Parameters ---------- possible_names: list, str - List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']) + List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']). Returns ------- nc_var : dict Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. """ + if isinstance(possible_names, str): possible_names = [possible_names] @@ -1111,25 +1490,32 @@ class Nes(object): return nc_var - def _get_coordinate_values(self, coordinate_info, coordinate_axis): + def _get_coordinate_values(self, coordinate_info, coordinate_axis, bounds=False): """ - Get the coordinate data of the current portion + Get the coordinate data of the current portion. Parameters ---------- coordinate_info : dict, list Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. coordinate_axis : str - Name of the coordinate to extract. Accepted values: ['Z', 'Y', 'X'] + Name of the coordinate to extract. Accepted values: ['Z', 'Y', 'X']. Returns ------- values : dict - Dictionary with the portion of data corresponding to the rank + Dictionary with the portion of data corresponding to the rank. 
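+
+        Example
+        -------
+        Illustrative: with parallel_method='Y' and two ranks, rank 0 receives a
+        dictionary whose 'data' key holds the first half of the latitude values::
+
+            self._get_coordinate_values(self._lat, 'Y')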
""" - values = deepcopy(coordinate_info) - if isinstance(coordinate_info, list): + if coordinate_info is None: + return None + + if not isinstance(coordinate_info, dict): values = {'data': deepcopy(coordinate_info)} + else: + values = deepcopy(coordinate_info) + coordinate_len = len(values['data'].shape) + if bounds: + coordinate_len -= 1 if coordinate_axis == 'Y': if coordinate_len == 1: @@ -1171,6 +1557,7 @@ class Nes(object): 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, ...} """ + if self.is_xarray: variables = self.dataset.variables else: @@ -1201,13 +1588,14 @@ class Nes(object): Parameters ---------- var_name : str - Name of the variable to read + Name of the variable to read. Returns ------- data: np.array Portion of the variable data corresponding to the rank. """ + nc_var = self.netcdf.variables[var_name] var_dims = nc_var.dimensions @@ -1254,8 +1642,9 @@ class Nes(object): Parameters ---------- var_list : list, str - List (or single string) of the variables to be loaded + List (or single string) of the variables to be loaded. """ + if self.netcdf is None: self.__open_dataset() close = True @@ -1268,11 +1657,11 @@ class Nes(object): var_list = list(self.variables.keys()) for i, var_name in enumerate(var_list): - if self.print_info: + if self.info: print("Rank {0:03d}: Loading {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(var_list))) if self.variables[var_name]['data'] is None: self.variables[var_name]['data'] = self._read_variable(var_name) - if self.print_info: + if self.info: print("Rank {0:03d}: Loaded {1} var ({2})".format( self.rank, var_name, self.variables[var_name]['data'].shape)) @@ -1282,14 +1671,16 @@ class Nes(object): return None def to_dtype(self, data_type='float32'): + for var_name, var_info in self.variables.items(): if var_info['data'] is not None: self.variables[var_name]['data'] = self.variables[var_name]['data'].astype(data_type) + return None def concatenate(self, aux_nessy): """ - Concatenate different variables into the same nes object + Concatenate different variables into the same nes object. Parameters ---------- @@ -1299,8 +1690,9 @@ class Nes(object): Returns ------- list - List of var names added + List of var names added. """ + if isinstance(aux_nessy, str): aux_nessy = self.new(path=aux_nessy, comm=self.comm, parallel_method=self.parallel_method, xarray=self.is_xarray, @@ -1326,18 +1718,19 @@ class Nes(object): def __get_global_attributes(self, create_nes=False): """ - Read the netcdf global attributes + Read the netcdf global attributes. Parameters ---------- create_nes : bool - Indicated if the object is created from scratch or from an existing file + Indicates if you want to create the object from scratch (True) or through an existing file. Returns ------- gl_attrs : dict - Dictionary with the netCDF global attributes + Dictionary with the netCDF global attributes. """ + gl_attrs = {} if self.is_xarray: gl_attrs = self.dataset.attrs @@ -1351,6 +1744,7 @@ class Nes(object): # ================================================================================================================== # Writing # ================================================================================================================== + def get_write_axis_limits(self): """ Calculate the 4D writing axis limits depending on if them have to balanced or not. @@ -1359,8 +1753,9 @@ class Nes(object): ------- dict Dictionary with the 4D limits of the rank data to write. 
-            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
        """
+
        if self.balanced:
            return self.get_write_axis_limits_balanced()
        else:
@@ -1368,14 +1763,15 @@ class Nes(object):

    def get_write_axis_limits_unbalanced(self):
        """
-        Calculate the 4D writing axis limits
+        Calculate the 4D writing axis limits.

        Returns
        -------
        dict
            Dictionary with the 4D limits of the rank data to write.
-            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
        """
+
        axis_limits = {'x_min': None, 'x_max': None,
                       'y_min': None, 'y_max': None,
                       'z_min': None, 'z_max': None,
                       't_min': None, 't_max': None}

@@ -1404,14 +1800,15 @@ class Nes(object):

    def get_write_axis_limits_balanced(self):
        """
-        Calculate the 4D reading balanced axis limits
+        Calculate the 4D writing balanced axis limits.

        Returns
        -------
        dict
            Dictionary with the 4D limits of the rank data to read.
-            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max.
        """
+
        fid_dist = {}
        if self.parallel_method == 'Y':
            len_to_split = self._lat['data'].shape[0]
@@ -1464,16 +1861,26 @@ class Nes(object):

    def _create_dimensions(self, netcdf):
        """
-        Create the 'lev' and 'time' dimension.
+        Create 'time', 'time_bnds', 'lev', 'lon' and 'lat' dimensions.

        Parameters
        ----------
        netcdf : Dataset
-            netcdf4-python opened Dataset
+            netcdf4-python opened Dataset.
        """
+
+        # Create time dimension
        netcdf.createDimension('time', None)
+
+        # Create time_nv (number of vertices) dimension
        if self._time_bnds is not None:
            netcdf.createDimension('time_nv', 2)
+
+        # Create spatial_nv (number of vertices) dimension
+        if (self._lat_bnds is not None) and (self._lon_bnds is not None):
+            netcdf.createDimension('spatial_nv', 2)
+
+        # Create lev, lon and lat dimensions
        netcdf.createDimension('lev', len(self.lev['data']))
        netcdf.createDimension('lon', len(self._lon['data']))
        netcdf.createDimension('lat', len(self._lat['data']))
@@ -1482,20 +1889,21 @@ class Nes(object):

    def _create_dimension_variables(self, netcdf):
        """
-        Create the 'lev' and 'time' variables.
+        Create the 'time', 'time_bnds', 'lev', 'lat', 'lat_bnds', 'lon' and 'lon_bnds' variables.

        Parameters
        ----------
        netcdf : Dataset
-            netcdf4-python opened Dataset
+            netcdf4-python opened Dataset.
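+
+        Example
+        -------
+        For a regular grid with computed bounds this creates, illustratively:
+        time(time), time_bnds(time, time_nv), lev(lev), lat(lat),
+        lat_bnds(lat, spatial_nv), lon(lon) and lon_bnds(lon, spatial_nv).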
""" + # TIMES time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) time_var.units = 'hours since {0}'.format( - self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S")) - time_var.standard_name = "time" + self._time[self.get_time_id(self.hours_start, first=True)].strftime('%Y-%m-%d %H:%M:%S')) + time_var.standard_name = 'time' time_var.calendar = 'standard' - time_var.long_name = "time" + time_var.long_name = 'time' if self._time_bnds is not None: time_var.bounds = 'time_bnds' if self.size > 1: @@ -1524,41 +1932,62 @@ class Nes(object): lev[:] = self._lev['data'] # LATITUDES - lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" + lat = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lat.units = 'degrees_north' + lat.axis = 'Y' + lat.long_name = 'latitude coordinate' + lat.standard_name = 'latitude' + if self._lat_bnds is not None: + lat.bounds = 'lat_bnds' if self.size > 1: - lats.set_collective(True) - lats[:] = self._lat['data'] + lat.set_collective(True) + lat[:] = self._lat['data'] + + # LATITUDES BOUNDS + if self._lat_bnds is not None: + lat_bnds_var = netcdf.createVariable('lat_bnds', np.float64, self._lat_dim + ('spatial_nv',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + if self.size > 1: + lat_bnds_var.set_collective(True) + lat_bnds_var[:] = self._lat_bnds[:] # LONGITUDES - lons = netcdf.createVariable('lon', np.float64, self._lon_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" + lon = netcdf.createVariable('lon', np.float64, self._lon_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lon.units = 'degrees_east' + lon.axis = 'X' + lon.long_name = 'longitude coordinate' + lon.standard_name = 'longitude' + if self._lon_bnds is not None: + lon.bounds = 'lon_bnds' if self.size > 1: - lons.set_collective(True) - lons[:] = self._lon['data'] + lon.set_collective(True) + lon[:] = self._lon['data'] + + # LONGITUDES BOUNDS + if self._lon_bnds is not None: + lon_bnds_var = netcdf.createVariable('lon_bnds', np.float64, self._lon_dim + ('spatial_nv',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + if self.size > 1: + lon_bnds_var.set_collective(True) + lon_bnds_var[:] = self._lon_bnds[:] return None def _create_variables(self, netcdf, chunking=False): """ - Create the netCDF file variables + Create the netCDF file variables. Parameters ---------- netcdf : Dataset - netcdf4-python opened Dataset + netcdf4-python opened Dataset. chunking : bool - Indicates if you want to chunk the output netCDF + Indicates if you want to chunk the output netCDF. 
""" + for i, (var_name, var_dict) in enumerate(self.variables.items()): if var_dict['data'] is not None: - if self.print_info: + if self.info: print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) try: if not chunking: @@ -1574,18 +2003,18 @@ class Nes(object): chunk_size = self.comm.bcast(chunk_size, root=0) var = netcdf.createVariable(var_name, var_dict['data'].dtype, ('time', 'lev',) + self._var_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl, chunksizes=chunk_size) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} created ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) if self.size > 1: var.set_collective(True) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) for att_name, att_value in var_dict.items(): if att_name == 'data': - if self.print_info: + if self.info: print("Rank {0:03d}: Filling {1})".format(self.rank, var_name)) try: var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], @@ -1607,18 +2036,18 @@ class Nes(object): self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: var.setncattr(att_name, att_value) self._set_var_crs(var) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) except Exception as e: print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) - # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), # file=sys.stderr) raise e else: @@ -1626,66 +2055,75 @@ class Nes(object): msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) warnings.warn(msg) - def _create_centroids(self): + return None + + def _create_centre_coordinates(self): """ - Must be implemented on inner class + Must be implemented on inner class. """ + return None def _create_metadata(self, netcdf): """ - Must be implemented on inner class + Must be implemented on inner class. """ + return None def _set_crs(self, netcdf): """ - Must be implemented on inner class + Must be implemented on inner class. Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ + return None @staticmethod def _set_var_crs(var): """ - Must be implemented on inner class + Must be implemented on inner class. Parameters ---------- var : Variable netCDF4-python variable object. """ + return None def __to_netcdf_py(self, path, chunking=False): """ - Create the NetCDF using netcdf4-python methods + Create the NetCDF using netcdf4-python methods. Parameters ---------- path : str Path to the output netCDF file. chunking: bool - Indicates if you want to chunk the output netCDF + Indicates if you want to chunk the output netCDF. 
""" + # Open NetCDF - if self.print_info: + if self.info: print("Rank {0:03d}: Creating {1}".format(self.rank, path)) if self.size > 1: netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=True, comm=self.comm, info=MPI.Info()) else: netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=False) - if self.print_info: + if self.info: print("Rank {0:03d}: NetCDF ready to write".format(self.rank)) - # Create Dimensions + + # Create dimensions self._create_dimensions(netcdf) + # Create dimension variables self._create_dimension_variables(netcdf) - if self.print_info: + if self.info: print("Rank {0:03d}: Dimensions done".format(self.rank)) # Create variables @@ -1704,25 +2142,30 @@ class Nes(object): return None - def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + def __to_netcdf_cams_ra(self, path): + return to_netcdf_cams_ra(self, path) + + def to_netcdf(self, path, compression_level=0, serial=False, info=False, + chunking=False, type='NES'): """ - Write the netCDF output file + Write the netCDF output file. Parameters ---------- path : str - Path to the output netCDF file + Path to the output netCDF file. compression_level : int - Level of compression (0 to 9) Default: 0 (no compression) + Level of compression (0 to 9) Default: 0 (no compression). serial : bool - Indicates if you want to write in serial or not. Default: False + Indicates if you want to write in serial or not. Default: False. info : bool - Indicates if you want to print the information of each writing step by stdout Default: False + Indicates if you want to print the information of each writing step by stdout Default: False. chunking : bool - Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False. """ - old_info = self.print_info - self.print_info = info + + old_info = self.info + self.info = info self.zip_lvl = compression_level if self.is_xarray: @@ -1735,16 +2178,26 @@ class Nes(object): new_nc = self.copy(copy_vars=False) new_nc.set_communicator(MPI.COMM_SELF) new_nc.variables = data - new_nc.__to_netcdf_py(path) - + if type == 'NES': + new_nc.__to_netcdf_py(path) + elif type == 'CAMS_RA': + new_nc.__to_netcdf_cams_ra(path) + else: + raise ValueError( + "Unknown NetCDF type '{0}'. Use 'CAMS_RA' or 'NES'; default='NES'".format(type)) else: - self.__to_netcdf_py(path, chunking=chunking) + if type == 'NES': + self.__to_netcdf_py(path, chunking=chunking) + elif type == 'CAMS_RA': + self.__to_netcdf_cams_ra(path) + else: + raise ValueError("Unknown NetCDF type '{0}'. Use 'CAMS_RA' or 'NES'; default='NES'".format(type)) - self.print_info = old_info + self.info = old_info return None - def __to_grib2(self, path, grib_keys, grib_template_path, info=False): + def __to_grib2(self, path, grib_keys, grib_template_path, lat_flip=True, info=False): """ Private method to write output file with grib2 format. @@ -1753,12 +2206,13 @@ class Nes(object): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. 
""" + from eccodes import codes_grib_new_from_file from eccodes import codes_keys_iterator_new from eccodes import codes_keys_iterator_next @@ -1795,43 +2249,53 @@ class Nes(object): # Adding grib2 keys to file for key, value in grib_keys.items(): - if key not in ['typeOfFirstFixedSurface', 'level']: - if info: - print('key:', key, 'val:', value, 'type:', type(value)) - codes_set(clone_id, key, value) - # codes_set(clone_id, key, value) + if value not in ['', 'None', None]: + try: + codes_set(clone_id, key, value) + except Exception as e: + print("Something went wrong while writing the Grib key '{0}': {1}".format(key, value)) + raise e + + # Time dependent keys + if 'dataTime' in grib_keys.keys() and grib_keys['dataTime'] in ['', 'None', None]: + codes_set(clone_id, 'dataTime', int(i_time * 100)) + if 'stepRange' in grib_keys.keys() and grib_keys['stepRange'] in ['', 'None', None]: + n_secs = (time - self._time[0]).total_seconds() + codes_set(clone_id, 'stepRange', int(n_secs // 3600)) + if 'forecastTime' in grib_keys.keys() and grib_keys['forecastTime'] in ['', 'None', None]: + n_secs = (time - self._time[0]).total_seconds() + codes_set(clone_id, 'forecastTime', int(n_secs)) # Level dependent keys - if 'typeOfFirstFixedSurface' in grib_keys.keys(): + if 'typeOfFirstFixedSurface' in grib_keys.keys() and \ + grib_keys['typeOfFirstFixedSurface'] in ['', 'None', None]: if float(lev) == 0: codes_set(clone_id, 'typeOfFirstFixedSurface', 1) # grib_keys['typeOfFirstFixedSurface'] = 1 else: codes_set(clone_id, 'typeOfFirstFixedSurface', 103) # grib_keys['typeOfFirstFixedSurface'] = 103 - if 'level' in grib_keys.keys(): + if 'level' in grib_keys.keys() and grib_keys['level'] in ['', 'None', None]: codes_set(clone_id, 'level', float(lev)) - # grib_keys['level'] = float(lev) - - # # Adding grib2 keys to file - # for key, value in grib_keys.items(): - # print('key:', key, 'val:', value, 'type:', type(value)) - # codes_set(clone_id, key, value) - - # newval = vardata[step, nlev].round(int(keys['decimalPrecision'])) - newval = var_info['data'][i_time, i_lev] - newval = np.flipud(newval) - # newval = newval.reshape(newval.shape[-1], newval.shape[-2])[::-1, :] - # print(newval.dtype, newval) - codes_set_values(clone_id, newval.ravel()) - # print('write') + + newval = var_info['data'][i_time, i_lev, :, :] + if lat_flip: + newval = np.flipud(newval) + + # TODO Check default NaN Value + newval[np.isnan(newval)] = 0. + + codes_set_values(clone_id, np.array(newval.ravel(), dtype='float64')) + # codes_set_values(clone_id, newval.ravel()) codes_write(clone_id, fout) + del newval codes_release(gid) fout.close() fin.close() + return None - def to_grib2(self, path, grib_keys, grib_template_path, info=False): + def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=True, info=False): """ Write output file with grib2 format. @@ -1840,9 +2304,9 @@ class Nes(object): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. 
""" @@ -1854,11 +2318,207 @@ class Nes(object): new_nc = self.copy(copy_vars=False) new_nc.set_communicator(MPI.COMM_SELF) new_nc.variables = data - new_nc.__to_grib2(path, grib_keys, grib_template_path, info=info) + new_nc.__to_grib2(path, grib_keys, grib_template_path, lat_flip=lat_flip, info=info) else: - self.__to_grib2(path, grib_keys, grib_template_path, info=info) + self.__to_grib2(path, grib_keys, grib_template_path, lat_flip=lat_flip, info=info) + return None + def create_shapefile(self): + """ + Create spatial geodataframe (shapefile). + """ + + if self._lat_bnds is None or self._lon_bnds is None: + self.create_spatial_bounds() + + # Reshape arrays to create geometry + aux_shape = (self.lat_bnds.shape[0], self.lon_bnds.shape[0], 4) + lon_bnds_aux = np.empty(aux_shape) + lon_bnds_aux[:, :, 0] = self.lon_bnds[np.newaxis, :, 0] + lon_bnds_aux[:, :, 1] = self.lon_bnds[np.newaxis, :, 1] + lon_bnds_aux[:, :, 2] = self.lon_bnds[np.newaxis, :, 1] + lon_bnds_aux[:, :, 3] = self.lon_bnds[np.newaxis, :, 0] + + lon_bnds = lon_bnds_aux + del lon_bnds_aux + + lat_bnds_aux = np.empty(aux_shape) + lat_bnds_aux[:, :, 0] = self.lat_bnds[:, np.newaxis, 0] + lat_bnds_aux[:, :, 1] = self.lat_bnds[:, np.newaxis, 0] + lat_bnds_aux[:, :, 2] = self.lat_bnds[:, np.newaxis, 1] + lat_bnds_aux[:, :, 3] = self.lat_bnds[:, np.newaxis, 1] + + lat_bnds = lat_bnds_aux + del lat_bnds_aux + + aux_b_lats = lat_bnds.reshape((lat_bnds.shape[0] * lat_bnds.shape[1], lat_bnds.shape[2])) + aux_b_lons = lon_bnds.reshape((lon_bnds.shape[0] * lon_bnds.shape[1], lon_bnds.shape[2])) + + # Create dataframe cointaining all polygons + geometry = [] + for i in range(aux_b_lons.shape[0]): + geometry.append(Polygon([(aux_b_lons[i, 0], aux_b_lats[i, 0]), + (aux_b_lons[i, 1], aux_b_lats[i, 1]), + (aux_b_lons[i, 2], aux_b_lats[i, 2]), + (aux_b_lons[i, 3], aux_b_lats[i, 3]), + (aux_b_lons[i, 0], aux_b_lats[i, 0])])) + fids = np.arange(len(self._lat['data']) * len(self._lon['data'])) + fids = fids.reshape((len(self._lat['data']), len(self._lon['data']))) + fids = fids[self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + gdf = gpd.GeoDataFrame(index=pd.Index(name='FID', data=fids.ravel()), + geometry=geometry, + crs="EPSG:4326") + self.shapefile = gdf + + return gdf + + def write_shapefile(self, path): + """Save spatial geodataframe (shapefile). + + Parameters + ---------- + path : str + Path to the output file. + """ + + if self.shapefile is None: + raise ValueError('Shapefile was not created.') + + if self.size == 1: + # In serial, avoid gather + self.shapefile.to_file(path) + else: + # In parallel + data = self.comm.gather(self.shapefile, root=0) + if self.master: + data = pd.concat(data) + data.to_file(path) + + return None + + def spatial_join(self, mask, method=None): + """ + Compute overlay intersection of two GeoPandasDataFrames + + Parameters + ---------- + mask : GeoPandasDataFrame + File from where the data will be obtained on the intersection. + method : str + Overlay method. Accepted values: ['nearest', 'intersection', None]. 
+ """ + + # Nearest centroids to the mask polygons + if method == 'nearest': + + # Get centroids of shapefile to mask + shapefile_aux = deepcopy(self.shapefile) + shapefile_aux.geometry = self.shapefile.centroid + + # Calculate spatial joint by distance + shapefile_aux = gpd.sjoin_nearest(shapefile_aux, mask.to_crs(self.shapefile.crs), distance_col='distance') + + # Get data from closest shapes to centroids + del shapefile_aux['geometry'], shapefile_aux['index_right'] + self.shapefile.loc[shapefile_aux.index, shapefile_aux.columns] = shapefile_aux + + # Intersect the areas of the mask polygons, outside of the mask there will be NaN + elif method == 'intersection': + + # Get intersected areas + inp, res = mask.sindex.query_bulk(self.shapefile.geometry, predicate='intersects') + print('Rank {0:03d}: {1} intersected areas found'.format(self.rank, len(inp))) + + # Calculate intersected areas and fractions + intersection = pd.concat([self.shapefile.geometry[inp].reset_index(), mask.geometry[res].reset_index()], + axis=1, ignore_index=True) + intersection.columns = (list(self.shapefile.geometry[inp].reset_index().columns) + + list(mask.geometry[res].reset_index().rename(columns={'geometry': 'geometry_mask', + 'index': 'index_mask'}).columns)) + intersection['area'] = intersection.apply(lambda x: x['geometry'].intersection(x['geometry_mask']).buffer(0).area, + axis=1) + intersection['fraction'] = intersection.apply(lambda x: x['area'] / x['geometry'].area, axis=1) + + # Choose biggest area from intersected areas with multiple options + intersection.sort_values('fraction', ascending=False, inplace=True) + intersection = intersection.drop_duplicates(subset='FID', keep="first") + intersection = intersection.sort_values('FID').set_index('FID') + + # Get data from mask + del mask['geometry'] + self.shapefile.loc[intersection.index, mask.columns] = np.array(mask.loc[intersection.index_mask, :]) + + # Centroids that fall on the mask polygons, outside of the mask there will be NaN + elif method is None: + + # Get centroids of shapefile to mask + shapefile_aux = deepcopy(self.shapefile) + shapefile_aux.geometry = self.shapefile.centroid + + # Calculate spatial joint + shapefile_aux = gpd.sjoin(shapefile_aux, mask.to_crs(self.shapefile.crs)) + + # Get data from shapes where there are centroids, rest will be NaN + del shapefile_aux['geometry'], shapefile_aux['index_right'] + self.shapefile.loc[shapefile_aux.index, shapefile_aux.columns] = shapefile_aux + + return None + + @staticmethod + def spatial_overlays(df1, df2, how='intersection'): + """ + Compute overlay intersection of two GeoPandasDataFrames df1 and df2 + + https://github.com/geopandas/geopandas/issues/400 + + :param df1: GeoDataFrame + :param df2: GeoDataFrame + :param how: Operation to do + :return: GeoDataFrame + """ + from functools import reduce + + df1 = df1.copy() + df2 = df2.copy() + df1['geometry'] = df1.geometry.buffer(0) + df2['geometry'] = df2.geometry.buffer(0) + if how == 'intersection': + # Spatial Index to create intersections + spatial_index = df2.sindex + df1['bbox'] = df1.geometry.apply(lambda x: x.bounds) + df1['histreg'] = df1.bbox.apply(lambda x: list(spatial_index.intersection(x))) + pairs = df1['histreg'].to_dict() + nei = [] + for i, j in pairs.items(): + for k in j: + nei.append([i, k]) + pairs = pd.DataFrame(nei, columns=['idx1', 'idx2']) + pairs = pairs.merge(df1, left_on='idx1', right_index=True) + pairs = pairs.merge(df2, left_on='idx2', right_index=True, suffixes=['_1', '_2']) + pairs['geometry'] = pairs.apply(lambda 
x: (x['geometry_1'].intersection(x['geometry_2'])).buffer(0), axis=1) + + pairs.drop(columns=['geometry_1', 'geometry_2', 'histreg', 'bbox'], inplace=True) + pairs = gpd.GeoDataFrame(pairs, columns=pairs.columns, crs=df1.crs) + pairs = pairs.loc[~pairs.geometry.is_empty] + + return_value = pairs + elif how == 'difference': + spatial_index = df2.sindex + df1['bbox'] = df1.geometry.apply(lambda x: x.bounds) + df1['histreg'] = df1.bbox.apply(lambda x: list(spatial_index.intersection(x))) + df1['new_g'] = df1.apply(lambda x: reduce(lambda x, y: x.difference(y).buffer(0), + [x.geometry] + list(df2.iloc[x.histreg].geometry)), axis=1) + df1.geometry = df1.new_g + df1 = df1.loc[~df1.geometry.is_empty].copy() + df1.drop(['bbox', 'histreg', 'new_g'], axis=1, inplace=True) + return_value = df1 + else: + raise NotImplementedError(how) + + return return_value + def __gather_data_py_object(self): """ Gather all the variable data into the MPI rank 0 to perform a serial write. @@ -1868,6 +2528,7 @@ class Nes(object): data_list: dict Variables dictionary with all the data from all the ranks. """ + data_list = deepcopy(self.variables) for var_name in data_list.keys(): try: @@ -1916,8 +2577,8 @@ class Nes(object): else: data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -1936,9 +2597,10 @@ class Nes(object): data_list: dict Variables dictionary with all the data from all the ranks. """ + data_list = deepcopy(self.variables) for var_name in data_list.keys(): - if self.print_info and self.master: + if self.info and self.master: print("Gathering {0}".format(var_name)) shp_len = len(data_list[var_name]['data'].shape) try: @@ -2000,8 +2662,8 @@ class Nes(object): else: data_list[var_name]['data'] = np.concatenate(recvbuf, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -2014,64 +2676,63 @@ class Nes(object): # ================================================================================================================== # Extra Methods # ================================================================================================================== + def add_4d_vertical_info(self, info_to_add): """ To add the vertical information from other source. Parameters ---------- - self : nes.Nes - info_to_add : nes.Nes, str Nes object with the vertical information as variable or str with the path to the NetCDF file that contains the vertical data. 
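To recap the three `spatial_join` modes implemented above: `method='nearest'` attaches the attributes of the closest mask polygon to each cell centroid, `method='intersection'` keeps, for each cell, the mask polygon with the largest overlap fraction, and `method=None` joins only where a cell centroid falls inside a mask polygon. A hedged sketch with a placeholder mask layer:

```python
import geopandas as gpd
from nes import open_netcdf

nc = open_netcdf('input.nc')                   # placeholder paths throughout
nc.load()
nc.create_shapefile()

mask = gpd.read_file('regions.shp')            # any polygon layer with attribute columns

nc.spatial_join(mask, method='intersection')   # largest-overlap mask polygon per cell
# nc.spatial_join(mask, method='nearest')      # closest polygon to each cell centroid
# nc.spatial_join(mask, method=None)           # centroid-in-polygon join
print(nc.shapefile.columns)                    # cell geometry plus the joined mask columns
```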
""" + return vertical_interpolation.add_4d_vertical_info(self, info_to_add) def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', extrapolate=None, info=None): """ - Vertical interpolation method + Vertical interpolation method. Parameters ---------- self : Nes - Source Nes object - + Source Nes object. new_levels : list - List of new vertical levels - + List of new vertical levels. new_src_vertical - kind : str Vertical interpolation type. - extrapolate : None, tuple, str - Extrapolate method (for non linear operations) - + Extrapolate method (for non linear operations). info: None, bool - Indicates if you want to print extra information - + Indicates if you want to print extra information. """ + return vertical_interpolation.interpolate_vertical( self, new_levels, new_src_vertical=new_src_vertical, kind=kind, extrapolate=extrapolate, info=info) def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='NearestNeighbour', n_neighbours=4, - info=False): + info=False, to_providentia=False): """ Horizontal interpolation from the current grid to another one. Parameters ---------- dst_grid : nes.Nes - Final projection Nes object + Final projection Nes object. weight_matrix_path : str, None - Path to the weight matrix to read/create + Path to the weight matrix to read/create. kind : str - Kind of horizontal interpolation. choices = ['NearestNeighbour'] + Kind of horizontal interpolation. choices = ['NearestNeighbour']. n_neighbours: int - Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4 + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. Default: 4. info: bool Indicates if you want to print extra info during the interpolation process. + to_providentia : bool + Indicates if we want the interpolated grid in Providentia format. """ + return horizontal_interpolation.interpolate_horizontal( - self, dst_grid, weight_matrix_path=weight_matrix_path, kind=kind, n_neighbours=n_neighbours, info=info) + self, dst_grid, weight_matrix_path=weight_matrix_path, kind=kind, n_neighbours=n_neighbours, info=info, + to_providentia=to_providentia) diff --git a/nes/nc_projections/latlon_nes.py b/nes/nc_projections/latlon_nes.py index b24796e66c648dbd8878bd73f6da69a86e5325a7..b1e996f4780f4a4a31d3c4b3c8174d016675b71e 100644 --- a/nes/nc_projections/latlon_nes.py +++ b/nes/nc_projections/latlon_nes.py @@ -23,28 +23,29 @@ class LatLonNes(Nes): avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ - Initialize the LatLonNes class + Initialize the LatLonNes class. Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. 
avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(LatLonNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method, balanced=balanced, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, @@ -69,63 +70,123 @@ class LatLonNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. """ + new = LatLonNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. - Parameters + Returns ---------- - netcdf : Dataset - NetCDF object. + centre_lat : dict + Dictionary with data of centre latitudes in 1D + centre_lon : dict + Dictionary with data of centre longitudes in 1D """ - # Calculate center latitudes + # Calculate centre latitudes lat_c_orig = kwargs['lat_orig'] + (kwargs['inc_lat'] / 2) - self.center_lats = np.linspace( + centre_lat_data = np.linspace( lat_c_orig, lat_c_orig + (kwargs['inc_lat'] * (kwargs['n_lat'] - 1)), kwargs['n_lat']) + centre_lat = {'data': centre_lat_data} - # Calculate center longitudes + # Calculate centre longitudes lon_c_orig = kwargs['lon_orig'] + (kwargs['inc_lon'] / 2) - self.center_lons = np.linspace( + centre_lon_data = np.linspace( lon_c_orig, lon_c_orig + (kwargs['inc_lon'] * (kwargs['n_lon'] - 1)), kwargs['n_lon']) + centre_lon = {'data': centre_lon_data} + + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). 
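The centre-coordinate arithmetic in `_create_centre_coordinates` above is plain NumPy: offset the origin by half a cell and space `n` centres `inc` apart. A standalone check with illustrative values:

```python
import numpy as np

lat_orig, inc_lat, n_lat = 41.1, 0.1, 10            # illustrative grid definition
lat_c_orig = lat_orig + (inc_lat / 2)               # first cell centre
centre_lat = np.linspace(lat_c_orig, lat_c_orig + inc_lat * (n_lat - 1), n_lat)
print(centre_lat)                                   # [41.15 41.25 ... 42.05]
```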
+ """ + + model_centre_lon_data, model_centre_lat_data = np.meshgrid(self.lon['data'], self.lat['data']) + + # Calculate centre latitudes + model_centre_lat = {'data': model_centre_lat_data} - return {'data': self.center_lats}, {'data': self.center_lons} + # Calculate centre longitudes + model_centre_lon = {'data': model_centre_lon_data} - def _create_bounds(self, **kwargs): + return model_centre_lat, model_centre_lon - # This function is not being used - spatial_nv = 2 - boundary_lats = self.create_bounds(self.center_lats, kwargs['inc_lat'], spatial_nv) - boundary_lons = self.create_bounds(self.center_lons, kwargs['inc_lon'], spatial_nv) + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. + """ + + # Get grid resolution + inc_lon = np.abs(np.mean(np.diff(self.lon['data']))) + inc_lat = np.abs(np.mean(np.diff(self.lat['data']))) + + # Get bounds + lat_bounds = self.create_single_spatial_bounds(self.lat['data'], inc_lat) + lon_bounds = self.create_single_spatial_bounds(self.lon['data'], inc_lon) - return boundary_lats, boundary_lons + # Get latitudes for grid edge + left_edge_lat = np.append(lat_bounds.flatten()[::2], lat_bounds.flatten()[-1]) + right_edge_lat = np.flip(left_edge_lat, 0) + top_edge_lat = np.repeat(lat_bounds[-1][-1], len(self.lon['data']) - 1) + bottom_edge_lat = np.repeat(lat_bounds[0][0], len(self.lon['data'])) + lat_grid_edge = np.concatenate((left_edge_lat, top_edge_lat, right_edge_lat, bottom_edge_lat)) + + # Get longitudes for grid edge + left_edge_lon = np.repeat(lon_bounds[0][0], len(self.lat['data']) + 1) + top_edge_lon = lon_bounds.flatten()[1:-1:2] + right_edge_lon = np.repeat(lon_bounds[-1][-1], len(self.lat['data']) + 1) + bottom_edge_lon = np.flip(lon_bounds.flatten()[:-1:2], 0) + lon_grid_edge = np.concatenate((left_edge_lon, top_edge_lon, right_edge_lon, bottom_edge_lon)) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((lon_grid_edge, lat_grid_edge)).T + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon @staticmethod def _set_var_crs(var): @@ -137,7 +198,9 @@ class LatLonNes(Nes): var : Variable netCDF4-python variable object. """ + var.grid_mapping = 'crs' + var.coordinates = "lat lon" return None @@ -148,7 +211,7 @@ class LatLonNes(Nes): Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ mapping = netcdf.createVariable('crs', 'c') @@ -159,7 +222,7 @@ class LatLonNes(Nes): return None - def to_grib2(self, path, grib_keys, grib_template_path, info=False): + def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=False, info=False): """ Write output file with grib2 format. @@ -168,10 +231,11 @@ class LatLonNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. 
""" - return super(LatLonNes, self).to_grib2(path, grib_keys, grib_template_path, info=info) + + return super(LatLonNes, self).to_grib2(path, grib_keys, grib_template_path, lat_flip=lat_flip, info=info) diff --git a/nes/nc_projections/lcc_nes.py b/nes/nc_projections/lcc_nes.py index 6bf09f4c6de858bfeb89cfc430b876577edf9626..35a825a59303a764bdaf2fc06c0fdf87b5b396a3 100644 --- a/nes/nc_projections/lcc_nes.py +++ b/nes/nc_projections/lcc_nes.py @@ -1,8 +1,12 @@ #!/usr/bin/env python import numpy as np +import pandas as pd from cfunits import Units from pyproj import Proj +from copy import deepcopy +import geopandas as gpd +from shapely.geometry import Polygon from .default_nes import Nes @@ -38,23 +42,24 @@ class LCCNes(Nes): Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(LCCNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method, balanced=balanced, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, @@ -90,72 +95,96 @@ class LCCNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. 
""" + new = LCCNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new + def filter_coordinates_selection(self): + + idx = self.get_idx_intervals() + + self.y = self._get_coordinate_values(self._y, 'Y') + self.x = self._get_coordinate_values(self._x, 'X') + + self._y['data'] = self._y['data'][idx['idx_y_min']:idx['idx_y_max']] + self._x['data'] = self._x['data'][idx['idx_x_min']:idx['idx_x_max']] + + super(LCCNes, self).filter_coordinates_selection() + + return None + def get_projection_data(self, create_nes, **kwargs): """ - Read the projection data + Read the projection data. Returns ------- - projection : dict - Dictionary with the projection data + projection_data : dict + Dictionary with the projection data. """ if create_nes: - projection = {'data': None, - 'dimensions': (), - 'grid_mapping_name': 'lambert_conformal_conic', - 'standard_parallel': [kwargs['lat_2'], kwargs['lat_1']], - 'longitude_of_central_meridian': kwargs['lon_0'], - 'latitude_of_projection_origin': kwargs['lat_0'], - } + projection_data = {'data': None, + 'dimensions': (), + 'grid_mapping_name': 'lambert_conformal_conic', + 'standard_parallel': [kwargs['lat_1'], kwargs['lat_2']], + 'longitude_of_central_meridian': kwargs['lon_0'], + 'latitude_of_projection_origin': kwargs['lat_0'], + } else: - projection = self.variables['Lambert_conformal'] + projection_data = self.variables['Lambert_conformal'] + projection_data['standard_parallel'] = [projection_data['standard_parallel'].split(', ')[0], + projection_data['standard_parallel'].split(', ')[1]] self.free_vars('Lambert_conformal') - return projection + return projection_data def _create_dimensions(self, netcdf): """ - Create the 'y', 'x' dimensions and the super dimensions ('lev', 'time'). + Create 'y', 'x' and 'spatial_nv' dimensions and the super dimensions ('lev', 'time'). Parameters ---------- netcdf : Dataset NetCDF object. 
""" + super(LCCNes, self)._create_dimensions(netcdf) + # Create y and x dimensions netcdf.createDimension('y', len(self._y['data'])) netcdf.createDimension('x', len(self._x['data'])) + # Create spatial_nv (number of vertices) dimension + if (self._lat_bnds is not None) and (self._lon_bnds is not None): + netcdf.createDimension('spatial_nv', 4) + return None def _create_dimension_variables(self, netcdf): @@ -172,33 +201,33 @@ class LCCNes(Nes): # LCC Y COORDINATES y = netcdf.createVariable('y', self._y['data'].dtype, ('y',)) - y.long_name = "y coordinate of projection" + y.long_name = 'y coordinate of projection' if 'units' in self._y.keys(): y.units = Units(self._y['units'], formatted=True).units else: y.units = 'm' - y.standard_name = "projection_y_coordinate" + y.standard_name = 'projection_y_coordinate' if self.size > 1: y.set_collective(True) y[:] = self._y['data'] # LCC X COORDINATES x = netcdf.createVariable('x', self._x['data'].dtype, ('x',)) - x.long_name = "x coordinate of projection" + x.long_name = 'x coordinate of projection' if 'units' in self._x.keys(): x.units = Units(self._x['units'], formatted=True).units else: x.units = 'm' - x.standard_name = "projection_x_coordinate" + x.standard_name = 'projection_x_coordinate' if self.size > 1: x.set_collective(True) x[:] = self._x['data'] return None - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. Parameters ---------- @@ -220,25 +249,150 @@ class LCCNes(Nes): x = np.array([self._x['data']] * len(self._y['data'])) y = np.array([self._y['data']] * len(self._x['data'])).T - projection = Proj( - proj='lcc', - ellps='WGS84', - R=6370000.00, - lat_1=kwargs['lat_1'], - lat_2=kwargs['lat_2'], - lon_0=kwargs['lon_0'], - lat_0=kwargs['lat_0'], - to_meter=1, - x_0=0, - y_0=0, - a=6370000.00, - k_0=1.0) - - # Calculate center latitudes and longitudes (UTM to LCC) - self.center_lons, self.center_lats = projection(x, y, inverse=True) - - return {'data': self.center_lats}, {'data': self.center_lons} + self.projection = Proj( + proj='lcc', + ellps='WGS84', + R=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_1=kwargs['lat_1'], + lat_2=kwargs['lat_2'], + lon_0=kwargs['lon_0'], + lat_0=kwargs['lat_0'], + to_meter=1, + x_0=0, + y_0=0, + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + k_0=1.0 + ) + + # Calculate centre latitudes and longitudes (UTM to LCC) + centre_lon_data, centre_lat_data = self.projection(x, y, inverse=True) + centre_lat = {'data': centre_lat_data} + centre_lon = {'data': centre_lon_data} + + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ + + # Get centre latitudes + model_centre_lat = self.lat + + # Get centre longitudes + model_centre_lon = self.lon + + return model_centre_lat, model_centre_lon + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. 
+ + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. + """ + + # Get grid resolution + inc_x = np.abs(np.mean(np.diff(self.x['data']))) + inc_y = np.abs(np.mean(np.diff(self.y['data']))) + + # Get bounds for rotated coordinates + y_bnds = self.create_single_spatial_bounds(self.y['data'], inc_y) + x_bnds = self.create_single_spatial_bounds(self.x['data'], inc_x) + + # Get rotated latitudes for grid edge + left_edge_y = np.append(y_bnds.flatten()[::2], y_bnds.flatten()[-1]) + right_edge_y = np.flip(left_edge_y, 0) + top_edge_y = np.repeat(y_bnds[-1][-1], len(self.x['data']) - 1) + bottom_edge_y = np.repeat(y_bnds[0][0], len(self.x['data'])) + y_grid_edge = np.concatenate((left_edge_y, top_edge_y, right_edge_y, bottom_edge_y)) + + # Get rotated longitudes for grid edge + left_edge_x = np.repeat(x_bnds[0][0], len(self.y['data']) + 1) + top_edge_x = x_bnds.flatten()[1:-1:2] + right_edge_x = np.repeat(x_bnds[-1][-1], len(self.y['data']) + 1) + bottom_edge_x = np.flip(x_bnds.flatten()[:-1:2], 0) + x_grid_edge = np.concatenate((left_edge_x, top_edge_x, right_edge_x, bottom_edge_x)) + + # Get edges for regular coordinates + self.projection = Proj( + proj='lcc', + ellps='WGS84', + R=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_1=float(self.projection_data['standard_parallel'][0]), + lat_2=float(self.projection_data['standard_parallel'][1]), + lon_0=float(self.projection_data['longitude_of_central_meridian']), + lat_0=float(self.projection_data['latitude_of_projection_origin']), + to_meter=1, + x_0=0, + y_0=0, + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + k_0=1.0 + ) + grid_edge_lon_data, grid_edge_lat_data = self.projection(x_grid_edge, y_grid_edge, inverse=True) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((grid_edge_lon_data, grid_edge_lat_data)).T + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon + + def create_spatial_bounds(self): + """ + Calculate longitude and latitude bounds and set them. 
+ """ + + # Calculate LCC coordinates bounds + inc_x = np.abs(np.mean(np.diff(self._x['data']))) + x_bnds = self.create_single_spatial_bounds(np.array([self._y['data']] * len(self._x['data'])).T, + inc_x, spatial_nv=4, inverse=True) + + inc_y = np.abs(np.mean(np.diff(self._y['data']))) + y_bnds = self.create_single_spatial_bounds(np.array([self._x['data']] * len(self._y['data'])), + inc_y, spatial_nv=4) + + # Transform LCC bounds to regular bounds + self.projection = Proj( + proj='lcc', + ellps='WGS84', + R=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_1=float(self.projection_data['standard_parallel'][0]), + lat_2=float(self.projection_data['standard_parallel'][1]), + lon_0=float(self.projection_data['longitude_of_central_meridian']), + lat_0=float(self.projection_data['latitude_of_projection_origin']), + to_meter=1, + x_0=0, + y_0=0, + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + k_0=1.0 + ) + lon_bnds, lat_bnds = self.projection(x_bnds, y_bnds, inverse=True) + + # Obtain regular coordinates bounds + self._lat_bnds = deepcopy(lat_bnds) + self.lat_bnds = lat_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] + + self._lon_bnds = deepcopy(lon_bnds) + self.lon_bnds = lon_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] + + return None + @staticmethod def _set_var_crs(var): """ @@ -249,7 +403,9 @@ class LCCNes(Nes): var : Variable netCDF4-python variable object. """ + var.grid_mapping = 'Lambert_conformal' + var.coordinates = "lat lon" return None @@ -271,7 +427,7 @@ class LCCNes(Nes): return None - def to_grib2(self, path, grib_keys, grib_template_path, info=False): + def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=False, info=False): """ Write output file with grib2 format. @@ -280,10 +436,43 @@ class LCCNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ - raise NotImplementedError("Grib2 format cannot be write in a Lambert Conformal Conic projection.") + + raise NotImplementedError("Grib2 format cannot be written in a Lambert Conformal Conic projection.") + + def create_shapefile(self): + """ + Create spatial geodataframe (shapefile). 
+ """ + + # Get latitude and longitude cell boundaries + if self._lat_bnds is None or self._lon_bnds is None: + self.create_spatial_bounds() + + # Reshape arrays to create geometry + aux_b_lats = self.lat_bnds.reshape((self.lat_bnds.shape[0] * self.lat_bnds.shape[1], self.lat_bnds.shape[2])) + aux_b_lons = self.lon_bnds.reshape((self.lon_bnds.shape[0] * self.lon_bnds.shape[1], self.lon_bnds.shape[2])) + + # Create dataframe cointaining all polygons + geometry = [] + for i in range(aux_b_lons.shape[0]): + geometry.append(Polygon([(aux_b_lons[i, 0], aux_b_lats[i, 0]), + (aux_b_lons[i, 1], aux_b_lats[i, 1]), + (aux_b_lons[i, 2], aux_b_lats[i, 2]), + (aux_b_lons[i, 3], aux_b_lats[i, 3]), + (aux_b_lons[i, 0], aux_b_lats[i, 0])])) + fids = np.arange(self._lat['data'].shape[0] * self._lat['data'].shape[1]) + fids = fids.reshape((self._lat['data'].shape[0],self._lat['data'].shape[1])) + fids = fids[self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + gdf = gpd.GeoDataFrame(index=pd.Index(name='FID', data=fids.ravel()), + geometry=geometry, + crs="EPSG:4326") + self.shapefile = gdf + + return gdf diff --git a/nes/nc_projections/mercator_nes.py b/nes/nc_projections/mercator_nes.py index 37dd6fdb166091204a09b2799a4e33508c4805ec..8b79b9e6aa2c5dc7e8605ba45ac74635db31563f 100644 --- a/nes/nc_projections/mercator_nes.py +++ b/nes/nc_projections/mercator_nes.py @@ -1,8 +1,12 @@ #!/usr/bin/env python import numpy as np +import pandas as pd from cfunits import Units from pyproj import Proj +from copy import deepcopy +import geopandas as gpd +from shapely.geometry import Polygon from nes.nc_projections.default_nes import Nes @@ -38,23 +42,24 @@ class MercatorNes(Nes): Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(MercatorNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method, balanced=balanced, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, @@ -86,76 +91,97 @@ class MercatorNes(Nes): self.free_vars('crs') - @staticmethod def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. 
         info: bool
-            Indicates if you want to get reading/writing info
+            Indicates if you want to get reading/writing info.
         dataset: Dataset
-            NetCDF4-python Dataset to initialize the class
+            NetCDF4-python Dataset to initialize the class.
         xarray: bool:
-            (Not working) Indicates if you want to use xarray as default
+            (Not working) Indicates if you want to use xarray as default.
         avoid_first_hours : int
             Number of hours to remove from first time steps.
         avoid_last_hours : int
             Number of hours to remove from last time steps.
         parallel_method : str
-            Indicates the parallelization method that you want. Default over Y axis
-            accepted values: ['X', 'Y', 'T']
+            Indicates the parallelization method that you want. Default: 'Y'.
+            Accepted values: ['X', 'Y', 'T'].
         """
+
         new = MercatorNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced,
                           parallel_method=parallel_method, avoid_first_hours=avoid_first_hours,
                           avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level)
+
         return new

+    def filter_coordinates_selection(self):
+
+        idx = self.get_idx_intervals()
+
+        self.y = self._get_coordinate_values(self._y, 'Y')
+        self.x = self._get_coordinate_values(self._x, 'X')
+
+        self._y['data'] = self._y['data'][idx['idx_y_min']:idx['idx_y_max']]
+        self._x['data'] = self._x['data'][idx['idx_x_min']:idx['idx_x_max']]
+
+        super(MercatorNes, self).filter_coordinates_selection()
+
+        return None
+
     def get_projection_data(self, create_nes, **kwargs):
         """
-        Read the projection data
+        Read the projection data.

         Returns
         -------
-        projection : dict
-            Dictionary with the projection data
+        projection_data : dict
+            Dictionary with the projection data.
         """

         if create_nes:
-            projection = {'data': None,
-                          'dimensions': (),
-                          'grid_mapping_name': 'mercator',
-                          'standard_parallel': [kwargs['lat_ts']],  # TODO: Check if True
-                          'longitude_of_projection_origin': kwargs['lon_0'],
-                          }
+            projection_data = {'data': None,
+                               'dimensions': (),
+                               'grid_mapping_name': 'mercator',
+                               'standard_parallel': [kwargs['lat_ts']],  # TODO: Check if True
+                               'longitude_of_projection_origin': kwargs['lon_0'],
+                               }
         else:
-            projection = self.variables['mercator']
+            projection_data = self.variables['mercator']
             self.free_vars('mercator')

-        return projection
+        return projection_data

     def _create_dimensions(self, netcdf):
         """
-        Create the 'y', 'x' dimensions and the super dimensions ('lev', 'time').
+        Create 'y', 'x' and 'spatial_nv' dimensions and the super dimensions ('lev', 'time').

         Parameters
         ----------
         netcdf : Dataset
             NetCDF object.
""" + super(MercatorNes, self)._create_dimensions(netcdf) + # Create y and x dimensions netcdf.createDimension('y', len(self._y['data'])) netcdf.createDimension('x', len(self._x['data'])) + # Create spatial_nv (number of vertices) dimension + if (self._lat_bnds is not None) and (self._lon_bnds is not None): + netcdf.createDimension('spatial_nv', 4) + return None def _create_dimension_variables(self, netcdf): @@ -172,33 +198,33 @@ class MercatorNes(Nes): # MERCATOR Y COORDINATES y = netcdf.createVariable('y', self._y['data'].dtype, ('y',)) - y.long_name = "y coordinate of projection" + y.long_name = 'y coordinate of projection' if 'units' in self._y.keys(): y.units = Units(self._y['units'], formatted=True).units else: y.units = 'm' - y.standard_name = "projection_y_coordinate" + y.standard_name = 'projection_y_coordinate' if self.size > 1: y.set_collective(True) y[:] = self._y['data'] # MERCATOR X COORDINATES x = netcdf.createVariable('x', self._x['data'].dtype, ('x',)) - x.long_name = "x coordinate of projection" + x.long_name = 'x coordinate of projection' if 'units' in self._x.keys(): x.units = Units(self._x['units'], formatted=True).units else: x.units = 'm' - x.standard_name = "projection_x_coordinate" + x.standard_name = 'projection_x_coordinate' if self.size > 1: x.set_collective(True) x[:] = self._x['data'] return None - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. Parameters ---------- @@ -222,19 +248,129 @@ class MercatorNes(Nes): x = np.array([self._x['data']] * len(self._y['data'])) y = np.array([self._y['data']] * len(self._x['data'])).T - projection = Proj( - proj='merc', - a=6370000.00, - b=6370000.00, - lat_ts=kwargs['lat_ts'], - lon_0=kwargs['lon_0'], - ) + self.projection = Proj( + proj='merc', + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + b=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_ts=kwargs['lat_ts'], + lon_0=kwargs['lon_0'], + ) + + # Calculate centre latitudes and longitudes (UTM to Mercator) + centre_lon_data, centre_lat_data = self.projection(x, y, inverse=True) + centre_lat = {'data': centre_lat_data} + centre_lon = {'data': centre_lon_data} + + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ - # Calculate center latitudes and longitudes (UTM to Mercator) - self.center_lons, self.center_lats = projection(x, y, inverse=True) + # Get centre latitudes + model_centre_lat = self.lat - return {'data': self.center_lats}, {'data': self.center_lons} + # Get centre longitudes + model_centre_lon = self.lon + return model_centre_lat, model_centre_lon + + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. 
+ """ + + # Get grid resolution + inc_x = np.abs(np.mean(np.diff(self.x['data']))) + inc_y = np.abs(np.mean(np.diff(self.y['data']))) + + # Get bounds for rotated coordinates + y_bounds = self.create_single_spatial_bounds(self.y['data'], inc_y) + x_bounds = self.create_single_spatial_bounds(self.x['data'], inc_x) + + # Get rotated latitudes for grid edge + left_edge_y = np.append(y_bounds.flatten()[::2], y_bounds.flatten()[-1]) + right_edge_y = np.flip(left_edge_y, 0) + top_edge_y = np.repeat(y_bounds[-1][-1], len(self.x['data']) - 1) + bottom_edge_y = np.repeat(y_bounds[0][0], len(self.x['data'])) + y_grid_edge = np.concatenate((left_edge_y, top_edge_y, right_edge_y, bottom_edge_y)) + + # Get rotated longitudes for grid edge + left_edge_x = np.repeat(x_bounds[0][0], len(self.y['data']) + 1) + top_edge_x = x_bounds.flatten()[1:-1:2] + right_edge_x = np.repeat(x_bounds[-1][-1], len(self.y['data']) + 1) + bottom_edge_x = np.flip(x_bounds.flatten()[:-1:2], 0) + x_grid_edge = np.concatenate((left_edge_x, top_edge_x, right_edge_x, bottom_edge_x)) + + # Get edges for regular coordinates + self.projection = Proj( + proj='merc', + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + b=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_ts=float(self.projection_data['standard_parallel'][0]), + lon_0=float(self.projection_data['longitude_of_projection_origin']), + ) + grid_edge_lon_data, grid_edge_lat_data = self.projection(x_grid_edge, y_grid_edge, inverse=True) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((grid_edge_lon_data, grid_edge_lat_data)).T + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon + + def create_spatial_bounds(self): + """ + Calculate longitude and latitude bounds and set them. + """ + + # Calculate Mercator coordinates bounds + inc_x = np.abs(np.mean(np.diff(self._x['data']))) + x_bnds = self.create_single_spatial_bounds(np.array([self._y['data']] * len(self._x['data'])).T, + inc_x, spatial_nv=4, inverse=True) + + inc_y = np.abs(np.mean(np.diff(self._y['data']))) + y_bnds = self.create_single_spatial_bounds(np.array([self._x['data']] * len(self._y['data'])), + inc_y, spatial_nv=4) + + # Transform Mercator bounds to regular bounds + self.projection = Proj( + proj='merc', + a=6378137.0, # WGS84_SEMIMAJOR_AXIS (as defined in Cartopy source code) + b=6356752.3142, # WGS84_SEMIMINOR_AXIS (as defined in Cartopy source code) + lat_ts=float(self.projection_data['standard_parallel'][0]), + lon_0=float(self.projection_data['longitude_of_projection_origin']), + ) + lon_bnds, lat_bnds = self.projection(x_bnds, y_bnds, inverse=True) + + # Obtain regular coordinates bounds + self._lat_bnds = deepcopy(lat_bnds) + self.lat_bnds = lat_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] + + self._lon_bnds = deepcopy(lon_bnds) + self.lon_bnds = lon_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] + + return None + @staticmethod def _set_var_crs(var): """ @@ -245,7 +381,9 @@ class MercatorNes(Nes): var : Variable netCDF4-python variable object. 
""" + var.grid_mapping = 'mercator' + var.coordinates = "lat lon" return None @@ -256,7 +394,7 @@ class MercatorNes(Nes): Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ mapping = netcdf.createVariable('mercator', 'c') @@ -266,7 +404,7 @@ class MercatorNes(Nes): return None - def to_grib2(self, path, grib_keys, grib_template_path, info=False): + def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=False, info=False): """ Write output file with grib2 format. @@ -275,10 +413,43 @@ class MercatorNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ - raise NotImplementedError("Grib2 format cannot be write in a Mercator projection.") + + raise NotImplementedError("Grib2 format cannot be written in a Mercator projection.") + + def create_shapefile(self): + """ + Create spatial geodataframe (shapefile). + """ + + # Get latitude and longitude cell boundaries + if self._lat_bnds is None or self._lon_bnds is None: + self.create_spatial_bounds() + + # Reshape arrays to create geometry + aux_b_lats = self.lat_bnds.reshape((self.lat_bnds.shape[0] * self.lat_bnds.shape[1], self.lat_bnds.shape[2])) + aux_b_lons = self.lon_bnds.reshape((self.lon_bnds.shape[0] * self.lon_bnds.shape[1], self.lon_bnds.shape[2])) + + # Create dataframe cointaining all polygons + geometry = [] + for i in range(aux_b_lons.shape[0]): + geometry.append(Polygon([(aux_b_lons[i, 0], aux_b_lats[i, 0]), + (aux_b_lons[i, 1], aux_b_lats[i, 1]), + (aux_b_lons[i, 2], aux_b_lats[i, 2]), + (aux_b_lons[i, 3], aux_b_lats[i, 3]), + (aux_b_lons[i, 0], aux_b_lats[i, 0])])) + fids = np.arange(self._lat['data'].shape[0] * self._lat['data'].shape[1]) + fids = fids.reshape((self._lat['data'].shape[0],self._lat['data'].shape[1])) + fids = fids[self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + gdf = gpd.GeoDataFrame(index=pd.Index(name='FID', data=fids.ravel()), + geometry=geometry, + crs="EPSG:4326") + self.shapefile = gdf + + return gdf diff --git a/nes/nc_projections/points_nes.py b/nes/nc_projections/points_nes.py index 3dd99455534488d692299f711afba6fcedb68d9e..d5ef1f2457f9dbfb8af23fcf7e463e7d976c6617 100644 --- a/nes/nc_projections/points_nes.py +++ b/nes/nc_projections/points_nes.py @@ -2,12 +2,12 @@ import sys import warnings -from copy import deepcopy - import numpy as np -from netCDF4 import Dataset, date2num, stringtochar +import pandas as pd +from copy import deepcopy +import geopandas as gpd +from netCDF4 import date2num, stringtochar from numpy.ma.core import MaskError - from .default_nes import Nes @@ -33,23 +33,25 @@ class PointsNes(Nes): avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, times=None, strlen=75, **kwargs): """ - Initialize the PointsNes class + Initialize the PointsNes class. Parameters ---------- comm: MPI.Communicator - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. 
dataset: Dataset, None - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'T'] + Indicates the parallelization method that you want. Default: 'X'. + accepted values: ['X', 'T']. + strlen: int + Maximum length of strings in NetCDF. Default: 75. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int @@ -63,20 +65,20 @@ class PointsNes(Nes): times=times, **kwargs) if create_nes: - # Complete dimensions - self._station = {'data': np.arange(len(self._lon['data']))} - # Dimensions screening self.lat = self._get_coordinate_values(self._lat, 'X') self.lon = self._get_coordinate_values(self._lon, 'X') - self.station = deepcopy(self._station) self.strlen = strlen else: - self._station = self._get_coordinate_dimension(['station']) - self.station = self._get_coordinate_values(self._station, 'X') - + # Dimensions screening self.strlen = self._get_strlen() + # Complete dimensions + self._station = {'data': np.arange(len(self._lon['data']))} + + # Dimensions screening + self.station = self._get_coordinate_values(self._station, 'X') + # Set axis limits for parallel writing self.write_axis_limits = self.get_write_axis_limits() @@ -88,36 +90,38 @@ class PointsNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'T'] + Indicates the parallelization method that you want. Default: 'X'. + accepted values: ['X', 'T']. """ + new = PointsNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new def _create_dimensions(self, netcdf): """ - Create the 'lev', 'time_nv', 'station' dimensions. + Create 'lev', 'time_nv', 'station', 'spatial_nv' and 'strlen' dimensions. Parameters ---------- @@ -125,13 +129,18 @@ class PointsNes(Nes): NetCDF object. 
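A hedged sketch of opening an existing station file through the `new` constructor shown above; the path is a placeholder and `load()` is assumed to be the generic NES loader:

```python
# Hedged sketch: 'stations.nc' is a placeholder path.
from nes.nc_projections.points_nes import PointsNes

points = PointsNes.new(path='stations.nc', info=True, parallel_method='X')
points.load()
print(points.station['data'])   # 0..n_stations-1, derived from the longitude axis
```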
""" + # Create time dimension netcdf.createDimension('time', None) + + # Create time_nv (number of vertices) dimension if self._time_bnds is not None: netcdf.createDimension('time_nv', 2) + # Create station dimension # The number of longitudes is equal to the number of stations netcdf.createDimension('station', len(self._lon['data'])) + # Create string length dimension if hasattr(self, 'strlen'): if self.strlen is not None: netcdf.createDimension('strlen', self.strlen) @@ -140,7 +149,7 @@ class PointsNes(Nes): def _create_dimension_variables(self, netcdf): """ - Create the 'time', 'time_bnds' and 'station' variables. + Create the 'time', 'time_bnds', 'station', 'lat', 'lat_bnds', 'lon' and 'lon_bnds' variables. Parameters ---------- @@ -151,10 +160,10 @@ class PointsNes(Nes): # TIMES time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) time_var.units = 'hours since {0}'.format( - self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S")) - time_var.standard_name = "time" + self._time[self.get_time_id(self.hours_start, first=True)].strftime('%Y-%m-%d %H:%M:%S')) + time_var.standard_name = 'time' time_var.calendar = 'standard' - time_var.long_name = "time" + time_var.long_name = 'time' if self._time_bnds is not None: time_var.bounds = 'time_bnds' if self.size > 1: @@ -166,64 +175,64 @@ class PointsNes(Nes): # TIME BOUNDS if self._time_bnds is not None: time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, - complevel=self.zip_lvl) + complevel=self.zip_lvl) if self.size > 1: time_bnds_var.set_collective(True) time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') # STATIONS - stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - stations.units = "" - stations.axis = "X" - stations.long_name = "" - stations.standard_name = "station" + stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, + complevel=self.zip_lvl) + stations.units = '' + stations.axis = 'X' + stations.long_name = '' + stations.standard_name = 'station' if self.size > 1: stations.set_collective(True) stations[:] = self._station['data'] - return None - - def _get_coordinate_dimension(self, possible_names): - """ - Read the coordinate dimension data. - - This will read the complete data of the coordinate - - Parameters - ---------- - possible_names: list, str - List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']) - - Returns - ------- - nc_var : dict - Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. 
- """ - - nc_var = super(PointsNes, self)._get_coordinate_dimension(possible_names) - - if isinstance(possible_names, str): - possible_names = [possible_names] + # LATITUDES + lat = netcdf.createVariable('lat', np.float64, self._lat_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lat.units = 'degrees_north' + lat.axis = 'Y' + lat.long_name = 'latitude coordinate' + lat.standard_name = 'latitude' + if self._lat_bnds is not None: + lat.bounds = 'lat_bnds' + if self.size > 1: + lat.set_collective(True) + lat[:] = self._lat['data'] - if 'station' in possible_names: - nc_var['data'] = np.arange(len(self._lon['data'])) + # LONGITUDES + lon = netcdf.createVariable('lon', np.float64, self._lon_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lon.units = 'degrees_east' + lon.axis = 'X' + lon.long_name = 'longitude coordinate' + lon.standard_name = 'longitude' + if self._lon_bnds is not None: + lon.bounds = 'lon_bnds' + if self.size > 1: + lon.set_collective(True) + lon[:] = self._lon['data'] - return nc_var + return None def _get_coordinate_values(self, coordinate_info, coordinate_axis): """ - Get the coordinate data of the current portion + Get the coordinate data of the current portion. Parameters ---------- coordinate_info : dict, list Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. coordinate_axis : str - Name of the coordinate to extract. Accepted values: ['X'] + Name of the coordinate to extract. Accepted values: ['X']. Returns ------- values : dict - Dictionary with the portion of data corresponding to the rank + Dictionary with the portion of data corresponding to the rank. """ values = deepcopy(coordinate_info) @@ -243,50 +252,6 @@ class PointsNes(Nes): return values - def _get_lazy_variables_not_used(self): - """ - Get all the variables information. - - Returns - ------- - variables : dict - Dictionary with the variable name as key and another dictionary as value. - De value dictionary will have the 'data' key with None as value and all the variable attributes as the - other keys. - e.g. - {'var_name_1': {'data': None, 'attr_1': value_1_1, 'attr_2': value_1_2, ...}, - 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, - ...} - """ - - if self.is_xarray: - variables = self.dataset.variables - else: - if self.master: - variables = {} - for var_name, var_info in self.netcdf.variables.items(): - variables[var_name] = {} - variables[var_name]['data'] = None - # Remove strlen as a dimension - if 'strlen' in var_info.dimensions: - variables[var_name]['dimensions'] = tuple([dim for dim in var_info.dimensions - if dim != 'strlen']) - else: - variables[var_name]['dimensions'] = var_info.dimensions - - for attrname in var_info.ncattrs(): - # Avoiding some attributes - if attrname not in ['missing_value', '_FillValue']: - value = getattr(var_info, attrname) - if value in ['unitless', '-']: - value = '' - variables[var_name][attrname] = value - else: - variables = None - variables = self.comm.bcast(variables, root=0) - - return variables - def _get_strlen(self): """ Read the string length dimension of some variables. @@ -294,7 +259,7 @@ class PointsNes(Nes): Returns ------- int, None - String length. None means no string data + String length. None means no string data. """ if 'strlen' in self.netcdf.dimensions: @@ -311,7 +276,7 @@ class PointsNes(Nes): Parameters ---------- var_name : str - Name of the variable to read + Name of the variable to read. 
        Returns
        -------
@@ -328,12 +293,12 @@ class PointsNes(Nes):
         elif len(var_dims) == 2:
             if 'strlen' in nc_var.dimensions:
                 data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], :]
-                data = np.array([''.join(i) for i in np.char.decode(data)])
+                data = np.array([''.join(i) for i in data])
             else:
                 data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'],
                               self.read_axis_limits['x_min']:self.read_axis_limits['x_max']]
         else:
-            raise NotImplementedError('Error with {0}. Only can be read netCDF with 2 dimensions or less'.format(
+            raise NotImplementedError("Error with {0}. Only netCDF files with 2 dimensions or fewer can be read".format(
                 var_name))

         # Missing to nan
@@ -346,24 +311,43 @@ class PointsNes(Nes):

     def _create_variables(self, netcdf, chunking=False):
         """
-        Create the netCDF file variables
+        Create the netCDF file variables.

         Parameters
         ----------
         netcdf : Dataset
-            netcdf4-python opened Dataset
+            netcdf4-python opened Dataset.
         chunking : bool
-            Indicates if you want to chunk the output netCDF
+            Indicates if you want to chunk the output netCDF.
         """

         if self.variables is not None:
             for i, (var_name, var_dict) in enumerate(self.variables.items()):
-                if var_dict['data'] is not None:
-
+
+                # Get data type
+                if 'dtype' in var_dict.keys():
+                    var_dtype = var_dict['dtype']
+                    if var_dtype != var_dict['data'].dtype:
+                        msg = "WARNING!!! "
+                        msg += "Different data types for variable {0}. ".format(var_name)
+                        msg += "Input dtype={0}, data dtype={1}.".format(var_dtype,
+                                                                         var_dict['data'].dtype)
+                        warnings.warn(msg)
+                        try:
+                            var_dict['data'] = var_dict['data'].astype(var_dtype)
+                        except Exception as e:  # TODO: Detect exception
+                            raise TypeError("It was not possible to cast the data to the input dtype.") from e
+                else:
+                    var_dtype = var_dict['data'].dtype
+
+                # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST)
+                if var_dtype == np.dtype(object):
+                    var_dict['data'] = var_dict['data'].astype(str)
+                    var_dtype = var_dict['data'].dtype
+
                 # Get dimensions when reading datasets
                 if 'dimensions' in var_dict.keys():
-                    # Get dimensions
                     var_dims = var_dict['dimensions']
                 # Get dimensions when creating new datasets
                 else:
@@ -373,18 +357,19 @@ class PointsNes(Nes):
                     else:
                         # For data that is dependent on time and station (e.g.
PM10) var_dims = ('time',) + self._var_dim - - if var_dict['data'].dtype == np.str: - # Add strlen as a dimension if needed - var_dims += ('strlen',) - # Convert list of strings to chars + # Convert list of strings to chars for parallelization try: unicode_type = len(max(var_dict['data'], key=len)) if ((var_dict['data'].dtype == np.dtype(' 0, complevel=self.zip_lvl, chunksizes=chunk_size) - - if self.print_info: - print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + + if self.info: + print('Rank {0:03d}: Var {1} created ({2}/{3})'.format( self.rank, var_name, i + 1, len(self.variables))) if self.size > 1: var.set_collective(True) - if self.print_info: - print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + if self.info: + print('Rank {0:03d}: Var {1} collective ({2}/{3})'.format( self.rank, var_name, i + 1, len(self.variables))) - + for att_name, att_value in var_dict.items(): if att_name == 'data': if len(att_value.shape) == 1: @@ -457,7 +421,7 @@ class PointsNes(Nes): var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) elif len(att_value.shape) == 2: - if 'strlen' in var_dict['dimensions']: + if 'strlen' in var_dims: try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value except IndexError: @@ -482,25 +446,27 @@ class PointsNes(Nes): var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) - if self.print_info: - print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + if self.info: + print('Rank {0:03d}: Var {1} data ({2}/{3})'.format(self.rank, var_name, i + 1, len(self.variables))) elif att_name not in ['chunk_size', 'var_dims', 'dimensions', 'dtype']: var.setncattr(att_name, att_value) self._set_var_crs(var) - if self.print_info: - print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + if self.info: + print('Rank {0:03d}: Var {1} completed ({2}/{3})'.format(self.rank, var_name, i + 1, len(self.variables))) except Exception as e: print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) - # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), # file=sys.stderr) raise e else: - msg = 'WARNING!!! ' - msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + msg = "WARNING!!! " + msg += "Variable {0} was not loaded. It will not be written.".format(var_name) warnings.warn(msg) + return None + def _gather_data(self): """ Gather all the variable data into the MPI rank 0 to perform a serial write. @@ -530,8 +496,8 @@ class PointsNes(Nes): # dimensions = (time, station) axis = 1 else: - msg = 'The points NetCDF must have ' - msg += 'surface values (without levels).' + msg = "The points NetCDF must have " + msg += "surface values (without levels)." raise NotImplementedError(msg) elif self.parallel_method == 'T': # concatenate over time @@ -548,15 +514,15 @@ class PointsNes(Nes): # dimensions = (time, station) axis = 0 else: - raise NotImplementedError('The points NetCDF must only have surface values (without levels).') + raise NotImplementedError("The points NetCDF must only have surface values (without levels).") else: raise NotImplementedError( "Parallel method '{meth}' is not implemented. 
Use one of these: {accept}".format( meth=self.parallel_method, accept=['X', 'T'])) data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -566,9 +532,9 @@ class PointsNes(Nes): return data_list - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from points. + Calculate centre latitudes and longitudes from points. Parameters ---------- @@ -576,13 +542,15 @@ class PointsNes(Nes): NetCDF object. """ - # Calculate center latitudes - self.center_lats = kwargs['lat'] + # Calculate centre latitudes + centre_lat_data = kwargs['lat'] + centre_lat = {'data': centre_lat_data} - # Calculate center longitudes - self.center_lons = kwargs['lon'] + # Calculate centre longitudes + centre_lon_data = kwargs['lon'] + centre_lon = {'data': centre_lon_data} - return {'data': self.center_lats}, {'data': self.center_lons} + return centre_lat, centre_lon def _create_metadata(self, netcdf): """ @@ -594,30 +562,63 @@ class PointsNes(Nes): NetCDF object. """ - # LATITUDES - lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - lats.units = "degrees_north" - lats.axis = "Y" - lats.long_name = "latitude coordinate" - lats.standard_name = "latitude" - if self.size > 1: - lats.set_collective(True) - lats[:] = self._lat['data'] + return None - # LONGITUDES - lons = netcdf.createVariable('lon', np.float64, self._lon_dim, - zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - lons.units = "degrees_east" - lons.axis = "X" - lons.long_name = "longitude coordinate" - lons.standard_name = "longitude" - if self.size > 1: - lons.set_collective(True) - lons[:] = self._lon['data'] + def create_spatial_bounds(self): + """ + Calculate longitude and latitude bounds and set them. 
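The coordinate convention returned by `_create_centre_coordinates` above is the NES `{'data': array}` dictionary wrapper; for point datasets the centre coordinates are simply the station coordinates passed at creation time. An illustrative sketch (not library code; the station coordinates are made up):

```python
# Illustrative sketch of the {'data': ...} wrapper returned by
# _create_centre_coordinates; values are invented.
import numpy as np

def create_centre_coordinates(lat, lon):
    # Wrap the station coordinates in the NES coordinate-dictionary layout
    centre_lat = {'data': np.asarray(lat)}
    centre_lon = {'data': np.asarray(lon)}
    return centre_lat, centre_lon

centre_lat, centre_lon = create_centre_coordinates(lat=[41.39, 40.42],
                                                   lon=[2.17, -3.70])
print(centre_lat['data'])  # [41.39 40.42]
```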
+ """ - return None + raise NotImplementedError("Spatial bounds cannot be created for points datasets.") - def to_grib2(self, path, grib_keys, grib_template_path, info=False): + def to_providentia(self, model_centre_lon, model_centre_lat, grid_edge_lon, grid_edge_lat): + """ + Transform a PointsNes into a PointsNesProvidentia object + + Returns + ---------- + points_nes_providentia : nes.Nes + Points Nes Providentia Object + """ + + from .points_nes_providentia import PointsNesProvidentia + + points_nes_providentia = PointsNesProvidentia(comm=self.comm, + info=self.info, + balanced=self.balanced, + parallel_method=self.parallel_method, + avoid_first_hours=self.hours_start, + avoid_last_hours=self.hours_end, + first_level=self.first_level, + last_level=self.last_level, + create_nes=True, + times=self.time, + model_centre_lon=model_centre_lon, + model_centre_lat=model_centre_lat, + grid_edge_lon=grid_edge_lon, + grid_edge_lat=grid_edge_lat, + lat=self.lat['data'], + lon=self.lon['data'] + ) + + # Convert dimensions (time, lat, lon) to (time, station) for interpolated variables and reshape data + variables = {} + interpolated_variables = deepcopy(self.variables) + for var_name, var_info in interpolated_variables.items(): + variables[var_name] = {} + if var_info['dimensions'] == ('time', 'lat', 'lon') and len(var_info['data'].shape) == 2: + variables[var_name]['data'] = var_info['data'].T + variables[var_name]['dimensions'] = ('station', 'time') + else: + variables[var_name]['data'] = var_info['data'] + variables[var_name]['dimensions'] = var_info['dimensions'] + + # Set variables + points_nes_providentia.variables = variables + + return points_nes_providentia + + def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=False, info=False): """ Write output file with grib2 format. @@ -626,10 +627,51 @@ class PointsNes(Nes): path : str Path to the output file. grib_keys : dict - Dictionary with the grib2 keys + Dictionary with the grib2 keys. grib_template_path : str - Path to the grib2 file to use as template + Path to the grib2 file to use as template. info : bool Indicates if you want to print extra information during the process. """ - raise NotImplementedError("Grib2 format cannot be write with point data.") + + raise NotImplementedError("Grib2 format cannot be written with point data.") + + def create_shapefile(self): + """ + Create spatial geodataframe (shapefile). + """ + + # Create dataframe cointaining all points + geometry = gpd.points_from_xy(self.lon['data'], self.lat['data']) + fids = np.arange(len(self._lon['data'])) + fids = fids[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + gdf = gpd.GeoDataFrame(index=pd.Index(name='FID', data=fids), + geometry=geometry, + crs="EPSG:4326") + self.shapefile = gdf + + return gdf + + @staticmethod + def _get_axis_index_(axis): + if axis == 'T': + value = 0 + elif axis == 'X': + value = 1 + else: + raise ValueError("Unknown axis: {0}".format(axis)) + return value + + @staticmethod + def _set_var_crs(var): + """ + Set the grid_mapping + + Parameters + ---------- + var : Variable + netCDF4-python variable object. 
+ """ + var.coordinates = "lat lon" + + return None diff --git a/nes/nc_projections/points_nes_ghost.py b/nes/nc_projections/points_nes_ghost.py index 9b8ab4c75d4af6660b6fcc2eaf2dcf40b95c7f39..7951bcfe77fbea75d2b68e82635f2435ef085fc7 100644 --- a/nes/nc_projections/points_nes_ghost.py +++ b/nes/nc_projections/points_nes_ghost.py @@ -4,6 +4,7 @@ import sys import warnings import numpy as np from numpy.ma.core import MaskError +from netCDF4 import stringtochar, date2num from copy import deepcopy from .points_nes import PointsNes @@ -13,51 +14,56 @@ class PointsNesGHOST(PointsNes): Attributes ---------- - _qa : tuple - Tuple with the name of the dimensions of the quality assurance (qa) flag values. - ('qa',) for a points grid. - _flag : tuple - Tuple with the name of the dimensions of the data flag values. - ('flag',) for a points grid. + _qa : dict + Quality flags (GHOST checks) dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _flag : dict + Data flags (given by data provider) dictionary with the complete 'data' key for all the values and the rest of + the attributes. + _qa : dict + Quality flags (GHOST checks) dictionary with the portion of 'data' corresponding to the rank values. + _flag : dict + Data flags (given by data provider) dictionary with the portion of 'data' corresponding to the rank values. """ def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, times=None, **kwargs): """ - Initialize the PointsNesGHOST class + Initialize the PointsNesGHOST class. Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X'] + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. first_level : int - Index of the first level to use + Index of the first level to use. last_level : int, None Index of the last level to use. None if it is the last. create_nes : bool - Indicates if ypu want to create the object from scratch (True) or trough an existen file. + Indicates if you want to create the object from scratch (True) or through an existing file. times : list, None List of times to substitute the current ones while creation. kwargs : - Projection dependent parameters to create it from scratch + Projection dependent parameters to create it from scratch. 
""" super(PointsNesGHOST, self).__init__(comm=comm, path=path, info=info, dataset=dataset, @@ -65,60 +71,66 @@ class PointsNesGHOST(PointsNes): avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, create_nes=create_nes, times=times, **kwargs) - + + # Complete dimensions self._flag = self._get_coordinate_dimension(['flag']) - self.flag = self._get_coordinate_values(self._flag, 'X') - self._qa = self._get_coordinate_dimension(['qa']) + + # Dimensions screening + self.flag = self._get_coordinate_values(self._flag, 'X') self.qa = self._get_coordinate_values(self._qa, 'X') @staticmethod def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the PointsNesGHOST class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X'] + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. balanced : bool - Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. first_level : int - Index of the first level to use + Index of the first level to use. last_level : int, None Index of the last level to use. None if it is the last. create_nes : bool - Indicates if ypu want to create the object from scratch (True) or trough an existen file.:q + Indicates if you want to create the object from scratch (True) or through an existing file. """ + new = PointsNesGHOST(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new def _create_dimensions(self, netcdf): """ - Create the 'N_flag_codes' and 'N_qa_codes' dimensions and the super dimensions ('time', 'station'). + Create 'N_flag_codes' and 'N_qa_codes' dimensions and the super dimensions + ('lev', 'time_nv', 'station', 'spatial_nv' and 'strlen'). 
Parameters ---------- @@ -128,6 +140,7 @@ class PointsNesGHOST(PointsNes): super(PointsNesGHOST, self)._create_dimensions(netcdf) + # Create N_flag_codes and N_qa_codes dimensions netcdf.createDimension('N_flag_codes', self._flag['data'].shape[2]) netcdf.createDimension('N_qa_codes', self._qa['data'].shape[2]) @@ -135,7 +148,7 @@ class PointsNesGHOST(PointsNes): def _create_dimension_variables(self, netcdf): """ - Create the 'station' variables. + Create the 'time', 'time_bnds', 'station', 'lat', 'lat_bnds', 'lon' and 'lon_bnds' variables. Parameters ---------- @@ -143,55 +156,91 @@ class PointsNesGHOST(PointsNes): NetCDF object. """ - super(PointsNesGHOST, self)._create_dimension_variables(netcdf) + # TIMES + time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + time_var.units = 'hours since {0}'.format( + self._time[self.get_time_id(self.hours_start, first=True)].strftime('%Y-%m-%d %H:%M:%S')) + time_var.standard_name = 'time' + time_var.calendar = 'standard' + time_var.long_name = 'time' + if self._time_bnds is not None: + time_var.bounds = 'time_bnds' + if self.size > 1: + time_var.set_collective(True) + time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True): + self.get_time_id(self.hours_end, first=False)], + time_var.units, time_var.calendar) + + # TIME BOUNDS + if self._time_bnds is not None: + time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, + complevel=self.zip_lvl) + if self.size > 1: + time_bnds_var.set_collective(True) + time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') + + # STATIONS + stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, + complevel=self.zip_lvl) + stations.units = '' + stations.axis = 'X' + stations.long_name = '' + stations.standard_name = 'station' + if self.size > 1: + stations.set_collective(True) + stations[:] = self._station['data'] - # N FLAG CODES - flag = netcdf.createVariable('flag', np.int64, ('station', 'time', 'N_flag_codes',), + # LATITUDES + lat = netcdf.createVariable('latitude', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - flag.units = "" - flag.axis = "" - flag.long_name = "" - flag.standard_name = "flag" + lat.units = 'degrees_north' + lat.axis = 'Y' + lat.long_name = 'latitude coordinate' + lat.standard_name = 'latitude' + if self._lat_bnds is not None: + lat.bounds = 'lat_bnds' if self.size > 1: - flag.set_collective(True) - flag[:] = self._flag['data'] + lat.set_collective(True) + lat[:] = self._lat['data'] - # N QA CODES - qa = netcdf.createVariable('qa', np.int64, ('station', 'time', 'N_qa_codes',), - zlib=self.zip_lvl > 0, complevel=self.zip_lvl) - qa.units = "" - qa.axis = "" - qa.long_name = "" - qa.standard_name = "N_qa_codes" + # LONGITUDES + lon = netcdf.createVariable('longitude', np.float64, self._lon_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lon.units = 'degrees_east' + lon.axis = 'X' + lon.long_name = 'longitude coordinate' + lon.standard_name = 'longitude' + if self._lon_bnds is not None: + lon.bounds = 'lon_bnds' if self.size > 1: - qa.set_collective(True) - qa[:] = self._qa['data'] - - self.free_vars(('flag', 'qa')) + lon.set_collective(True) + lon[:] = self._lon['data'] def erase_flags(self): + first_time_idx = self.get_time_id(self.hours_start, first=True) last_time_idx = self.get_time_id(self.hours_end, first=False) t_len = last_time_idx - first_time_idx 
self._qa['data'] = np.empty((len(self._lon['data']), t_len, 0)) self._flag['data'] = np.empty((len(self._lon['data']), t_len, 0)) + return None def _get_coordinate_values(self, coordinate_info, coordinate_axis): """ - Get the coordinate data of the current portion + Get the coordinate data of the current portion. Parameters ---------- coordinate_info : dict, list Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. coordinate_axis : str - Name of the coordinate to extract. Accepted values: ['X'] + Name of the coordinate to extract. Accepted values: ['X']. Returns ------- values : dict - Dictionary with the portion of data corresponding to the rank + Dictionary with the portion of data corresponding to the rank. """ values = deepcopy(coordinate_info) @@ -221,7 +270,7 @@ class PointsNesGHOST(PointsNes): Parameters ---------- var_name : str - Name of the variable to read + Name of the variable to read. Returns ------- @@ -232,13 +281,7 @@ class PointsNesGHOST(PointsNes): nc_var = self.netcdf.variables[var_name] var_dims = nc_var.dimensions - # Remove strlen (maximum number of characters that a string can have) from dimensions and join characters - if 'strlen' in nc_var.dimensions: - nc_var = np.array([''.join(i) for i in np.char.decode(nc_var[:].data)]) - var_dims = tuple([', '.join(dim for dim in var_dims if dim != 'strlen')]) - # Read data in 1 or 2 dimensions - # TODO: Ask Dene why x, t instead of t, x if len(var_dims) < 2: data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] elif len(var_dims) == 2: @@ -262,41 +305,57 @@ class PointsNesGHOST(PointsNes): def _create_variables(self, netcdf, chunking=False): """ - Create the netCDF file variables + Create the netCDF file variables. Parameters ---------- netcdf : Dataset - netcdf4-python opened Dataset + netcdf4-python opened Dataset. chunking : bool - Indicates if you want to chunk the output netCDF + Indicates if you want to chunk the output netCDF. """ if self.variables is not None: for i, (var_name, var_dict) in enumerate(self.variables.items()): if var_dict['data'] is not None: - # Define dimensions depending on the type of variable + # Get data type + if 'dtype' in var_dict.keys(): + var_dtype = var_dict['dtype'] + else: + var_dtype = var_dict['data'].dtype + + # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST) + if var_dtype == np.dtype(object): + var_dict['data'] = var_dict['data'].astype(str) + var_dtype = var_dict['data'].dtype + else: + var_dtype = var_dict['data'].dtype + + # Get dimensions if len(var_dict['data'].shape) == 1: # Metadata var_dims = self._var_dim elif len(var_dict['data'].shape) == 2: # Different from metadata (e.g. 
concentrations of pm10) var_dims = self._var_dim + ('time',) - else: - # Flags and qa variables - if var_name == 'flag': - var_dims = self._var_dim + ('time', 'N_flag_codes',) - elif var_name == 'qa': - var_dims = self._var_dim + ('time', 'N_qa_codes',) - - # ESDAC iwahashi landform and other vars are given as objects, transform to strings - if var_dict['data'].dtype == np.dtype(object): - var_dtype = np.dtype(str) - else: - var_dtype = var_dict['data'].dtype - if self.print_info: + # Convert list of strings to chars for parallelization + try: + unicode_type = len(max(var_dict['data'], key=len)) + if ((var_dict['data'].dtype == np.dtype(' 0, complevel=self.zip_lvl, chunksizes=chunk_size) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} created ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) if self.size > 1: var.set_collective(True) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( self.rank, var_name, i + 1, len(self.variables))) for att_name, att_value in var_dict.items(): if att_name == 'data': - print(att_value) - print(att_value.shape) if len(att_value.shape) == 1: try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value @@ -338,19 +395,31 @@ class PointsNesGHOST(PointsNes): var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, att_value.shape)) elif len(att_value.shape) == 2: - try: - var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], - self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value - except IndexError: - raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( - var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], - self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, - att_value.shape)) - except ValueError: - raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + if 'strlen' in var_dims: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + else: + try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], - self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. 
out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) elif len(att_value.shape) == 3: try: var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], @@ -369,18 +438,18 @@ class PointsNesGHOST(PointsNes): :].shape, att_value.shape)) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: var.setncattr(att_name, att_value) self._set_var_crs(var) - if self.print_info: + if self.info: print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) except Exception as e: print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) - # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), # file=sys.stderr) raise e else: @@ -388,34 +457,8 @@ class PointsNesGHOST(PointsNes): msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) warnings.warn(msg) - def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): - """ - Write the netCDF output file - - Parameters - ---------- - path : str - Path to the output netCDF file - compression_level : int - Level of compression (0 to 9) Default: 0 (no compression) - serial : bool - Indicates if you want to write in serial or not. Default: False - info : bool - Indicates if you want to print the information of each writing step by stdout Default: False - chunking : bool - Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False - """ - - if not serial: - msg = 'WARNING!!! ' - msg += 'GHOST datasets cannot be written in parallel yet.' - msg += 'Changing to serial mode.' - warnings.warn(msg) - super(PointsNesGHOST, self).to_netcdf(path, compression_level=compression_level, - serial=True, info=info, chunking=chunking) - return None - + def _gather_data(self): """ Gather all the variable data into the MPI rank 0 to perform a serial write. @@ -425,6 +468,7 @@ class PointsNesGHOST(PointsNes): data_list: dict Variables dictionary with all the data from all the ranks. """ + data_list = deepcopy(self.variables) for var_name, var_info in data_list.items(): try: @@ -469,8 +513,8 @@ class PointsNesGHOST(PointsNes): meth=self.parallel_method, accept=['X', 'T'])) data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) except Exception as e: - print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) - sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) print(e) sys.stderr.write(str(e)) # print(e, file=sys.stderr) @@ -478,4 +522,254 @@ class PointsNesGHOST(PointsNes): self.comm.Abort(1) raise e - return data_list \ No newline at end of file + return data_list + + def _create_metadata(self, netcdf): + """ + Create metadata variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. 
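The gather logic in `_gather_data` above follows the usual mpi4py pattern: every rank sends its portion to rank 0, which concatenates along the parallelized axis (axis 0 = station for parallel_method 'X'). A minimal sketch under those assumptions (shapes invented; run with e.g. `mpirun -n 4`):

```python
# Gather-and-concatenate sketch of the serial-write preparation step.
import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
local = np.full((5, 24), comm.Get_rank(), dtype=np.float64)  # this rank's (station, time) slice

gathered = comm.gather(local, root=0)  # list of arrays on rank 0, None elsewhere
if comm.Get_rank() == 0:
    full = np.concatenate(gathered, axis=0)  # stations from all ranks
    print(full.shape)  # (5 * comm.Get_size(), 24)
```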
+ """ + + # N FLAG CODES + flag = netcdf.createVariable('flag', np.int64, ('station', 'time', 'N_flag_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + flag.units = '' + flag.axis = '' + flag.long_name = '' + flag.standard_name = 'flag' + if self.size > 1: + flag.set_collective(True) + flag[:] = self._flag['data'] + + # N QA CODES + qa = netcdf.createVariable('qa', np.int64, ('station', 'time', 'N_qa_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + qa.units = '' + qa.axis = '' + qa.long_name = '' + qa.standard_name = 'N_qa_codes' + if self.size > 1: + qa.set_collective(True) + qa[:] = self._qa['data'] + + return None + + def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + """ + Write the netCDF output file. + + Parameters + ---------- + path : str + Path to the output netCDF file. + compression_level : int + Level of compression (0 to 9) Default: 0 (no compression). + serial : bool + Indicates if you want to write in serial or not. Default: False. + info : bool + Indicates if you want to print the information of each writing step by stdout Default: False. + chunking : bool + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False. + """ + + if not serial: + msg = 'WARNING!!! ' + msg += 'GHOST datasets cannot be written in parallel yet. ' + msg += 'Changing to serial mode.' + warnings.warn(msg) + super(PointsNesGHOST, self).to_netcdf(path, compression_level=compression_level, + serial=True, info=info, chunking=chunking) + + return None + + def to_points(self): + """ + Transform a PointsNesGHOST into a PointsNes object + + Returns + ---------- + points_nes : nes.Nes + Points Nes Object (without GHOST metadata variables) + """ + + points_nes = PointsNes(comm=self.comm, + info=self.info, + balanced=self.balanced, + parallel_method=self.parallel_method, + avoid_first_hours=self.hours_start, + avoid_last_hours=self.hours_end, + first_level=self.first_level, + last_level=self.last_level, + create_nes=True, + lat=self.lat['data'], + lon=self.lon['data'], + times=self.time + ) + + GHOST_version = str(float(np.unique(self.variables['GHOST_version']['data']))) + metadata_variables = self.get_standard_metadata(GHOST_version) + self.free_vars(metadata_variables) + self.free_vars('station') + points_nes.variables = deepcopy(self.variables) + + return points_nes + + def get_standard_metadata(self, GHOST_version): + """ + Get all possible GHOST variables for each version. + + Parameters + ---------- + GHOST_version : str + Version of GHOST file. 
+ + Returns + ---------- + metadata_variables[GHOST_version] : list + List of metadata variables for a certain GHOST version + """ + + # This metadata variables are + metadata_variables = {'1.4': ['GHOST_version', 'station_reference', 'station_timezone', 'latitude', 'longitude', + 'altitude', 'sampling_height', 'measurement_altitude', 'ellipsoid', + 'horizontal_datum', 'vertical_datum', 'projection', 'distance_to_building', + 'distance_to_kerb', 'distance_to_junction', 'distance_to_source', 'street_width', + 'street_type', 'daytime_traffic_speed', 'daily_passing_vehicles', 'data_level', + 'climatology', 'station_name', 'city', 'country', + 'administrative_country_division_1', 'administrative_country_division_2', + 'population', 'representative_radius', 'network', 'associated_networks', + 'area_classification', 'station_classification', 'main_emission_source', + 'land_use', 'terrain', 'measurement_scale', + 'ESDAC_Iwahashi_landform_classification', + 'ESDAC_modal_Iwahashi_landform_classification_5km', + 'ESDAC_modal_Iwahashi_landform_classification_25km', + 'ESDAC_Meybeck_landform_classification', + 'ESDAC_modal_Meybeck_landform_classification_5km', + 'ESDAC_modal_Meybeck_landform_classification_25km', + 'GHSL_settlement_model_classification', + 'GHSL_modal_settlement_model_classification_5km', + 'GHSL_modal_settlement_model_classification_25km', + 'Joly-Peuch_classification_code', 'Koppen-Geiger_classification', + 'Koppen-Geiger_modal_classification_5km', + 'Koppen-Geiger_modal_classification_25km', + 'MODIS_MCD12C1_v6_IGBP_land_use', 'MODIS_MCD12C1_v6_modal_IGBP_land_use_5km', + 'MODIS_MCD12C1_v6_modal_IGBP_land_use_25km', 'MODIS_MCD12C1_v6_UMD_land_use', + 'MODIS_MCD12C1_v6_modal_UMD_land_use_5km', + 'MODIS_MCD12C1_v6_modal_UMD_land_use_25km', 'MODIS_MCD12C1_v6_LAI', + 'MODIS_MCD12C1_v6_modal_LAI_5km', 'MODIS_MCD12C1_v6_modal_LAI_25km', + 'WMO_region', 'WWF_TEOW_terrestrial_ecoregion', 'WWF_TEOW_biogeographical_realm', + 'WWF_TEOW_biome', 'UMBC_anthrome_classification', + 'UMBC_modal_anthrome_classification_5km', + 'UMBC_modal_anthrome_classification_25km', + 'EDGAR_v4.3.2_annual_average_BC_emissions', + 'EDGAR_v4.3.2_annual_average_CO_emissions', + 'EDGAR_v4.3.2_annual_average_NH3_emissions', + 'EDGAR_v4.3.2_annual_average_NMVOC_emissions', + 'EDGAR_v4.3.2_annual_average_NOx_emissions', + 'EDGAR_v4.3.2_annual_average_OC_emissions', + 'EDGAR_v4.3.2_annual_average_PM10_emissions', + 'EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions', + 'EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions', + 'EDGAR_v4.3.2_annual_average_SO2_emissions', 'ASTER_v3_altitude', + 'ETOPO1_altitude', 'ETOPO1_max_altitude_difference_5km', + 'GHSL_built_up_area_density', 'GHSL_average_built_up_area_density_5km', + 'GHSL_average_built_up_area_density_25km', 'GHSL_max_built_up_area_density_5km', + 'GHSL_max_built_up_area_density_25km', 'GHSL_population_density', + 'GHSL_average_population_density_5km', 'GHSL_average_population_density_25km', + 'GHSL_max_population_density_5km', 'GHSL_max_population_density_25km', + 'GPW_population_density', 'GPW_average_population_density_5km', + 'GPW_average_population_density_25km', 'GPW_max_population_density_5km', + 'GPW_max_population_density_25km', + 'NOAA-DMSP-OLS_v4_nighttime_stable_lights', + 'NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km', + 'NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km', + 'NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km', + 'NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km', + 'OMI_level3_column_annual_average_NO2', + 
'OMI_level3_column_cloud_screened_annual_average_NO2', + 'OMI_level3_tropospheric_column_annual_average_NO2', + 'OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2', + 'GSFC_coastline_proximity', 'primary_sampling_type', + 'primary_sampling_instrument_name', + 'primary_sampling_instrument_documented_flow_rate', + 'primary_sampling_instrument_reported_flow_rate', + 'primary_sampling_process_details', 'primary_sampling_instrument_manual_name', + 'primary_sampling_further_details', 'sample_preparation_types', + 'sample_preparation_techniques', 'sample_preparation_process_details', + 'sample_preparation_further_details', 'measurement_methodology', + 'measuring_instrument_name', 'measuring_instrument_sampling_type', + 'measuring_instrument_documented_flow_rate', + 'measuring_instrument_reported_flow_rate', 'measuring_instrument_process_details', + 'measuring_instrument_process_details', 'measuring_instrument_manual_name', + 'measuring_instrument_further_details', 'measuring_instrument_reported_units', + 'measuring_instrument_reported_lower_limit_of_detection', + 'measuring_instrument_documented_lower_limit_of_detection', + 'measuring_instrument_reported_upper_limit_of_detection', + 'measuring_instrument_documented_upper_limit_of_detection', + 'measuring_instrument_reported_uncertainty', + 'measuring_instrument_documented_uncertainty', + 'measuring_instrument_reported_accuracy', + 'measuring_instrument_documented_accuracy', + 'measuring_instrument_reported_precision', + 'measuring_instrument_documented_precision', + 'measuring_instrument_reported_zero_drift', + 'measuring_instrument_documented_zero_drift', + 'measuring_instrument_reported_span_drift', + 'measuring_instrument_documented_span_drift', + 'measuring_instrument_reported_zonal_drift', + 'measuring_instrument_documented_zonal_drift', + 'measuring_instrument_reported_measurement_resolution', + 'measuring_instrument_documented_measurement_resolution', + 'measuring_instrument_reported_absorption_cross_section', + 'measuring_instrument_documented_absorption_cross_section', + 'measuring_instrument_inlet_information', + 'measuring_instrument_calibration_scale', + 'network_provided_volume_standard_temperature', + 'network_provided_volume_standard_pressure', 'retrieval_algorithm', + 'principal_investigator_name', 'principal_investigator_institution', + 'principal_investigator_email_address', 'contact_name', + 'contact_institution', 'contact_email_address', 'meta_update_stamp', + 'data_download_stamp', 'data_revision_stamp', 'network_sampling_details', + 'network_uncertainty_details', 'network_maintenance_details', + 'network_qa_details', 'network_miscellaneous_details', 'data_licence', + 'process_warnings', 'temporal_resolution', + 'reported_lower_limit_of_detection_per_measurement', + 'reported_upper_limit_of_detection_per_measurement', + 'reported_uncertainty_per_measurement', 'derived_uncertainty_per_measurement', + 'day_night_code', 'weekday_weekend_code', 'season_code', + 'hourly_native_representativity_percent', 'hourly_native_max_gap_percent', + 'daily_native_representativity_percent', 'daily_representativity_percent', + 'daily_native_max_gap_percent', 'daily_max_gap_percent', + 'monthly_native_representativity_percent', 'monthly_representativity_percent', + 'monthly_native_max_gap_percent', 'monthly_max_gap_percent', + 'annual_native_representativity_percent', 'annual_native_max_gap_percent', + 'all_representativity_percent', 'all_max_gap_percent'], + } + + return metadata_variables[GHOST_version] + + @staticmethod + def 
_get_axis_index_(axis): + if axis == 'T': + value = 1 + elif axis == 'X': + value = 0 + else: + raise ValueError("Unknown axis: {0}".format(axis)) + return value + + @staticmethod + def _set_var_crs(var): + """ + Set the grid_mapping + + Parameters + ---------- + var : Variable + netCDF4-python variable object. + """ + return None diff --git a/nes/nc_projections/points_nes_providentia.py b/nes/nc_projections/points_nes_providentia.py new file mode 100644 index 0000000000000000000000000000000000000000..499cd7af261e3fc43250ae905547d731d0b81f7e --- /dev/null +++ b/nes/nc_projections/points_nes_providentia.py @@ -0,0 +1,612 @@ +#!/usr/bin/env python + +import sys +import warnings +import numpy as np +from copy import deepcopy +from numpy.ma.core import MaskError +from netCDF4 import stringtochar +from .points_nes import PointsNes + + +class PointsNesProvidentia(PointsNes): + """ + + Attributes + ---------- + _model_centre_lon : dict + Model centre longitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _model_centre_lat : dict + Model centre latitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _grid_edge_lon : dict + Grid edge longitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + _grid_edge_lat : dict + Grid edge latitudes dictionary with the complete 'data' key for all the values and the rest of the + attributes. + model_centre_lon : dict + Model centre longitudes dictionary with the portion of 'data' corresponding to the rank values. + model_centre_lat : dict + Model centre latitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lon : dict + Grid edge longitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lat : dict + Grid edge latitudes dictionary with the portion of 'data' corresponding to the rank values. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + times=None, model_centre_lon=None, model_centre_lat=None, grid_edge_lon=None, grid_edge_lat=None, + **kwargs): + """ + Initialize the PointsNesProvidentia class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator. + path: str + Path to the NetCDF to initialize the object. + info: bool + Indicates if you want to get reading/writing info. + dataset: Dataset + NetCDF4-python Dataset to initialize the class. + xarray: bool: + (Not working) Indicates if you want to use xarray as default. + parallel_method : str + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + balanced : bool + Indicates if you want a balanced parallelization or not. + Balanced dataset cannot be written in chunking mode. + first_level : int + Index of the first level to use. + last_level : int, None + Index of the last level to use. None if it is the last. + create_nes : bool + Indicates if you want to create the object from scratch (True) or through an existing file. + times : list, None + List of times to substitute the current ones while creation. + model_centre_lon : dict + Model centre longitudes dictionary with the portion of 'data' corresponding to the rank values. 
+ model_centre_lat : dict + Model centre latitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lon : dict + Grid edge longitudes dictionary with the portion of 'data' corresponding to the rank values. + grid_edge_lat : dict + Grid edge latitudes dictionary with the portion of 'data' corresponding to the rank values. + kwargs : + Projection dependent parameters to create it from scratch. + """ + + super(PointsNesProvidentia, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, + create_nes=create_nes, times=times, **kwargs) + + if create_nes: + # Complete dimensions + self._model_centre_lon = model_centre_lon + self._model_centre_lat = model_centre_lat + self._grid_edge_lon = grid_edge_lon + self._grid_edge_lat = grid_edge_lat + else: + # Complete dimensions + self._model_centre_lon = self._get_coordinate_dimension(['model_centre_longitude']) + self._model_centre_lat = self._get_coordinate_dimension(['model_centre_latitude']) + self._grid_edge_lon = self._get_coordinate_dimension(['grid_edge_longitude']) + self._grid_edge_lat = self._get_coordinate_dimension(['grid_edge_latitude']) + + # Dimensions screening + self.model_centre_lon = self._get_coordinate_values(self._model_centre_lon, '') + self.model_centre_lat = self._get_coordinate_values(self._model_centre_lat, '') + self.grid_edge_lon = self._get_coordinate_values(self._grid_edge_lon, '') + self.grid_edge_lat = self._get_coordinate_values(self._grid_edge_lat, '') + + # Set strlen to be None (avoid default strlen inherited from points) + self.strlen = None + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False, + parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, + model_centre_lon=None, model_centre_lat=None, grid_edge_lon=None, grid_edge_lat=None): + """ + Initialize the PointsNesProvidentia class. + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator. + path: str + Path to the NetCDF to initialize the object. + info: bool + Indicates if you want to get reading/writing info. + dataset: Dataset + NetCDF4-python Dataset to initialize the class. + xarray: bool: + (Not working) Indicates if you want to use xarray as default. + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default: 'X'. + Accepted values: ['X']. + balanced : bool + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + first_level : int + Index of the first level to use + last_level : int, None + Index of the last level to use. None if it is the last. + create_nes : bool + Indicates if you want to create the object from scratch (True) or through an existing file. + model_centre_lon : dict + Model centre longitudes dictionary with the portion of 'data' corresponding to the rank values. + model_centre_lat : dict + Model centre latitudes dictionary with the portion of 'data' corresponding to the rank values. 
+        grid_edge_lon : dict
+            Grid edge longitudes dictionary with the portion of 'data' corresponding to the rank values.
+        grid_edge_lat : dict
+            Grid edge latitudes dictionary with the portion of 'data' corresponding to the rank values.
+        """
+
+        new = PointsNesProvidentia(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced,
+                                   parallel_method=parallel_method, avoid_first_hours=avoid_first_hours,
+                                   avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level,
+                                   model_centre_lon=model_centre_lon, model_centre_lat=model_centre_lat,
+                                   grid_edge_lon=grid_edge_lon, grid_edge_lat=grid_edge_lat)
+
+        return new
+
+    def _create_dimensions(self, netcdf):
+        """
+        Create 'grid_edge', 'model_latitude' and 'model_longitude' dimensions and the super dimensions
+        ('lev', 'time_nv', 'station', 'spatial_nv', 'strlen').
+
+        Parameters
+        ----------
+        netcdf : Dataset
+            NetCDF object.
+        """
+
+        super(PointsNesProvidentia, self)._create_dimensions(netcdf)
+
+        # Create grid_edge, model_latitude and model_longitude dimensions
+        netcdf.createDimension('grid_edge', len(self._grid_edge_lon['data']))
+        netcdf.createDimension('model_latitude', self._model_centre_lon['data'].shape[0])
+        netcdf.createDimension('model_longitude', self._model_centre_lon['data'].shape[1])
+
+        return None
+
+    def _create_dimension_variables(self, netcdf):
+        """
+        Create the 'model_centre_lon', 'model_centre_lat', 'grid_edge_lon' and 'grid_edge_lat' variables.
+
+        Parameters
+        ----------
+        netcdf : Dataset
+            NetCDF object.
+        """
+
+        super(PointsNesProvidentia, self)._create_dimension_variables(netcdf)
+
+        # MODEL CENTRE LONGITUDES
+        model_centre_lon = netcdf.createVariable('model_centre_longitude', 'f8',
+                                                 ('model_latitude', 'model_longitude',),
+                                                 zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
+        model_centre_lon.units = 'degrees_east'
+        model_centre_lon.axis = 'X'
+        model_centre_lon.long_name = 'model centre longitude'
+        model_centre_lon.standard_name = 'model centre longitude'
+        if self.size > 1:
+            model_centre_lon.set_collective(True)
+        msg = '2D meshed grid centre longitudes with '
+        msg += '{} longitudes in {} bands of latitude'.format(self._model_centre_lon['data'].shape[1],
+                                                              self._model_centre_lat['data'].shape[0])
+        model_centre_lon.description = msg
+        model_centre_lon[:] = self._model_centre_lon['data']
+
+        # MODEL CENTRE LATITUDES
+        model_centre_lat = netcdf.createVariable('model_centre_latitude', 'f8',
+                                                 ('model_latitude', 'model_longitude',),
+                                                 zlib=self.zip_lvl > 0, complevel=self.zip_lvl)
+        model_centre_lat.units = 'degrees_north'
+        model_centre_lat.axis = 'Y'
+        model_centre_lat.long_name = 'model centre latitude'
+        model_centre_lat.standard_name = 'model centre latitude'
+        if self.size > 1:
+            model_centre_lat.set_collective(True)
+        msg = '2D meshed grid centre latitudes with '
+        msg += '{} longitudes in {} bands of latitude'.format(self._model_centre_lon['data'].shape[1],
+                                                              self._model_centre_lat['data'].shape[0])
+        model_centre_lat.description = msg
+        model_centre_lat[:] = self._model_centre_lat['data']
+
+        # GRID EDGE DOMAIN LONGITUDES
+        grid_edge_lon = netcdf.createVariable('grid_edge_longitude', 'f8', ('grid_edge'))
+        grid_edge_lon.units = 'degrees_east'
+        grid_edge_lon.axis = 'X'
+        grid_edge_lon.long_name = 'grid edge longitude'
+        grid_edge_lon.standard_name = 'grid edge longitude'
+        if self.size > 1:
+            grid_edge_lon.set_collective(True)
+        msg = 'Longitude coordinate along edge of grid domain '
+        msg += '(going clockwise around grid boundary from bottom-left corner).'
+ grid_edge_lon.description = msg + grid_edge_lon[:] = self._grid_edge_lon['data'] + + # GRID EDGE DOMAIN LATITUDES + grid_edge_lat = netcdf.createVariable('grid_edge_latitude', 'f8', ('grid_edge')) + grid_edge_lat.units = 'degrees_north' + grid_edge_lat.axis = 'Y' + grid_edge_lat.long_name = 'grid edge latitude' + grid_edge_lat.standard_name = 'grid edge latitude' + if self.size > 1: + grid_edge_lat.set_collective(True) + msg = 'Latitude coordinate along edge of grid domain ' + msg += '(going clockwise around grid boundary from bottom-left corner).' + grid_edge_lat.description = msg + grid_edge_lat[:] = self._grid_edge_lat['data'] + + self.free_vars(('model_centre_longitude', 'model_centre_latitude', 'grid_edge_longitude', 'grid_edge_latitude')) + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion. + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. Accepted values: ['X']. + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank. + """ + + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif coordinate_len == 3: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], :] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + elif coordinate_axis == '': + # pass for 'model_centre_lon', 'model_centre_lat', 'grid_edge_lon' and 'grid_edge_lat' + pass + + return values + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read. + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank. + """ + + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + # Read data in 1, 2 or 3 dimensions + if len(var_dims) < 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif len(var_dims) == 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif len(var_dims) == 3: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + :] + else: + raise NotImplementedError('Error with {0}. Only can be read netCDF with 3 dimensions or less'.format( + var_name)) + + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables. + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset. 
+ chunking : bool + Indicates if you want to chunk the output netCDF. + """ + + if self.variables is not None: + for i, (var_name, var_dict) in enumerate(self.variables.items()): + if var_dict['data'] is not None: + + # Get data type + if 'dtype' in var_dict.keys(): + var_dtype = var_dict['dtype'] + else: + var_dtype = var_dict['data'].dtype + + # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST) + if var_dtype == np.dtype(object): + var_dict['data'] = var_dict['data'].astype(str) + var_dtype = var_dict['data'].dtype + else: + var_dtype = var_dict['data'].dtype + + # Get dimensions + if len(var_dict['data'].shape) == 1: + # Metadata + var_dims = self._var_dim + elif len(var_dict['data'].shape) == 2: + # Different from metadata (e.g. concentrations of pm10) + var_dims = self._var_dim + ('time',) + + # Convert list of strings to chars for parallelization + try: + unicode_type = len(max(var_dict['data'], key=len)) + if ((var_dict['data'].dtype == np.dtype(' 0, complevel=self.zip_lvl) + else: + if self.master: + chunk_size = var_dict['data'].shape + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dtype, var_dims, zlib=self.zip_lvl > 0, + complevel=self.zip_lvl, chunksizes=chunk_size) + + if self.info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + if len(att_value.shape) == 1: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + elif len(att_value.shape) == 2: + if 'strlen' in var_dims: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + else: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. 
out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) + elif len(att_value.shape) == 3: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :].shape, + att_value.shape)) + + if self.info: + print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: + var.setncattr(att_name, att_value) + self._set_var_crs(var) + if self.info: + print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + except Exception as e: + print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) + # print("**ERROR** an error has occurredred while writing the '{0}' variable".format(var_name), + # file=sys.stderr) + raise e + else: + msg = 'WARNING!!! ' + msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + warnings.warn(msg) + + return None + + def _gather_data(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. + """ + + data_list = deepcopy(self.variables) + for var_name, var_info in data_list.items(): + try: + # noinspection PyArgumentList + data_aux = self.comm.gather(data_list[var_name]['data'], root=0) + if self.rank == 0: + shp_len = len(data_list[var_name]['data'].shape) + # concatenate over station + if self.parallel_method == 'X': + if shp_len == 1: + # dimensions = (station) + axis = 0 + elif shp_len == 2: + # dimensions = (station, strlen) or + # dimensions = (station, time) + axis = 0 + else: + msg = 'The points NetCDF must have ' + msg += 'surface values (without levels).' + raise NotImplementedError(msg) + elif self.parallel_method == 'T': + # concatenate over time + if shp_len == 1: + # dimensions = (station) + axis = None + continue + elif shp_len == 2: + if 'strlen' in var_info['dimensions']: + # dimensions = (station, strlen) + axis = None + continue + else: + # dimensions = (station, time) + axis = 1 + else: + msg = 'The points NetCDF must have ' + msg += 'surface values (without levels).' + raise NotImplementedError(msg) + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented. 
Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'T'])) + data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) + except Exception as e: + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list + + def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + """ + Write the netCDF output file. + + Parameters + ---------- + path : str + Path to the output netCDF file. + compression_level : int + Level of compression (0 to 9) Default: 0 (no compression). + serial : bool + Indicates if you want to write in serial or not. Default: False. + info : bool + Indicates if you want to print the information of each writing step by stdout Default: False. + chunking : bool + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False. + """ + + if not serial: + msg = 'WARNING!!! ' + msg += 'Providentia datasets cannot be written in parallel yet. ' + msg += 'Changing to serial mode.' + warnings.warn(msg) + + super(PointsNesProvidentia, self).to_netcdf(path, compression_level=compression_level, + serial=True, info=info, chunking=chunking) + + return None + + @staticmethod + def _get_axis_index_(axis): + if axis == 'T': + value = 1 + elif axis == 'X': + value = 0 + else: + raise ValueError("Unknown axis: {0}".format(axis)) + return value + + @staticmethod + def _set_var_crs(var): + """ + Set the grid_mapping + + Parameters + ---------- + var : Variable + netCDF4-python variable object. + """ + return None diff --git a/nes/nc_projections/rotated_nes.py b/nes/nc_projections/rotated_nes.py index bdeb992ff771233477826c41ac16d8dc0c6112a5..b968dd724f49a5577ca5654a84a1ae1a87c596ba 100644 --- a/nes/nc_projections/rotated_nes.py +++ b/nes/nc_projections/rotated_nes.py @@ -1,8 +1,12 @@ #!/usr/bin/env python import numpy as np +import pandas as pd import math from cfunits import Units +from copy import deepcopy +import geopandas as gpd +from shapely.geometry import Polygon from .default_nes import Nes @@ -21,7 +25,7 @@ class RotatedNes(Nes): Rotated longitudes dictionary with the portion of 'data' corresponding to the rank values. projection_data : dict Dictionary with the projection information. - 'grid_north_pole_latitude' and 'grid_north_pole_longitude' keys + 'grid_north_pole_latitude' and 'grid_north_pole_longitude' keys. _var_dim : tuple Tuple with the name of the Y and X dimensions for the variables. ('rlat', 'rlon') for a rotated projection. @@ -36,28 +40,29 @@ class RotatedNes(Nes): avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, balanced=False, times=None, **kwargs): """ - Initialize the RotatedNes class + Initialize the RotatedNes class. Parameters ---------- comm: MPI.COMM - Path to the CSV file that contains all the information. + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. 
xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(RotatedNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, balanced=balanced, xarray=xarray, parallel_method=parallel_method, @@ -92,71 +97,92 @@ class RotatedNes(Nes): def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): """ - Initialize the Nes class + Initialize the Nes class. Parameters ---------- comm: MPI.COMM - MPI Communicator + MPI Communicator. path: str - Path to the NetCDF to initialize the object + Path to the NetCDF to initialize the object. info: bool - Indicates if you want to get reading/writing info + Indicates if you want to get reading/writing info. dataset: Dataset - NetCDF4-python Dataset to initialize the class + NetCDF4-python Dataset to initialize the class. xarray: bool: - (Not working) Indicates if you want to use xarray as default + (Not working) Indicates if you want to use xarray as default. avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. parallel_method : str - Indicates the parallelization method that you want. Default over Y axis - accepted values: ['X', 'Y', 'T'] + Indicates the parallelization method that you want. Default: 'Y'. + Accepted values: ['X', 'Y', 'T']. """ + new = RotatedNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) return new + def filter_coordinates_selection(self): + + idx = self.get_idx_intervals() + + self.rlat = self._get_coordinate_values(self._rlat, 'Y') + self.rlon = self._get_coordinate_values(self._rlon, 'X') + + self._rlat['data'] = self._rlat['data'][idx['idx_y_min']:idx['idx_y_max']] + self._rlon['data'] = self._rlon['data'][idx['idx_x_min']:idx['idx_x_max']] + + super(RotatedNes, self).filter_coordinates_selection() + + return None + def get_projection_data(self, create_nes, **kwargs): """ - Read the projection data + Read the projection data. Returns ------- - projection : dict - Dictionary with the projection data + projection_data : dict + Dictionary with the projection data. 
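+
+        Notes
+        -----
+        When a new grid is created, the north pole position is derived from the
+        grid centre: grid_north_pole_latitude = 90 - centre_lat and
+        grid_north_pole_longitude = centre_lon - 180 (see the code below).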
""" if create_nes: - projection = {'data': None, - 'dimensions': (), - 'grid_mapping_name': 'rotated_latitude_longitude', - 'grid_north_pole_latitude': 90 - kwargs['centre_lat'], - 'grid_north_pole_longitude': -180 + kwargs['centre_lon'], - } + projection_data = {'data': None, + 'dimensions': (), + 'grid_mapping_name': 'rotated_latitude_longitude', + 'grid_north_pole_latitude': 90 - kwargs['centre_lat'], + 'grid_north_pole_longitude': -180 + kwargs['centre_lon'], + } else: - projection = self.variables['rotated_pole'] + projection_data = self.variables['rotated_pole'] self.free_vars('rotated_pole') - return projection + return projection_data def _create_dimensions(self, netcdf): """ - Create the 'rlat', 'rlon' dimensions and the super dimensions ('lev', 'time'). + Create 'rlat', 'rlon' and 'spatial_nv' dimensions and the super dimensions ('lev', 'time'). Parameters ---------- netcdf : Dataset NetCDF object. """ + super(RotatedNes, self)._create_dimensions(netcdf) + # Create rlat and rlon dimensions netcdf.createDimension('rlon', len(self._rlon['data'])) netcdf.createDimension('rlat', len(self._rlat['data'])) + # Create spatial_nv (number of vertices) dimension + if (self._lat_bnds is not None) and (self._lon_bnds is not None): + netcdf.createDimension('spatial_nv', 4) + return None def _create_dimension_variables(self, netcdf): @@ -209,14 +235,14 @@ class RotatedNes(Nes): # Calculate rotated latitudes self.n_lat = int((abs(kwargs['south_boundary']) / kwargs['inc_rlat']) * 2 + 1) - self.rotated_lats = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] + + self.rotated_lat = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] + (kwargs['inc_rlat'] * (self.n_lat - 1)), self.n_lat) # Calculate rotated longitudes self.n_lon = int((abs(kwargs['west_boundary']) / kwargs['inc_rlon']) * 2 + 1) - self.rotated_lons = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] + + self.rotated_lon = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] + (kwargs['inc_rlon'] * (self.n_lon - 1)), self.n_lon) - return {'data': self.rotated_lats}, {'data': self.rotated_lons} + return {'data': self.rotated_lat}, {'data': self.rotated_lon} def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180, **kwargs): """ @@ -228,19 +254,29 @@ class RotatedNes(Nes): :param lat_deg: Rotated latitude coordinate. :type lat_deg: numpy.array - :param lon_min: Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360) + :param lon_min: Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360). :type lon_min: float - :return: Unrotated coordinates. Longitudes, Latitudes + :return: Unrotated coordinates. Longitudes, Latitudes. :rtype: tuple(numpy.array, numpy.array) """ + if 'centre_lat' in kwargs: + centre_lat = kwargs['centre_lat'] + else: + centre_lat = 90 - float(self.projection_data['grid_north_pole_latitude']) + + if 'centre_lon' in kwargs: + centre_lon = kwargs['centre_lon'] + else: + centre_lon = float(self.projection_data['grid_north_pole_longitude']) + 180 + degrees_to_radians = math.pi / 180. - tph0 = kwargs['centre_lat'] * degrees_to_radians + tph0 = centre_lat * degrees_to_radians tlm = lon_deg * degrees_to_radians tph = lat_deg * degrees_to_radians - tlm0d = -180 + kwargs['centre_lon'] + tlm0d = -180 + centre_lon ctph0 = np.cos(tph0) stph0 = np.sin(tph0) @@ -249,14 +285,14 @@ class RotatedNes(Nes): stph = np.sin(tph) ctph = np.cos(tph) - # Latitudes + # LATITUDES sph = (ctph0 * stph) + (stph0 * ctph * ctlm) sph[sph > 1.] = 1. sph[sph < -1.] = -1. 
aph = np.arcsin(sph) aphd = aph / degrees_to_radians - # Longitudes + # LONGITUDES anum = ctph * stlm denom = (ctlm * ctph - stph0 * sph) / ctph0 relm = np.arctan2(anum, denom) - math.pi @@ -266,25 +302,124 @@ class RotatedNes(Nes): return almd, aphd - def _create_centroids(self, **kwargs): + def _create_centre_coordinates(self, **kwargs): """ - Calculate center latitudes and longitudes from grid details. + Calculate centre latitudes and longitudes from grid details. - Parameters + Returns ---------- - netcdf : Dataset - NetCDF object. + centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). """ # Complete dimensions self._rlat, self._rlon = self._create_rotated_coordinates(**kwargs) - # Calculate center latitudes and longitudes (1D to 2D) - self.center_lons, self.center_lats = self.rotated2latlon(np.array([self.rotated_lons] * len(self.rotated_lats)), - np.array([self.rotated_lats] * len(self.rotated_lons)).T, - **kwargs) + # Calculate centre latitudes and longitudes (1D to 2D) + centre_lon_data, centre_lat_data = self.rotated2latlon(np.array([self.rotated_lon] * len(self.rotated_lat)), + np.array([self.rotated_lat] * len(self.rotated_lon)).T, + **kwargs) + centre_lon = {'data': centre_lon_data} + centre_lat = {'data': centre_lat_data} + + return centre_lat, centre_lon + + def create_providentia_exp_centre_coordinates(self): + """ + Calculate centre latitudes and longitudes from original coordinates and store as 2D arrays. + + Returns + ---------- + model_centre_lat : dict + Dictionary with data of centre coordinates for latitude in 2D (latitude, longitude). + model_centre_lon : dict + Dictionary with data of centre coordinates for longitude in 2D (latitude, longitude). + """ + + # Get centre latitudes + model_centre_lat = self.lat + + # Get centre longitudes + model_centre_lon = self.lon + + return model_centre_lat, model_centre_lon + + def create_providentia_exp_grid_edge_coordinates(self): + """ + Calculate grid edge latitudes and longitudes and get model grid outline. + + Returns + ---------- + grid_edge_lat : dict + Dictionary with data of grid edge latitudes. + grid_edge_lon : dict + Dictionary with data of grid edge longitudes. 
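+
+        Notes
+        -----
+        The edge arrays trace the grid outline clockwise starting at the
+        bottom-left corner (left edge upwards, top edge, right edge downwards,
+        bottom edge), matching the 'grid_edge_latitude' / 'grid_edge_longitude'
+        description written for Providentia outputs.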
+ """ + + # Get grid resolution + inc_rlon = np.abs(np.mean(np.diff(self.rlon['data']))) + inc_rlat = np.abs(np.mean(np.diff(self.rlat['data']))) + + # Get bounds for rotated coordinates + rlat_bounds = self.create_single_spatial_bounds(self.rlat['data'], inc_rlat) + rlon_bounds = self.create_single_spatial_bounds(self.rlon['data'], inc_rlon) + + # Get rotated latitudes for grid edge + left_edge_rlat = np.append(rlat_bounds.flatten()[::2], rlat_bounds.flatten()[-1]) + right_edge_rlat = np.flip(left_edge_rlat, 0) + top_edge_rlat = np.repeat(rlat_bounds[-1][-1], len(self.rlon['data']) - 1) + bottom_edge_rlat = np.repeat(rlat_bounds[0][0], len(self.rlon['data'])) + rlat_grid_edge = np.concatenate((left_edge_rlat, top_edge_rlat, right_edge_rlat, bottom_edge_rlat)) + + # Get rotated longitudes for grid edge + left_edge_rlon = np.repeat(rlon_bounds[0][0], len(self.rlat['data']) + 1) + top_edge_rlon = rlon_bounds.flatten()[1:-1:2] + right_edge_rlon = np.repeat(rlon_bounds[-1][-1], len(self.rlat['data']) + 1) + bottom_edge_rlon = np.flip(rlon_bounds.flatten()[:-1:2], 0) + rlon_grid_edge = np.concatenate((left_edge_rlon, top_edge_rlon, right_edge_rlon, bottom_edge_rlon)) + + # Get edges for regular coordinates + grid_edge_lon_data, grid_edge_lat_data = self.rotated2latlon(rlon_grid_edge, rlat_grid_edge) + + # Create grid outline by stacking the edges in both coordinates + model_grid_outline = np.vstack((grid_edge_lon_data, grid_edge_lat_data)).T + + grid_edge_lat = {'data': model_grid_outline[:,1]} + grid_edge_lon = {'data': model_grid_outline[:,0]} + + return grid_edge_lat, grid_edge_lon + + def create_spatial_bounds(self): + """ + Calculate longitude and latitude bounds and set them. + """ + + # Calculate rotated coordinates bounds + inc_rlat = np.abs(np.mean(np.diff(self._rlat['data']))) + rlat_bnds = self.create_single_spatial_bounds(np.array([self._rlat['data']] * len(self._rlon['data'])).T, + inc_rlat, spatial_nv=4, inverse=True) + + inc_rlon = np.abs(np.mean(np.diff(self._rlon['data']))) + rlon_bnds = self.create_single_spatial_bounds(np.array([self._rlon['data']] * len(self._rlat['data'])), + inc_rlon, spatial_nv=4) + + # Transform rotated bounds to regular bounds + lon_bnds, lat_bnds = self.rotated2latlon(rlon_bnds, rlat_bnds) + + # Obtain regular coordinates bounds + self._lat_bnds = deepcopy(lat_bnds) + self.lat_bnds = lat_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] + + self._lon_bnds = deepcopy(lon_bnds) + self.lon_bnds = lon_bnds[self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + :] - return {'data': self.center_lats}, {'data': self.center_lons} + return None @staticmethod def _set_var_crs(var): @@ -296,7 +431,9 @@ class RotatedNes(Nes): var : Variable netCDF4-python variable object. """ + var.grid_mapping = 'rotated_pole' + var.coordinates = "lat lon" return None @@ -307,7 +444,7 @@ class RotatedNes(Nes): Parameters ---------- netcdf : Dataset - netcdf4-python Dataset + netcdf4-python Dataset. """ mapping = netcdf.createVariable('rotated_pole', 'c') @@ -317,7 +454,7 @@ class RotatedNes(Nes): return None - def to_grib2(self, path, grib_keys, grib_template_path, info=False): + def to_grib2(self, path, grib_keys, grib_template_path, lat_flip=False, info=False): """ Write output file with grib2 format. @@ -326,10 +463,42 @@ class RotatedNes(Nes): path : str Path to the output file. 
        grib_keys : dict
-            Dictionary with the grib2 keys
+            Dictionary with the grib2 keys.
        grib_template_path : str
-            Path to the grib2 file to use as template
+            Path to the grib2 file to use as template.
+        lat_flip : bool
+            Indicates if you want to flip the latitudes. Default: False.
        info : bool
            Indicates if you want to print extra information during the process.
        """
-        raise NotImplementedError("Grib2 format cannot be write in a Rotated pole projection.")
+
+        raise NotImplementedError("Grib2 format cannot be written in a Rotated pole projection.")
+
+    def create_shapefile(self):
+        """
+        Create spatial geodataframe (shapefile).
+        """
+
+        if self._lat_bnds is None or self._lon_bnds is None:
+            self.create_spatial_bounds()
+
+        # Reshape arrays to create geometry
+        aux_b_lats = self.lat_bnds.reshape((self.lat_bnds.shape[0] * self.lat_bnds.shape[1], self.lat_bnds.shape[2]))
+        aux_b_lons = self.lon_bnds.reshape((self.lon_bnds.shape[0] * self.lon_bnds.shape[1], self.lon_bnds.shape[2]))
+
+        # Create dataframe containing all polygons
+        geometry = []
+        for i in range(aux_b_lons.shape[0]):
+            geometry.append(Polygon([(aux_b_lons[i, 0], aux_b_lats[i, 0]),
+                                     (aux_b_lons[i, 1], aux_b_lats[i, 1]),
+                                     (aux_b_lons[i, 2], aux_b_lats[i, 2]),
+                                     (aux_b_lons[i, 3], aux_b_lats[i, 3]),
+                                     (aux_b_lons[i, 0], aux_b_lats[i, 0])]))
+        fids = np.arange(self._lat['data'].shape[0] * self._lat['data'].shape[1])
+        fids = fids.reshape((self._lat['data'].shape[0], self._lat['data'].shape[1]))
+        fids = fids[self.read_axis_limits['y_min']:self.read_axis_limits['y_max'],
+                    self.read_axis_limits['x_min']:self.read_axis_limits['x_max']]
+        gdf = gpd.GeoDataFrame(index=pd.Index(name='FID', data=fids.ravel()),
+                               geometry=geometry,
+                               crs="EPSG:4326")
+        self.shapefile = gdf
+
+        return gdf
diff --git a/nes/nes_formats/__init__.py b/nes/nes_formats/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b0a484ef1cfd82c0e60fb2488a621a42b5ce479
--- /dev/null
+++ b/nes/nes_formats/__init__.py
@@ -0,0 +1 @@
+from .cams_ra_format import to_netcdf_cams_ra
diff --git a/nes/nes_formats/cams_ra_format.py b/nes/nes_formats/cams_ra_format.py
new file mode 100644
index 0000000000000000000000000000000000000000..86bd49db0ad470b34176f9ee96135d506cb139db
--- /dev/null
+++ b/nes/nes_formats/cams_ra_format.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python

+import sys
+import warnings
+import numpy as np
+import os
+import nes
+from netCDF4 import Dataset
+from mpi4py import MPI
+from copy import copy


+def to_netcdf_cams_ra(self, path):
+    """
+    Write a netCDF file in the CAMS Re-Analysis format, one file per vertical level.
+
+    Parameters
+    ----------
+    self : nes.Nes
+        Source projection Nes Object.
+    path : str
+        Path to the output netCDF file.
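+
+    Notes
+    -----
+    The output path must contain the '<level>' token; each vertical level is
+    written to its own file, with the token replaced by 'l0', 'l1', ...
+    A minimal usage sketch (hypothetical paths):
+
+        nessy = nes.open_netcdf('/data/input.nc')
+        to_netcdf_cams_ra(nessy, '/data/output_<level>.nc')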
+    """
+
+    if not isinstance(self, nes.LatLonNes):
+        raise TypeError("CAMS Re-Analysis format must have Regular Lat-Lon projection")
+    if '<level>' not in path:
+        raise ValueError("CAMS Re-Analysis path must contain '<level>' as pattern; current: '{0}'".format(path))
+
+    orig_path = copy(path)
+    for i_lev, level in enumerate(self.lev['data']):
+        path = orig_path.replace('<level>', 'l{0}'.format(i_lev))
+        # Open NetCDF
+        if self.info:
+            print("Rank {0:03d}: Creating {1}".format(self.rank, path))
+        if self.size > 1:
+            netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=True, comm=self.comm, info=MPI.Info())
+        else:
+            netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=False)
+        if self.info:
+            print("Rank {0:03d}: NetCDF ready to write".format(self.rank))
+        self.to_dtype(data_type=np.float32)
+
+        # Create dimensions
+        create_dimensions(self, netcdf)
+
+        # Create variables
+        create_variables(self, netcdf, i_lev)
+
+        # Create dimension variables
+        create_dimension_variables(self, netcdf)
+        if self.info:
+            print("Rank {0:03d}: Dimensions done".format(self.rank))
+
+        # Set the global attributes and close the NetCDF
+        if self.global_attrs is not None:
+            for att_name, att_value in self.global_attrs.items():
+                netcdf.setncattr(att_name, att_value)
+
+        netcdf.close()
+
+    return None


+def create_dimensions(self, netcdf):
+    """
+    Create 'time', 'lat' and 'lon' dimensions.
+
+    Parameters
+    ----------
+    self : nes.Nes
+        Source projection Nes Object.
+    netcdf : Dataset
+        netcdf4-python opened Dataset.
+    """
+
+    # Create time dimension
+    netcdf.createDimension('time', None)
+
+    # Create lat and lon dimensions
+    netcdf.createDimension('lat', len(self._lat['data']))
+    netcdf.createDimension('lon', len(self._lon['data']))
+
+    return None


+def create_dimension_variables(self, netcdf):
+    """
+    Create the 'time', 'lat' and 'lon' variables.
+
+    Parameters
+    ----------
+    self : nes.Nes
+        Source projection Nes Object.
+    netcdf : Dataset
+        netcdf4-python opened Dataset.
+    """
+
+    # LATITUDES
+    lat = netcdf.createVariable('lat', np.float64, ('lat',))
+    lat.standard_name = 'latitude'
+    lat.long_name = 'latitude'
+    lat.units = 'degrees_north'
+    lat.axis = 'Y'
+    if self.size > 1:
+        lat.set_collective(True)
+    lat[:] = self._lat['data']
+
+    # LONGITUDES
+    lon = netcdf.createVariable('lon', np.float64, ('lon',))
+    lon.long_name = 'longitude'
+    lon.standard_name = 'longitude'
+    lon.units = 'degrees_east'
+    lon.axis = 'X'
+    if self.size > 1:
+        lon.set_collective(True)
+    lon[:] = self._lon['data']
+
+    # TIMES
+    time_var = netcdf.createVariable('time', np.float64, ('time',))
+    time_var.standard_name = 'time'
+    time_var.units = 'day as %Y%m%d.%f'
+    time_var.calendar = 'proleptic_gregorian'
+    time_var.axis = 'T'
+    if self.size > 1:
+        time_var.set_collective(True)
+    time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True):
+                                      self.get_time_id(self.hours_end, first=False)],
+                           time_var.units, time_var.calendar)
+
+    return None


+def create_variables(self, netcdf, i_lev):
+    """
+    Create the netCDF file variables.
+
+    Parameters
+    ----------
+    self : nes.Nes
+        Source projection Nes Object.
+    netcdf : Dataset
+        netcdf4-python opened Dataset.
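+    i_lev : int
+        Index of the vertical level to write; the level is selected as
+        var_dict['data'][:, i_lev, :, :] when filling each variable.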
+    """
+
+    for i, (var_name, var_dict) in enumerate(self.variables.items()):
+        if var_dict['data'] is not None:
+            if self.info:
+                print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables)))
+            try:
+                var = netcdf.createVariable(var_name, np.float32, ('time', 'lat', 'lon',),
+                                            zlib=True, complevel=7, least_significant_digit=3)
+
+                if self.info:
+                    print("Rank {0:03d}: Var {1} created ({2}/{3})".format(
+                        self.rank, var_name, i + 1, len(self.variables)))
+                if self.size > 1:
+                    var.set_collective(True)
+                if self.info:
+                    print("Rank {0:03d}: Var {1} collective ({2}/{3})".format(
+                        self.rank, var_name, i + 1, len(self.variables)))
+
+                if self.info:
+                    print("Rank {0:03d}: Filling {1}".format(self.rank, var_name))
+                var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'],
+                    self.write_axis_limits['y_min']:self.write_axis_limits['y_max'],
+                    self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = var_dict['data'][:, i_lev, :, :]
+
+                if self.info:
+                    print("Rank {0:03d}: Var {1} data ({2}/{3})".format(
+                        self.rank, var_name, i + 1, len(self.variables)))
+                var.long_name = var_dict['long_name']
+                var.units = var_dict['units']
+                var.number_of_significant_digits = np.int32(3)
+
+                if self.info:
+                    print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1,
+                                                                             len(self.variables)))
+            except Exception as e:
+                print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name))
+                # print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name),
+                #       file=sys.stderr)
+                raise e
+        else:
+            msg = 'WARNING!!! '
+            msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name)
+            warnings.warn(msg)
+
+    return None


+def date2num(time_array, time_units=None, time_calendar=None):
+    # Encode datetimes as 'day as %Y%m%d.%f' (e.g. 2021-08-03 12:00 -> 20210803.5)
+    time_res = []
+    for aux_time in time_array:
+        time_res.append(float(aux_time.strftime("%Y%m%d")) + (float(aux_time.strftime("%H")) / 24))
+    time_res = np.array(time_res, dtype=np.float64)
+    return time_res
diff --git a/tests/1-nes_tests_by_size.py b/tests/1-test_read_write_size.py
similarity index 100%
rename from tests/1-nes_tests_by_size.py
rename to tests/1-test_read_write_size.py
diff --git a/tests/2-nes_tests_by_projection.py b/tests/2-test_read_write_projection.py
similarity index 97%
rename from tests/2-nes_tests_by_projection.py
rename to tests/2-test_read_write_projection.py
index 93b4b4734b92f93f74313317e6f12c12aff0613d..5f4590c1482f5e2690643caeaaf765ab9db2cbb2 100644
--- a/tests/2-nes_tests_by_projection.py
+++ b/tests/2-test_read_write_projection.py
@@ -20,12 +20,12 @@ paths = {'regular_file': {'path': '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/or
          'points_ghost_file': {'path': '/gpfs/projects/bsc32/AC_cache/obs/ghost/EANET/1.4/daily/sconcso4/sconcso4_201911.nc',
                                'projection': 'points_ghost',
                                'variables': [],  # all
-                               'parallel_methods': ['X']},
+                               'parallel_methods': ['X', 'T']},
          'lcc_file': {'path': '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/pm10/pm10_2022062600.nc',
                       'projection': 'lcc',
                       'variables': [],  # all
                       'parallel_methods': ['X', 'Y', 'T']},
-         'mercator_file': {'path': '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/mercator_grid_example.nc',
+         'mercator_file': {'path': '/esarchive/scratch/avilanova/software/NES/tutorials/data/mercator_grid_example.nc',
                            'projection': 'mercator',
                            'variables': [],  # all
                            'parallel_methods': ['X', 'Y', 'T']}
diff --git a/tests/2-test_read_write_projection_nord3v2.bash
b/tests/2-test_read_write_projection_nord3v2.bash new file mode 100644 index 0000000000000000000000000000000000000000..4f7eac151bfa80b6a16328289db257bb4d9952d4 --- /dev/null +++ b/tests/2-test_read_write_projection_nord3v2.bash @@ -0,0 +1,24 @@ +#!/bin/bash + +#EXPORTPATH="/esarchive/scratch/avilanova/software/NES" +EXPORTPATH="/gpfs/projects/bsc32/models/NES" +SRCPATH="/gpfs/projects/bsc32/models/NES/tests" +EXE="2-test_read_write_projection.py" + +module purge +module load Python/3.7.4-GCCcore-8.3.0 +module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4 +module load cfunits/1.8-foss-2019b-Python-3.7.4 +module load xarray/0.17.0-foss-2019b-Python-3.7.4 +module load pandas/1.2.4-foss-2019b-Python-3.7.4 +module load mpi4py/3.0.3-foss-2019b-Python-3.7.4 +module load filelock/3.7.1-foss-2019b-Python-3.7.4 +module load pyproj/2.5.0-foss-2019b-Python-3.7.4 +module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 +module load geopandas/0.8.1-foss-2019b-Python-3.7.4 +module load Shapely/1.7.1-foss-2019b-Python-3.7.4 + +for nprocs in 1 2 4 8 16 32 +do + JOB_ID=`sbatch --ntasks=${nprocs} --exclusive --job-name=nes_${nprocs} --output=./log_nord3v2_NES_${nprocs}_%J.out --error=./log_nord3v2_NES_${nprocs}_%J.err -D . --time=02:00:00 --wrap="export PYTHONPATH=${EXPORTPATH}:${PYTHONPATH}; cd ${SRCPATH}; mpirun --mca mpi_warn_on_fork 0 -np ${nprocs} python ${SRCPATH}/${EXE}"` +done \ No newline at end of file diff --git a/tests/3-test_spatial_join.py b/tests/3-test_spatial_join.py new file mode 100644 index 0000000000000000000000000000000000000000..e456c64ee2cd1ea24e63e864f851678d37cc5936 --- /dev/null +++ b/tests/3-test_spatial_join.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python +import geopandas as gpd +import pandas as pd +import timeit +import sys +from mpi4py import MPI +from nes import * + +# Hide warning +pd.options.mode.chained_assignment = None + +comm = MPI.COMM_WORLD +rank = comm.Get_rank() +size = comm.Get_size() + +results = [] +for method in ['spatial_overlay', 'spatial_join']: + for projection in ['regular', 'rotated']: + for projection_type in ['created', 'read']: + + # Regular projection + if projection == 'regular': + # Create dataset and get shapefile + if projection_type == 'created': + lat_orig = 41.1 + lon_orig = 1.8 + inc_lat = 0.2 + inc_lon = 0.2 + n_lat = 100 + n_lon = 100 + coordinates = create_nes(comm=None, info=True, + projection='regular', + lat_orig=lat_orig, + lon_orig=lon_orig, + inc_lat=inc_lat, + inc_lon=inc_lon, + n_lat=n_lat, + n_lon=n_lon) + coordinates.create_shapefile() + + # Open dataset and get shapefile + elif projection_type == 'read': + coordinates_path = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc' + coordinates = open_netcdf(path=coordinates_path, info=True) + coordinates.create_shapefile() + coordinates.keep_vars(['O3']) + coordinates.load() + coordinates.shapefile['O3'] = coordinates.variables['O3']['data'][-1, -1, :].ravel() + + # Rotated projection + elif projection == 'rotated': + # Create dataset and get shapefile + if projection_type == 'created': + centre_lat = 51 + centre_lon = 10 + west_boundary = -35 + south_boundary = -27 + inc_rlat = 0.2 + inc_rlon = 0.2 + coordinates = create_nes(comm=None, info=True, + projection='rotated', + centre_lat=centre_lat, + centre_lon=centre_lon, + west_boundary=west_boundary, + south_boundary=south_boundary, + inc_rlat=inc_rlat, + inc_rlon=inc_rlon) + coordinates.create_shapefile() + + # Open dataset and get shapefile + elif projection_type == 'read': + coordinates_path = 
'/gpfs/scratch/bsc32/bsc32538/original_files/CAMS_MONARCH_d01_2022070412.nc'
+                coordinates = open_netcdf(path=coordinates_path, info=True)
+                coordinates.create_shapefile()
+                coordinates.keep_vars(['O3'])
+                coordinates.load()
+                coordinates.shapefile['O3'] = coordinates.variables['O3']['data'][-1, -1, :].ravel()
+
+            coordinates.write_shapefile('coordinates_{0}_{1}'.format(projection,
+                                                                     projection_type))
+
+            mask_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/timezones_2021c/timezones_2021c.shp'
+            mask = gpd.read_file(mask_path)
+
+            # Spatial overlay (old method)
+            if method == 'spatial_overlay':
+                start_time = timeit.default_timer()
+                intersection = coordinates.shapefile.copy()
+                #intersection['area'] = intersection.geometry.area
+                intersection = coordinates.spatial_overlays(intersection, mask)
+                #intersection.rename(columns={'idx1': 'FID', 'idx2': 'shp_id'}, inplace=True)
+                #intersection['fraction'] = intersection.geometry.area / intersection['area']
+                #intersection.sort_values('fraction', ascending=False, inplace=True)
+                #intersection = intersection.drop_duplicates(subset='FID', keep="first")
+                #intersection.set_index('FID', inplace=True)
+                #coordinates.loc[intersection.index, coordinates.shp_colname] = intersection[coordinates.shp_colname]
+                time = timeit.default_timer() - start_time
+
+            # Spatial join (new method)
+            elif method == 'spatial_join':
+                start_time = timeit.default_timer()
+                coordinates.spatial_join(mask, method='intersection')
+                time = timeit.default_timer() - start_time
+
+            coordinates.write_shapefile('masked_coordinates_{0}_{1}_{2}'.format(projection,
+                                                                                projection_type,
+                                                                                method))
+
+            results.append({'Projection': projection,
+                            'Projection type': projection_type,
+                            'Method': method,
+                            'Time': '{min:02d}:{sec:06.3f}'.format(
+                                min=int(time // 60), sec=time - (int(time // 60) * 60))
+                            })
+
+            comm.Barrier()
+
+comm.Barrier()
+
+if rank == 0:
+    table = pd.DataFrame(results)
+    print('RESULTS TABLE')
+    print(table)
+    sys.stdout.flush()
diff --git a/tests/3-test_spatial_join_nord3v2.bash b/tests/3-test_spatial_join_nord3v2.bash
new file mode 100644
index 0000000000000000000000000000000000000000..079aa7928696599a280f2c555704dbd543b65385
--- /dev/null
+++ b/tests/3-test_spatial_join_nord3v2.bash
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+EXPORTPATH="/esarchive/scratch/avilanova/software/NES"
+SRCPATH="/esarchive/scratch/avilanova/software/NES/tests"
+EXE="3-test_spatial_join.py"
+
+module purge
+module load Python/3.7.4-GCCcore-8.3.0
+module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4
+module load cfunits/1.8-foss-2019b-Python-3.7.4
+module load xarray/0.17.0-foss-2019b-Python-3.7.4
+module load pandas/1.2.4-foss-2019b-Python-3.7.4
+module load mpi4py/3.0.3-foss-2019b-Python-3.7.4
+module load filelock/3.7.1-foss-2019b-Python-3.7.4
+module load pyproj/2.5.0-foss-2019b-Python-3.7.4
+module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4
+module load geopandas/0.8.1-foss-2019b-Python-3.7.4
+module load Shapely/1.7.1-foss-2019b-Python-3.7.4
+
+for nprocs in 1
+do
+  JOB_ID=`sbatch --ntasks=${nprocs} --exclusive --job-name=nes_${nprocs} --output=./log_nord3v2_NES_${nprocs}_%J.out --error=./log_nord3v2_NES_${nprocs}_%J.err -D . 
--time=02:00:00 --wrap="export PYTHONPATH=${EXPORTPATH}:${PYTHONPATH}; cd ${SRCPATH}; mpirun --mca mpi_warn_on_fork 0 -np ${nprocs} python ${SRCPATH}/${EXE}"` +done \ No newline at end of file diff --git a/tests/4-test_bounds.py b/tests/4-test_bounds.py new file mode 100644 index 0000000000000000000000000000000000000000..e3d3063fddd238f0dc601a6a1c2837e1bcd98e94 --- /dev/null +++ b/tests/4-test_bounds.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +import sys +import timeit +import pandas as pd +from mpi4py import MPI +from nes import * + +comm = MPI.COMM_WORLD +rank = comm.Get_rank() + +for projection_type in ['read', 'created']: + + # Open dataset + if projection_type == 'read': + test_path = "/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc" + nessy = open_netcdf(path=test_path, info=True) + + # Create dataset + elif projection_type == 'created': + lat_orig = 41.1 + lon_orig = 1.8 + inc_lat = 0.2 + inc_lon = 0.2 + n_lat = 100 + n_lon = 100 + nessy = create_nes(comm=None, info=True, projection='regular', + lat_orig=lat_orig, lon_orig=lon_orig, + inc_lat=inc_lat, inc_lon=inc_lon, + n_lat=n_lat, n_lon=n_lon) + + # Add bounds + nessy.create_spatial_bounds() + + print('NES', projection_type, '-', 'Rank', rank, '-', nessy) + print('NES', projection_type, '-', 'Rank', rank, '-', 'Lat bounds', + nessy.lat_bnds) + print('NES', projection_type, '-', 'Rank', rank, '-', 'Lon bounds', + nessy.lon_bnds) + + comm.Barrier() + sys.stdout.flush() \ No newline at end of file diff --git a/tests/scalability_test_nord3v2.bash b/tests/4-test_bounds_nord3v2.bash similarity index 81% rename from tests/scalability_test_nord3v2.bash rename to tests/4-test_bounds_nord3v2.bash index 7e398100060d08b481af1de54d51ae3525350023..fc6e06b0824d8682c80372867da0d429df1a80b6 100644 --- a/tests/scalability_test_nord3v2.bash +++ b/tests/4-test_bounds_nord3v2.bash @@ -1,9 +1,8 @@ #!/bin/bash -#EXPORTPATH="/esarchive/scratch/avilanova/software/NES" -EXPORTPATH="/gpfs/projects/bsc32/models/NES" +EXPORTPATH="/esarchive/scratch/avilanova/software/NES" SRCPATH="/esarchive/scratch/avilanova/software/NES/tests" -EXE="2-nes_tests_by_projection.py" +EXE="4-test_bounds.py" module purge module load Python/3.7.4-GCCcore-8.3.0 @@ -15,9 +14,10 @@ module load mpi4py/3.0.3-foss-2019b-Python-3.7.4 module load filelock/3.7.1-foss-2019b-Python-3.7.4 module load pyproj/2.5.0-foss-2019b-Python-3.7.4 module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 +module load geopandas/0.8.1-foss-2019b-Python-3.7.4 +module load Shapely/1.7.1-foss-2019b-Python-3.7.4 - -for nprocs in 1 2 4 8 +for nprocs in 1 2 do JOB_ID=`sbatch --ntasks=${nprocs} --exclusive --job-name=nes_${nprocs} --output=./log_nord3v2_NES_${nprocs}_%J.out --error=./log_nord3v2_NES_${nprocs}_%J.err -D . 
--time=02:00:00 --wrap="export PYTHONPATH=${EXPORTPATH}:${PYTHONPATH}; cd ${SRCPATH}; mpirun --mca mpi_warn_on_fork 0 -np ${nprocs} python ${SRCPATH}/${EXE}"` done \ No newline at end of file diff --git a/tests/test_bash_mn4.cmd b/tests/test_bash_mn4.cmd index f3d2da7e3665ebc2cead9c78ff006bc10ed11701..db2758fcffd54ce03e81b5a2b4e4d8af072ea07e 100644 --- a/tests/test_bash_mn4.cmd +++ b/tests/test_bash_mn4.cmd @@ -23,9 +23,10 @@ module load OpenMPI/4.0.5-GCC-8.3.0-mn4 module load filelock/3.7.1-foss-2019b-Python-3.7.4 module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 module load pyproj/2.5.0-foss-2019b-Python-3.7.4 - +module load geopandas/0.8.1-foss-2019b-Python-3.7.4 +module load Shapely/1.7.1-foss-2019b-Python-3.7.4 export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} -cd /gpfs/scratch/bsc32/bsc32538/NES_tests/NES/tests +cd /gpfs/projects/bsc32/models/NES/tests mpirun --mca mpi_warn_on_fork 0 -np 4 python basic_nes_tests.py diff --git a/tests/test_bash_nord3v2.cmd b/tests/test_bash_nord3v2.cmd index 8dd85bb8db5201407c6decd780c6644dce6fa3dc..da0f4d7bf1829f732d82e21237872333048060c8 100644 --- a/tests/test_bash_nord3v2.cmd +++ b/tests/test_bash_nord3v2.cmd @@ -23,9 +23,10 @@ module load mpi4py/3.0.3-foss-2019b-Python-3.7.4 module load filelock/3.7.1-foss-2019b-Python-3.7.4 module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 module load pyproj/2.5.0-foss-2019b-Python-3.7.4 +module load geopandas/0.8.1-foss-2019b-Python-3.7.4 +module load Shapely/1.7.1-foss-2019b-Python-3.7.4 export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} - -cd /esarchive/scratch/avilanova/software/NES/tests +cd /gpfs/projects/bsc32/models/NES/tests mpirun --mca mpi_warn_on_fork 0 -np 4 python 2-nes_tests_by_projection.py diff --git a/Jupyter_notebooks/1.1-regular_grids.ipynb b/tutorials/1.Introduction/1.1.Read_Write_Regular.ipynb similarity index 75% rename from Jupyter_notebooks/1.1-regular_grids.ipynb rename to tutorials/1.Introduction/1.1.Read_Write_Regular.ipynb index c6733d6d81818c4b45234704f8b4fd7b1a539eb5..90d1d0cb6f733e4624976371a0694ab5918bb2e2 100644 --- a/Jupyter_notebooks/1.1-regular_grids.ipynb +++ b/tutorials/1.Introduction/1.1.Read_Write_Regular.ipynb @@ -27,16 +27,6 @@ "nc_path_1 = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc'" ] }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# ERROR when files have lat_bnds, lon_bnds \n", - "#nc_path_1 = '/esarchive/exp/ecearth/a2vx/original_files/cmorfiles-fixed/CMIP/EC-Earth-Consortium/EC-Earth3-AerChem/historical/r4i1p1f1/Amon/ch4/gn/v20200609/ch4_Amon_EC-Earth3-AerChem_historical_r4i1p1f1_gn_185001-185012.nc'" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -1050,16 +1040,16 @@ " Domain: Global\n", " Conventions: None\n", " history: MONARCHv1.0 netcdf file.\n", - " comment: Generated on marenostrum4
  • " ], "text/plain": [ "\n", @@ -1271,7 +1261,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 5, @@ -1895,7 +1885,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -1908,6 +1898,13 @@ "nessy_2" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, { "cell_type": "code", "execution_count": 15, @@ -2281,14 +2278,14 @@ " Domain: Global\n", " Conventions: CF-1.7\n", " history: MONARCHv1.0 netcdf file.\n", - " comment: Generated on marenostrum4
  • " ], "text/plain": [ "\n", diff --git a/Jupyter_notebooks/1.2-rotated_grids.ipynb b/tutorials/1.Introduction/1.2.Read_Write_Rotated.ipynb similarity index 87% rename from Jupyter_notebooks/1.2-rotated_grids.ipynb rename to tutorials/1.Introduction/1.2.Read_Write_Rotated.ipynb index 4cff1df6df2c0bf97ae9c90b55077140afc990b6..87ab4d22710cc11a032e21aefab605ef8358d692 100644 --- a/Jupyter_notebooks/1.2-rotated_grids.ipynb +++ b/tutorials/1.Introduction/1.2.Read_Write_Rotated.ipynb @@ -415,14 +415,14 @@ " rotated_pole |S1 b''\n", "Attributes:\n", " Conventions: CF-1.7\n", - " comment: Generated on marenostrum4
  • " ], "text/plain": [ "\n", @@ -483,7 +483,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -810,7 +810,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 12, @@ -823,6 +823,13 @@ "nessy_2" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, { "cell_type": "code", "execution_count": 13, @@ -1198,14 +1205,14 @@ " rotated_pole |S1 b''\n", "Attributes:\n", " Conventions: CF-1.7\n", - " comment: Generated on marenostrum4
  • " ], "text/plain": [ "\n", diff --git a/Jupyter_notebooks/1.3-points_grids.ipynb b/tutorials/1.Introduction/1.3.Read_Write_Points.ipynb similarity index 74% rename from Jupyter_notebooks/1.3-points_grids.ipynb rename to tutorials/1.Introduction/1.3.Read_Write_Points.ipynb index 966ab17efc2bc372b374a42703695727d852c786..582007ae500a62734a4f35e80ca893c7864f68ff 100644 --- a/Jupyter_notebooks/1.3-points_grids.ipynb +++ b/tutorials/1.Introduction/1.3.Read_Write_Points.ipynb @@ -418,7 +418,7 @@ " station_end_date (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n", " station_rural_back (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n", " latitude (station) float32 46.81 47.48 ... 53.33 38.88\n", - " station_ozone_classification (station) |S75 b'rural' b'rural' ... b'nan'
  • " ], "text/plain": [ "\n", @@ -710,7 +710,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -805,8 +805,7 @@ " 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,\n", " 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,\n", " 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,\n", - " 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83]),\n", - " 'units': ''}" + " 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83])}" ] }, "execution_count": 7, @@ -1409,7 +1408,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -1789,26 +1788,27 @@ " fill: currentColor;\n", "}\n", "
    <xarray.Dataset>\n",
    -       "Dimensions:                       (time: 31, station: 84)\n",
    +       "Dimensions:                       (time: 31, station: 84, strlen: 75)\n",
            "Coordinates:\n",
            "  * time                          (time) datetime64[ns] 2015-07-01 ... 2015-0...\n",
            "  * station                       (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n",
    +       "Dimensions without coordinates: strlen\n",
            "Data variables: (12/19)\n",
    -       "    station_start_date            (station) |S75 b'1980-01-01' ... b'nan'\n",
    -       "    station_zone                  (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
    -       "    street_type                   (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
    -       "    country_code                  (station) |S75 b'CH' b'CH' ... b'NL' b'IT'\n",
    -       "    ccaa                          (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
    -       "    station_name                  (station) |S75 b'payerne' ... b'lamezia terme'\n",
    -       "    ...                            ...\n",
    -       "    station_code                  (station) |S75 b'CH0002R' ... b'IT0016R'\n",
    -       "    station_end_date              (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
    -       "    station_rural_back            (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
    -       "    station_ozone_classification  (station) |S75 b'rural' b'rural' ... b'nan'\n",
            "    lat                           (station) float64 46.81 47.48 ... 53.33 38.88\n",
            "    lon                           (station) float64 6.944 8.905 ... 6.277 16.23\n",
    +       "    station_start_date            (station, strlen) object '1' '9' '8' ... '' ''\n",
    +       "    station_zone                  (station, strlen) object 'n' 'a' 'n' ... '' ''\n",
    +       "    street_type                   (station, strlen) object 'n' 'a' 'n' ... '' ''\n",
    +       "    country_code                  (station, strlen) object 'C' 'H' '' ... '' ''\n",
    +       "    ...                            ...\n",
    +       "    country                       (station, strlen) object 's' 'w' 'i' ... '' ''\n",
    +       "    altitude                      (station) float32 489.0 538.0 ... 1.0 6.0\n",
    +       "    station_code                  (station, strlen) object 'C' 'H' '0' ... '' ''\n",
    +       "    station_end_date              (station, strlen) object 'n' 'a' 'n' ... '' ''\n",
    +       "    station_rural_back            (station, strlen) object 'n' 'a' 'n' ... '' ''\n",
    +       "    station_ozone_classification  (station, strlen) object 'r' 'u' 'r' ... '' ''\n",
            "Attributes:\n",
    -       "    Conventions:  CF-1.7
  • " ], "text/plain": [ "\n", - "Dimensions: (time: 31, station: 84)\n", + "Dimensions: (time: 31, station: 84, strlen: 75)\n", "Coordinates:\n", " * time (time) datetime64[ns] 2015-07-01 ... 2015-0...\n", " * station (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n", + "Dimensions without coordinates: strlen\n", "Data variables: (12/19)\n", - " station_start_date (station) |S75 ...\n", - " station_zone (station) |S75 ...\n", - " street_type (station) |S75 ...\n", - " country_code (station) |S75 ...\n", - " ccaa (station) |S75 ...\n", - " station_name (station) |S75 ...\n", - " ... ...\n", - " station_code (station) |S75 ...\n", - " station_end_date (station) |S75 ...\n", - " station_rural_back (station) |S75 ...\n", - " station_ozone_classification (station) |S75 ...\n", " lat (station) float64 ...\n", " lon (station) float64 ...\n", + " station_start_date (station, strlen) object ...\n", + " station_zone (station, strlen) object ...\n", + " street_type (station, strlen) object ...\n", + " country_code (station, strlen) object ...\n", + " ... ...\n", + " country (station, strlen) object ...\n", + " altitude (station) float32 ...\n", + " station_code (station, strlen) object ...\n", + " station_end_date (station, strlen) object ...\n", + " station_rural_back (station, strlen) object ...\n", + " station_ozone_classification (station, strlen) object ...\n", "Attributes:\n", " Conventions: CF-1.7" ] @@ -2501,7 +2411,7 @@ " source: Surface observations\n", " creator_name: Dene R. Bowdalo\n", " creator_email: dene.bowdalo@bsc.es\n", - " version: 1.4
  • " ], "text/plain": [ "\n", @@ -2727,7 +2637,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 18, @@ -2817,7 +2727,7 @@ { "data": { "text/plain": [ - "{'data': array([0, 1, 2]), 'units': ''}" + "{'data': array([0, 1, 2])}" ] }, "execution_count": 21, @@ -3482,7 +3392,21 @@ "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km created (54/173)\n", "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km data (54/173)\n", "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km completed (54/173)\n", - "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/173)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/173)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_ghost.py:570: UserWarning: WARNING!!! GHOST datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km created (55/173)\n", "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km data (55/173)\n", "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km completed (55/173)\n", @@ -3978,18 +3902,373 @@ "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading station var (1/174)\n", + "Rank 000: Loaded station var ((3,))\n", + "Rank 000: Loading ASTER_v3_altitude var (2/174)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (3/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (4/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (5/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (6/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (7/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (8/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (9/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (10/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (11/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (12/174)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (13/174)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((3,))\n", + 
"Rank 000: Loading ESDAC_Meybeck_landform_classification var (14/174)\n", + "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (15/174)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (16/174)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (17/174)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (18/174)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ETOPO1_altitude var (19/174)\n", + "Rank 000: Loaded ETOPO1_altitude var ((3,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (20/174)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((3,))\n", + "Rank 000: Loading GHOST_version var (21/174)\n", + "Rank 000: Loaded GHOST_version var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (22/174)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (23/174)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (24/174)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (25/174)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (26/174)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (27/174)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (28/174)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (29/174)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (30/174)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (31/174)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (32/174)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((3,))\n", + "Rank 000: Loading GHSL_population_density var (33/174)\n", + "Rank 000: Loaded GHSL_population_density var ((3,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (34/174)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (35/174)\n", + "Rank 000: Loaded GPW_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (36/174)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (37/174)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_5km var 
(38/174)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_population_density var (39/174)\n", + "Rank 000: Loaded GPW_population_density var ((3,))\n", + "Rank 000: Loading GSFC_coastline_proximity var (40/174)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((3,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (41/174)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (42/174)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (43/174)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (44/174)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (45/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (46/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (47/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (48/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (49/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (50/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (51/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (52/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (53/174)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (54/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (55/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (56/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (57/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (58/174)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((3,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (59/174)\n", + "Rank 000: Loaded OMI_level3_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (60/174)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (61/174)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading 
OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (62/174)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (63/174)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (64/174)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (65/174)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((3,))\n", + "Rank 000: Loading WMO_region var (66/174)\n", + "Rank 000: Loaded WMO_region var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (67/174)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biome var (68/174)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((3,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (69/174)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((3,))\n", + "Rank 000: Loading administrative_country_division_1 var (70/174)\n", + "Rank 000: Loaded administrative_country_division_1 var ((3,))\n", + "Rank 000: Loading administrative_country_division_2 var (71/174)\n", + "Rank 000: Loaded administrative_country_division_2 var ((3,))\n", + "Rank 000: Loading altitude var (72/174)\n", + "Rank 000: Loaded altitude var ((3,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (73/174)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading annual_native_representativity_percent var (74/174)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading area_classification var (75/174)\n", + "Rank 000: Loaded area_classification var ((3,))\n", + "Rank 000: Loading associated_networks var (76/174)\n", + "Rank 000: Loaded associated_networks var ((3,))\n", + "Rank 000: Loading city var (77/174)\n", + "Rank 000: Loaded city var ((3,))\n", + "Rank 000: Loading climatology var (78/174)\n", + "Rank 000: Loaded climatology var ((3,))\n", + "Rank 000: Loading contact_email_address var (79/174)\n", + "Rank 000: Loaded contact_email_address var ((3,))\n", + "Rank 000: Loading contact_institution var (80/174)\n", + "Rank 000: Loaded contact_institution var ((3,))\n", + "Rank 000: Loading contact_name var (81/174)\n", + "Rank 000: Loaded contact_name var ((3,))\n", + "Rank 000: Loading country var (82/174)\n", + "Rank 000: Loaded country var ((3,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (83/174)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading daily_native_representativity_percent var (84/174)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading daily_passing_vehicles var (85/174)\n", + "Rank 000: Loaded daily_passing_vehicles var ((3,))\n", + "Rank 000: Loading data_level var (86/174)\n", + "Rank 000: Loaded data_level var ((3,))\n", + "Rank 000: Loading data_licence var (87/174)\n", + "Rank 000: Loaded data_licence var ((3,))\n", + "Rank 000: Loading day_night_code var (88/174)\n", + "Rank 000: Loaded day_night_code var ((3, 30))\n", + "Rank 000: Loading daytime_traffic_speed var (89/174)\n", + "Rank 000: Loaded daytime_traffic_speed var ((3,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (90/174)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: 
Loading distance_to_building var (91/174)\n", + "Rank 000: Loaded distance_to_building var ((3,))\n", + "Rank 000: Loading distance_to_junction var (92/174)\n", + "Rank 000: Loaded distance_to_junction var ((3,))\n", + "Rank 000: Loading distance_to_kerb var (93/174)\n", + "Rank 000: Loaded distance_to_kerb var ((3,))\n", + "Rank 000: Loading distance_to_source var (94/174)\n", + "Rank 000: Loaded distance_to_source var ((3,))\n", + "Rank 000: Loading ellipsoid var (95/174)\n", + "Rank 000: Loaded ellipsoid var ((3,))\n", + "Rank 000: Loading horizontal_datum var (96/174)\n", + "Rank 000: Loaded horizontal_datum var ((3,))\n", + "Rank 000: Loading land_use var (97/174)\n", + "Rank 000: Loaded land_use var ((3,))\n", + "Rank 000: Loading main_emission_source var (98/174)\n", + "Rank 000: Loaded main_emission_source var ((3,))\n", + "Rank 000: Loading measurement_altitude var (99/174)\n", + "Rank 000: Loaded measurement_altitude var ((3,))\n", + "Rank 000: Loading measurement_methodology var (100/174)\n", + "Rank 000: Loaded measurement_methodology var ((3,))\n", + "Rank 000: Loading measurement_scale var (101/174)\n", + "Rank 000: Loaded measurement_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (102/174)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (103/174)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (104/174)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (105/174)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (106/174)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (107/174)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (108/174)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_span_drift var (109/174)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (110/174)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (111/174)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (112/174)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zonal_drift var (113/174)\n", + "Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_further_details var (114/174)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (115/174)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((3,))\n", + "Rank 000: Loading 
measuring_instrument_manual_name var (116/174)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_name var (117/174)\n", + "Rank 000: Loaded measuring_instrument_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_process_details var (118/174)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (119/174)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (120/174)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (121/174)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (122/174)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (123/174)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (124/174)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (125/174)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (126/174)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (127/174)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (128/174)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (129/174)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (130/174)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_sampling_type var (131/174)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((3,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (132/174)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading monthly_native_representativity_percent var (133/174)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading network var (134/174)\n", + "Rank 000: Loaded network var ((3,))\n", + "Rank 000: Loading network_maintenance_details var (135/174)\n", + "Rank 000: Loaded network_maintenance_details var ((3,))\n", + "Rank 000: Loading network_miscellaneous_details var (136/174)\n", + "Rank 000: Loaded network_miscellaneous_details var ((3,))\n", + "Rank 000: Loading network_provided_volume_standard_pressure var (137/174)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((3,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (138/174)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((3,))\n", + "Rank 000: Loading network_qa_details var (139/174)\n", + "Rank 000: Loaded 
network_qa_details var ((3,))\n", + "Rank 000: Loading network_sampling_details var (140/174)\n", + "Rank 000: Loaded network_sampling_details var ((3,))\n", + "Rank 000: Loading network_uncertainty_details var (141/174)\n", + "Rank 000: Loaded network_uncertainty_details var ((3,))\n", + "Rank 000: Loading population var (142/174)\n", + "Rank 000: Loaded population var ((3,))\n", + "Rank 000: Loading primary_sampling_further_details var (143/174)\n", + "Rank 000: Loaded primary_sampling_further_details var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (144/174)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (145/174)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (146/174)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (147/174)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_process_details var (148/174)\n", + "Rank 000: Loaded primary_sampling_process_details var ((3,))\n", + "Rank 000: Loading primary_sampling_type var (149/174)\n", + "Rank 000: Loaded primary_sampling_type var ((3,))\n", + "Rank 000: Loading principal_investigator_email_address var (150/174)\n", + "Rank 000: Loaded principal_investigator_email_address var ((3,))\n", + "Rank 000: Loading principal_investigator_institution var (151/174)\n", + "Rank 000: Loaded principal_investigator_institution var ((3,))\n", + "Rank 000: Loading principal_investigator_name var (152/174)\n", + "Rank 000: Loaded principal_investigator_name var ((3,))\n", + "Rank 000: Loading process_warnings var (153/174)\n", + "Rank 000: Loaded process_warnings var ((3,))\n", + "Rank 000: Loading projection var (154/174)\n", + "Rank 000: Loaded projection var ((3,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (155/174)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading representative_radius var (156/174)\n", + "Rank 000: Loaded representative_radius var ((3,))\n", + "Rank 000: Loading retrieval_algorithm var (157/174)\n", + "Rank 000: Loaded retrieval_algorithm var ((3,))\n", + "Rank 000: Loading sample_preparation_further_details var (158/174)\n", + "Rank 000: Loaded sample_preparation_further_details var ((3,))\n", + "Rank 000: Loading sample_preparation_process_details var (159/174)\n", + "Rank 000: Loaded sample_preparation_process_details var ((3,))\n", + "Rank 000: Loading sample_preparation_techniques var (160/174)\n", + "Rank 000: Loaded sample_preparation_techniques var ((3,))\n", + "Rank 000: Loading sample_preparation_types var (161/174)\n", + "Rank 000: Loaded sample_preparation_types var ((3,))\n", + "Rank 000: Loading sampling_height var (162/174)\n", + "Rank 000: Loaded sampling_height var ((3,))\n", + "Rank 000: Loading sconcso4 var (163/174)\n", + "Rank 000: Loaded sconcso4 var ((3, 30))\n", + "Rank 000: Loading season_code var (164/174)\n", + "Rank 000: Loaded season_code var ((3, 30))\n", + "Rank 000: Loading station_classification var (165/174)\n", + "Rank 000: Loaded station_classification var ((3,))\n", + "Rank 000: Loading station_name var (166/174)\n", + "Rank 000: Loaded station_name var ((3,))\n", + "Rank 000: Loading station_reference var (167/174)\n", + 
"Rank 000: Loaded station_reference var ((3,))\n", + "Rank 000: Loading station_timezone var (168/174)\n", + "Rank 000: Loaded station_timezone var ((3,))\n", + "Rank 000: Loading street_type var (169/174)\n", + "Rank 000: Loaded street_type var ((3,))\n", + "Rank 000: Loading street_width var (170/174)\n", + "Rank 000: Loaded street_width var ((3,))\n", + "Rank 000: Loading terrain var (171/174)\n", + "Rank 000: Loaded terrain var ((3,))\n", + "Rank 000: Loading vertical_datum var (172/174)\n", + "Rank 000: Loaded vertical_datum var ((3,))\n", + "Rank 000: Loading weekday_weekend_code var (173/174)\n", + "Rank 000: Loaded weekday_weekend_code var ((3, 30))\n", + "Rank 000: Loading sconcso4_prefiltered_defaultqa var (174/174)\n", + "Rank 000: Loaded sconcso4_prefiltered_defaultqa var ((3, 30))\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } ], "source": [ "nessy_ghost_2 = open_netcdf('points_file_2.nc', info=True, parallel_method='X')\n", + "nessy_ghost_2.load()\n", "nessy_ghost_2" ] }, @@ -4366,8 +4645,8 @@ " * station (station) float64 ...\n", "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", "Data variables: (12/177)\n", - " flag (station, time, N_flag_codes) int64 ...\n", - " qa (station, time, N_qa_codes) int64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", " ASTER_v3_altitude (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", @@ -4377,8 +4656,8 @@ " vertical_datum (station) object ...\n", " weekday_weekend_code (station, time) uint8 ...\n", " sconcso4_prefiltered_defaultqa (station, time) float32 ...\n", - " lat (station) float64 ...\n", - " lon (station) float64 ...\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", "Attributes:\n", " title: Surface sulphate data in the EANET network in 2019-11.\n", " institution: Barcelona Supercomputing Center\n", @@ -4386,7 +4665,7 @@ " creator_name: Dene R. Bowdalo\n", " creator_email: dene.bowdalo@bsc.es\n", " version: 1.4\n", - " Conventions: CF-1.7
  • (rendered xarray HTML repr — attributes only) title: Surface sulphate data in the EANET network in 2019-11. | institution: Barcelona Supercomputing Center | source: Surface observations | creator_name: Dene R. Bowdalo | creator_email: dene.bowdalo@bsc.es | version: 1.4 | Conventions: CF-1.7
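The hunk above exercises the full GHOST points round trip: lazy open, explicit load, conversion to plain points, serial write. A minimal sketch of that workflow, using only calls that appear in these cells (the file names are the notebook's own):

```python
from nes import open_netcdf

# Lazy open, as in the cell above: only metadata is read here;
# variable data arrives with the explicit load() call.
nessy = open_netcdf('points_file_2.nc', info=True, parallel_method='X')
nessy.load()  # fills nessy.variables[...]['data'] with masked arrays

# to_points() keeps the measured variables (sconcso4 and
# sconcso4_prefiltered_defaultqa here) and drops the GHOST metadata columns.
plain = nessy.to_points()
plain.to_netcdf('points_file_3.nc', info=True)
```

Note the UserWarning captured earlier in this hunk: GHOST-format files are written serially even when the object was opened with an MPI parallel method.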
  • " ], "text/plain": [ "\n", @@ -4567,8 +4846,8 @@ " * station (station) float64 ...\n", "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", "Data variables: (12/177)\n", - " flag (station, time, N_flag_codes) int64 ...\n", - " qa (station, time, N_qa_codes) int64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", " ASTER_v3_altitude (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", @@ -4578,8 +4857,8 @@ " vertical_datum (station) object ...\n", " weekday_weekend_code (station, time) uint8 ...\n", " sconcso4_prefiltered_defaultqa (station, time) float32 ...\n", - " lat (station) float64 ...\n", - " lon (station) float64 ...\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", "Attributes:\n", " title: Surface sulphate data in the EANET network in 2019-11.\n", " institution: Barcelona Supercomputing Center\n", @@ -4598,6 +4877,134 @@ "source": [ "xr.open_dataset('points_file_2.nc')" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Transform to points" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_3 = nessy_ghost_2.to_points()\n", + "nessy_ghost_3" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'sconcso4': {'data': masked_array(\n", + " data=[[ nan, nan, nan, nan, nan,\n", + " nan, nan, 2.31 , 2.31 , 1.12 ,\n", + " 1.12 , nan, nan, nan, nan,\n", + " 1.71 , 1.71 , nan, nan, nan,\n", + " nan, nan, nan, nan, nan,\n", + " nan, nan, 1.38 , 1.2841667, 1.28 ],\n", + " [ nan, nan, nan, 0.74 , 0.74 ,\n", + " nan, nan, nan, nan, 3.41 ,\n", + " 3.41 , nan, nan, nan, nan,\n", + " 0.74 , 0.74 , nan, nan, nan,\n", + " nan, 1.2 , 1.2 , nan, nan,\n", + " nan, nan, 1.76 , 1.76 , nan],\n", + " [ nan, nan, nan, 3.05 , 3.05 ,\n", + " nan, nan, nan, nan, 2.44 ,\n", + " 2.44 , nan, nan, nan, nan,\n", + " 2.24 , 2.24 , nan, nan, nan,\n", + " nan, 1.37 , 1.37 , nan, nan,\n", + " nan, nan, nan, nan, nan]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station', 'time'),\n", + " 'standard_name': 'sulphate',\n", + " 'long_name': 'sulphate',\n", + " 'units': 'ug m-3',\n", + " 'description': 'Measured value of surface sulphate for the stated temporal resolution.'},\n", + " 'sconcso4_prefiltered_defaultqa': {'data': masked_array(\n", + " data=[[ nan, nan, nan, nan, nan,\n", + " nan, nan, 2.31 , 2.31 , 1.12 ,\n", + " 1.12 , nan, nan, nan, nan,\n", + " 1.71 , 1.71 , nan, nan, nan,\n", + " nan, nan, nan, nan, nan,\n", + " nan, nan, 1.38 , 1.2841667, 1.28 ],\n", + " [ nan, nan, nan, 0.74 , 0.74 ,\n", + " nan, nan, nan, nan, 3.41 ,\n", + " 3.41 , nan, nan, nan, nan,\n", + " 0.74 , 0.74 , nan, nan, nan,\n", + " nan, 1.2 , 1.2 , nan, nan,\n", + " nan, nan, 1.76 , 1.76 , nan],\n", + " [ nan, nan, nan, 3.05 , 3.05 ,\n", + " nan, nan, nan, nan, 2.44 ,\n", + " 2.44 , nan, nan, nan, nan,\n", + " 2.24 , 2.24 , nan, nan, nan,\n", + " nan, 1.37 , 1.37 , nan, nan,\n", + " nan, nan, nan, nan, nan]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station', 'time'),\n", + " 'standard_name': 'sulphate',\n", + " 
'long_name': 'sulphate',\n", + " 'units': 'ug m-3',\n", + " 'description': 'Measured value of surface sulphate for the stated temporal resolution. Prefiltered by default QA.'}}" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_3.variables" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_file_3.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing sconcso4 var (1/2)\n", + "Rank 000: Var sconcso4 created (1/2)\n", + "Rank 000: Var sconcso4 data (1/2)\n", + "Rank 000: Var sconcso4 completed (1/2)\n", + "Rank 000: Writing sconcso4_prefiltered_defaultqa var (2/2)\n", + "Rank 000: Var sconcso4_prefiltered_defaultqa created (2/2)\n", + "Rank 000: Var sconcso4_prefiltered_defaultqa data (2/2)\n", + "Rank 000: Var sconcso4_prefiltered_defaultqa completed (2/2)\n" + ] + } + ], + "source": [ + "nessy_ghost_3.to_netcdf('points_file_3.nc', info=True)" + ] } ], "metadata": { diff --git a/Jupyter_notebooks/1.4-lcc_grids.ipynb b/tutorials/1.Introduction/1.4.Read_Write_LCC.ipynb similarity index 72% rename from Jupyter_notebooks/1.4-lcc_grids.ipynb rename to tutorials/1.Introduction/1.4.Read_Write_LCC.ipynb index 91f50cec892013d13f02c4d288a812ad34b0dd4d..e1a4f6499b0a6f23a9246da4d36c9882e2c0f04c 100644 --- a/Jupyter_notebooks/1.4-lcc_grids.ipynb +++ b/tutorials/1.Introduction/1.4.Read_Write_LCC.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "nc_path_1 = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/pm10/pm10_2022062600.nc'" + "nc_path_1 = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/sconco3/sconco3_2021010100.nc'" ] }, { @@ -402,54 +402,54 @@ "
    <xarray.Dataset>\n",
            "Dimensions:            (time: 48, y: 398, x: 478, lev: 1)\n",
            "Coordinates:\n",
    -       "  * time               (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n",
    +       "  * time               (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n",
            "    lat                (y, x) float32 ...\n",
            "    lon                (y, x) float32 ...\n",
            "  * x                  (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n",
            "  * y                  (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n",
            "  * lev                (lev) float32 0.0\n",
            "Data variables:\n",
    -       "    pm10               (time, lev, y, x) float32 ...\n",
    -       "    Lambert_conformal  int32 -2147483647
    " + " sconco3 (time, lev, y, x) float32 ...\n", + " Lambert_conformal int32 -2147483647" ], "text/plain": [ "\n", "Dimensions: (time: 48, y: 398, x: 478, lev: 1)\n", "Coordinates:\n", - " * time (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n", " lat (y, x) float32 ...\n", " lon (y, x) float32 ...\n", " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", " * lev (lev) float32 0.0\n", "Data variables:\n", - " pm10 (time, lev, y, x) float32 ...\n", + " sconco3 (time, lev, y, x) float32 ...\n", " Lambert_conformal int32 ..." ] }, @@ -477,7 +477,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -498,54 +498,54 @@ { "data": { "text/plain": [ - "[datetime.datetime(2022, 6, 26, 0, 0),\n", - " datetime.datetime(2022, 6, 26, 1, 0),\n", - " datetime.datetime(2022, 6, 26, 2, 0),\n", - " datetime.datetime(2022, 6, 26, 3, 0),\n", - " datetime.datetime(2022, 6, 26, 4, 0),\n", - " datetime.datetime(2022, 6, 26, 5, 0),\n", - " datetime.datetime(2022, 6, 26, 6, 0),\n", - " datetime.datetime(2022, 6, 26, 7, 0),\n", - " datetime.datetime(2022, 6, 26, 8, 0),\n", - " datetime.datetime(2022, 6, 26, 9, 0),\n", - " datetime.datetime(2022, 6, 26, 10, 0),\n", - " datetime.datetime(2022, 6, 26, 11, 0),\n", - " datetime.datetime(2022, 6, 26, 12, 0),\n", - " datetime.datetime(2022, 6, 26, 13, 0),\n", - " datetime.datetime(2022, 6, 26, 14, 0),\n", - " datetime.datetime(2022, 6, 26, 15, 0),\n", - " datetime.datetime(2022, 6, 26, 16, 0),\n", - " datetime.datetime(2022, 6, 26, 17, 0),\n", - " datetime.datetime(2022, 6, 26, 18, 0),\n", - " datetime.datetime(2022, 6, 26, 19, 0),\n", - " datetime.datetime(2022, 6, 26, 20, 0),\n", - " datetime.datetime(2022, 6, 26, 21, 0),\n", - " datetime.datetime(2022, 6, 26, 22, 0),\n", - " datetime.datetime(2022, 6, 26, 23, 0),\n", - " datetime.datetime(2022, 6, 27, 0, 0),\n", - " datetime.datetime(2022, 6, 27, 1, 0),\n", - " datetime.datetime(2022, 6, 27, 2, 0),\n", - " datetime.datetime(2022, 6, 27, 3, 0),\n", - " datetime.datetime(2022, 6, 27, 4, 0),\n", - " datetime.datetime(2022, 6, 27, 5, 0),\n", - " datetime.datetime(2022, 6, 27, 6, 0),\n", - " datetime.datetime(2022, 6, 27, 7, 0),\n", - " datetime.datetime(2022, 6, 27, 8, 0),\n", - " datetime.datetime(2022, 6, 27, 9, 0),\n", - " datetime.datetime(2022, 6, 27, 10, 0),\n", - " datetime.datetime(2022, 6, 27, 11, 0),\n", - " datetime.datetime(2022, 6, 27, 12, 0),\n", - " datetime.datetime(2022, 6, 27, 13, 0),\n", - " datetime.datetime(2022, 6, 27, 14, 0),\n", - " datetime.datetime(2022, 6, 27, 15, 0),\n", - " datetime.datetime(2022, 6, 27, 16, 0),\n", - " datetime.datetime(2022, 6, 27, 17, 0),\n", - " datetime.datetime(2022, 6, 27, 18, 0),\n", - " datetime.datetime(2022, 6, 27, 19, 0),\n", - " datetime.datetime(2022, 6, 27, 20, 0),\n", - " datetime.datetime(2022, 6, 27, 21, 0),\n", - " datetime.datetime(2022, 6, 27, 22, 0),\n", - " datetime.datetime(2022, 6, 27, 23, 0)]" + "[datetime.datetime(2021, 1, 1, 0, 0),\n", + " datetime.datetime(2021, 1, 1, 1, 0),\n", + " datetime.datetime(2021, 1, 1, 2, 0),\n", + " datetime.datetime(2021, 1, 1, 3, 0),\n", + " datetime.datetime(2021, 1, 1, 4, 0),\n", + " datetime.datetime(2021, 1, 1, 5, 0),\n", + " datetime.datetime(2021, 1, 1, 6, 0),\n", + " datetime.datetime(2021, 1, 1, 7, 0),\n", + " datetime.datetime(2021, 1, 1, 8, 0),\n", + " datetime.datetime(2021, 1, 1, 9, 0),\n", + " datetime.datetime(2021, 1, 1, 
10, 0),\n", + " datetime.datetime(2021, 1, 1, 11, 0),\n", + " datetime.datetime(2021, 1, 1, 12, 0),\n", + " datetime.datetime(2021, 1, 1, 13, 0),\n", + " datetime.datetime(2021, 1, 1, 14, 0),\n", + " datetime.datetime(2021, 1, 1, 15, 0),\n", + " datetime.datetime(2021, 1, 1, 16, 0),\n", + " datetime.datetime(2021, 1, 1, 17, 0),\n", + " datetime.datetime(2021, 1, 1, 18, 0),\n", + " datetime.datetime(2021, 1, 1, 19, 0),\n", + " datetime.datetime(2021, 1, 1, 20, 0),\n", + " datetime.datetime(2021, 1, 1, 21, 0),\n", + " datetime.datetime(2021, 1, 1, 22, 0),\n", + " datetime.datetime(2021, 1, 1, 23, 0),\n", + " datetime.datetime(2021, 1, 2, 0, 0),\n", + " datetime.datetime(2021, 1, 2, 1, 0),\n", + " datetime.datetime(2021, 1, 2, 2, 0),\n", + " datetime.datetime(2021, 1, 2, 3, 0),\n", + " datetime.datetime(2021, 1, 2, 4, 0),\n", + " datetime.datetime(2021, 1, 2, 5, 0),\n", + " datetime.datetime(2021, 1, 2, 6, 0),\n", + " datetime.datetime(2021, 1, 2, 7, 0),\n", + " datetime.datetime(2021, 1, 2, 8, 0),\n", + " datetime.datetime(2021, 1, 2, 9, 0),\n", + " datetime.datetime(2021, 1, 2, 10, 0),\n", + " datetime.datetime(2021, 1, 2, 11, 0),\n", + " datetime.datetime(2021, 1, 2, 12, 0),\n", + " datetime.datetime(2021, 1, 2, 13, 0),\n", + " datetime.datetime(2021, 1, 2, 14, 0),\n", + " datetime.datetime(2021, 1, 2, 15, 0),\n", + " datetime.datetime(2021, 1, 2, 16, 0),\n", + " datetime.datetime(2021, 1, 2, 17, 0),\n", + " datetime.datetime(2021, 1, 2, 18, 0),\n", + " datetime.datetime(2021, 1, 2, 19, 0),\n", + " datetime.datetime(2021, 1, 2, 20, 0),\n", + " datetime.datetime(2021, 1, 2, 21, 0),\n", + " datetime.datetime(2021, 1, 2, 22, 0),\n", + " datetime.datetime(2021, 1, 2, 23, 0)]" ] }, "execution_count": 5, @@ -751,7 +751,7 @@ " mask=False,\n", " fill_value=1e+20),\n", " 'dimensions': ('x',),\n", - " 'units': 'm',\n", + " 'units': '1000 m',\n", " 'long_name': 'x coordinate of projection',\n", " 'standard_name': 'projection_x_coordinate'}" ] @@ -975,7 +975,7 @@ " mask=False,\n", " fill_value=1e+20),\n", " 'dimensions': ('y',),\n", - " 'units': 'm',\n", + " 'units': '1000 m',\n", " 'long_name': 'y coordinate of projection',\n", " 'standard_name': 'projection_y_coordinate'}" ] @@ -1080,8 +1080,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Rank 000: Loading pm10 var (1/1)\n", - "Rank 000: Loaded pm10 var ((48, 1, 398, 478))\n" + "Rank 000: Loading sconco3 var (1/1)\n", + "Rank 000: Loaded sconco3 var ((48, 1, 398, 478))\n" ] } ], @@ -1097,103 +1097,103 @@ { "data": { "text/plain": [ - "{'pm10': {'data': masked_array(\n", - " data=[[[[2.05903081e-08, 2.10736815e-08, 2.16505036e-08, ...,\n", - " 1.57139212e-07, 1.56582516e-07, 1.58654302e-07],\n", - " [2.02237249e-08, 2.07961541e-08, 2.16525038e-08, ...,\n", - " 1.56947792e-07, 1.52752250e-07, 1.51975840e-07],\n", - " [1.92542160e-08, 1.95532017e-08, 2.09430890e-08, ...,\n", - " 1.56698391e-07, 1.52042290e-07, 1.49590434e-07],\n", + "{'sconco3': {'data': masked_array(\n", + " data=[[[[0.03952214, 0.0395867 , 0.03965145, ..., 0.03198181,\n", + " 0.03125041, 0.03153064],\n", + " [0.03945881, 0.03952107, 0.03958255, ..., 0.03184386,\n", + " 0.03127047, 0.03173521],\n", + " [0.03940514, 0.03945867, 0.03951972, ..., 0.03147388,\n", + " 0.031453 , 0.03222592],\n", " ...,\n", - " [2.00155412e-08, 1.88844460e-08, 1.72373600e-08, ...,\n", - " 1.03697766e-10, 1.24570437e-10, 1.35568029e-10],\n", - " [1.88825418e-08, 1.78339921e-08, 1.65571699e-08, ...,\n", - " 1.29691299e-10, 1.19366197e-10, 1.26047242e-10],\n", - " [1.75334254e-08, 1.67024794e-08, 
1.57620299e-08, ...,\n", - " 5.90659299e-10, 5.99817251e-10, 6.93886892e-10]]],\n", + " [0.03757998, 0.03767523, 0.03775784, ..., 0.02404686,\n", + " 0.02617675, 0.02804444],\n", + " [0.03747029, 0.03767779, 0.03778836, ..., 0.02497317,\n", + " 0.02633872, 0.02821054],\n", + " [0.03744229, 0.03762932, 0.03774261, ..., 0.02464963,\n", + " 0.02615741, 0.02828379]]],\n", " \n", " \n", - " [[[2.04419166e-08, 2.10894218e-08, 2.14073363e-08, ...,\n", - " 1.50338764e-07, 1.46800986e-07, 1.45907649e-07],\n", - " [1.97089882e-08, 2.07061799e-08, 2.14063096e-08, ...,\n", - " 1.50352193e-07, 1.44729924e-07, 1.41249188e-07],\n", - " [1.88334628e-08, 1.97772980e-08, 2.10062652e-08, ...,\n", - " 1.50320858e-07, 1.45472683e-07, 1.40802484e-07],\n", + " [[[0.03965769, 0.03971414, 0.03978673, ..., 0.03103125,\n", + " 0.03015577, 0.03036901],\n", + " [0.03960922, 0.03966256, 0.03972849, ..., 0.03101462,\n", + " 0.03021824, 0.03060081],\n", + " [0.03957865, 0.03962931, 0.03968912, ..., 0.03126283,\n", + " 0.03049327, 0.0311233 ],\n", " ...,\n", - " [1.58354645e-08, 1.31688500e-08, 1.04182032e-08, ...,\n", - " 1.36057263e-10, 1.61256425e-10, 1.82640861e-10],\n", - " [1.58553863e-08, 1.35575196e-08, 1.12653220e-08, ...,\n", - " 4.73602046e-10, 5.22056454e-10, 5.99001682e-10],\n", - " [1.51028274e-08, 1.29572175e-08, 1.06422196e-08, ...,\n", - " 1.20180965e-09, 1.22420163e-09, 1.27862843e-09]]],\n", + " [0.03786875, 0.03777716, 0.0376072 , ..., 0.02531419,\n", + " 0.02676462, 0.02868378],\n", + " [0.03781448, 0.03783737, 0.03778312, ..., 0.02607518,\n", + " 0.02714914, 0.02904003],\n", + " [0.03779451, 0.03781895, 0.03778093, ..., 0.02577951,\n", + " 0.02662436, 0.02910396]]],\n", " \n", " \n", - " [[[2.06775415e-08, 2.09295870e-08, 2.10403801e-08, ...,\n", - " 1.43437418e-07, 1.36887849e-07, 1.33009308e-07],\n", - " [2.03318944e-08, 2.06964064e-08, 2.09895372e-08, ...,\n", - " 1.43851324e-07, 1.36785971e-07, 1.30620521e-07],\n", - " [1.96500309e-08, 1.97185361e-08, 2.00775236e-08, ...,\n", - " 1.43990789e-07, 1.38915652e-07, 1.32079862e-07],\n", + " [[[0.03982776, 0.03989794, 0.04000261, ..., 0.03058425,\n", + " 0.02924641, 0.02938778],\n", + " [0.03979794, 0.03985743, 0.03995337, ..., 0.03089899,\n", + " 0.02948623, 0.02973373],\n", + " [0.03978007, 0.03982415, 0.03991285, ..., 0.03140561,\n", + " 0.03000267, 0.0304553 ],\n", " ...,\n", - " [1.15522543e-08, 1.03884448e-08, 8.92967922e-09, ...,\n", - " 2.96768943e-10, 5.31864996e-10, 6.76325274e-10],\n", - " [1.30444580e-08, 1.24477344e-08, 1.19089290e-08, ...,\n", - " 9.38010669e-10, 1.02875852e-09, 1.09171505e-09],\n", - " [1.33038087e-08, 1.28954767e-08, 1.27622268e-08, ...,\n", - " 1.10067289e-09, 1.11675491e-09, 1.12590814e-09]]],\n", + " [0.03821166, 0.03814133, 0.0379876 , ..., 0.02608518,\n", + " 0.02720294, 0.02940145],\n", + " [0.03820223, 0.03822928, 0.03817524, ..., 0.02666686,\n", + " 0.02796096, 0.02975006],\n", + " [0.03819539, 0.03822353, 0.03819501, ..., 0.02689083,\n", + " 0.02732235, 0.02981647]]],\n", " \n", " \n", " ...,\n", " \n", " \n", - " [[[1.99427657e-08, 2.07774260e-08, 2.12375095e-08, ...,\n", - " 1.42070576e-07, 1.23106801e-07, 1.05545318e-07],\n", - " [1.94999394e-08, 2.03327897e-08, 2.09769428e-08, ...,\n", - " 1.40088332e-07, 1.22823366e-07, 1.06322936e-07],\n", - " [1.92740401e-08, 2.01404546e-08, 2.12538360e-08, ...,\n", - " 1.37738098e-07, 1.25881471e-07, 1.11278482e-07],\n", + " [[[0.04243098, 0.04246398, 0.0425217 , ..., 0.03174707,\n", + " 0.02986301, 0.02911444],\n", + " [0.04233237, 0.0423856 , 0.0424809 , ..., 
0.03209906,\n", + " 0.03045077, 0.02993134],\n", + " [0.04244579, 0.04248011, 0.04255648, ..., 0.03262969,\n", + " 0.03137314, 0.03106195],\n", " ...,\n", - " [1.62301383e-09, 3.29047856e-09, 4.25983115e-09, ...,\n", - " 5.10578968e-10, 7.78555886e-10, 7.58658358e-10],\n", - " [1.61382108e-09, 3.05706660e-09, 3.85214838e-09, ...,\n", - " 1.44986870e-10, 4.45782633e-10, 6.53098131e-10],\n", - " [1.51593449e-09, 2.81125856e-09, 3.49995122e-09, ...,\n", - " 3.83349671e-11, 5.17163673e-11, 1.41100784e-10]]],\n", + " [0.03979023, 0.03997482, 0.04005315, ..., 0.03456535,\n", + " 0.03498862, 0.0354754 ],\n", + " [0.03967334, 0.0398716 , 0.04000713, ..., 0.03471798,\n", + " 0.03485566, 0.03500457],\n", + " [0.03964297, 0.03981132, 0.03993002, ..., 0.03497454,\n", + " 0.03504148, 0.03509094]]],\n", " \n", " \n", - " [[[2.12318145e-08, 2.15663487e-08, 2.17239737e-08, ...,\n", - " 1.33982709e-07, 1.17447051e-07, 1.01946490e-07],\n", - " [2.09607443e-08, 2.13947366e-08, 2.17022080e-08, ...,\n", - " 1.30945295e-07, 1.15195341e-07, 1.00572883e-07],\n", - " [2.06122941e-08, 2.12153246e-08, 2.20146017e-08, ...,\n", - " 1.27737934e-07, 1.15350041e-07, 1.01470562e-07],\n", + " [[[0.04243221, 0.04248913, 0.04257801, ..., 0.03141348,\n", + " 0.02992571, 0.02943683],\n", + " [0.04236476, 0.0424497 , 0.04255884, ..., 0.03173555,\n", + " 0.03054 , 0.03031359],\n", + " [0.04247259, 0.04253475, 0.0426126 , ..., 0.03197961,\n", + " 0.03136487, 0.03140653],\n", " ...,\n", - " [1.74116932e-09, 3.57977159e-09, 4.66885952e-09, ...,\n", - " 1.35524800e-10, 3.73824971e-10, 5.27946020e-10],\n", - " [1.69331171e-09, 3.35175421e-09, 4.35682335e-09, ...,\n", - " 6.59049343e-11, 8.77121500e-11, 1.72610246e-10],\n", - " [1.66064651e-09, 3.09451931e-09, 3.84808585e-09, ...,\n", - " 3.81935802e-11, 3.20906392e-11, 2.95690208e-11]]],\n", + " [0.03982663, 0.04001996, 0.04009991, ..., 0.03445901,\n", + " 0.0349366 , 0.03539021],\n", + " [0.03969444, 0.03990471, 0.04005693, ..., 0.03466238,\n", + " 0.03480245, 0.03491453],\n", + " [0.0396612 , 0.03983796, 0.03996958, ..., 0.03486277,\n", + " 0.03491549, 0.03492822]]],\n", " \n", " \n", - " [[[2.20933263e-08, 2.23298162e-08, 2.22625207e-08, ...,\n", - " 1.25896420e-07, 1.11788722e-07, 9.83487993e-08],\n", - " [2.17691785e-08, 2.21585328e-08, 2.22608545e-08, ...,\n", - " 1.21806693e-07, 1.07571751e-07, 9.48248058e-08],\n", - " [2.10832365e-08, 2.17355911e-08, 2.23796377e-08, ...,\n", - " 1.17739212e-07, 1.04824068e-07, 9.16652283e-08],\n", + " [[[0.04249088, 0.04256602, 0.04264675, ..., 0.03092884,\n", + " 0.02980646, 0.02965403],\n", + " [0.042451 , 0.04252698, 0.04259988, ..., 0.03106567,\n", + " 0.03033506, 0.03051317],\n", + " [0.04252941, 0.04258011, 0.04264118, ..., 0.0312073 ,\n", + " 0.03103344, 0.03165624],\n", " ...,\n", - " [1.82784676e-09, 3.94770527e-09, 5.16965803e-09, ...,\n", - " 7.80837409e-11, 9.47864148e-11, 1.31354164e-10],\n", - " [1.82815707e-09, 3.68124264e-09, 4.70819206e-09, ...,\n", - " 5.68534525e-11, 4.92194792e-11, 5.04330119e-11],\n", - " [1.79193316e-09, 3.41961126e-09, 4.36335901e-09, ...,\n", - " 4.09052167e-11, 3.92018085e-11, 3.67623848e-11]]]],\n", + " [0.03986304, 0.04006038, 0.04014875, ..., 0.03444035,\n", + " 0.03486974, 0.03531818],\n", + " [0.03971234, 0.03994204, 0.04009009, ..., 0.03459954,\n", + " 0.0347504 , 0.0348453 ],\n", + " [0.03967606, 0.03986803, 0.03999919, ..., 0.03475965,\n", + " 0.03481083, 0.03478444]]]],\n", " mask=False,\n", " fill_value=1e+20,\n", " dtype=float32),\n", " 'dimensions': ('time', 'lev', 'y', 'x'),\n", - " 
'units': 'kgm-3',\n", + " 'units': 'ppm',\n", " 'coordinates': 'lat lon',\n", " 'grid_mapping': 'Lambert_conformal'}}" ] @@ -1219,11 +1219,11 @@ "Rank 000: Creating lcc_file_1.nc\n", "Rank 000: NetCDF ready to write\n", "Rank 000: Dimensions done\n", - "Rank 000: Writing pm10 var (1/1)\n", - "Rank 000: Var pm10 created (1/1)\n", - "Rank 000: Filling pm10)\n", - "Rank 000: Var pm10 data (1/1)\n", - "Rank 000: Var pm10 completed (1/1)\n" + "Rank 000: Writing sconco3 var (1/1)\n", + "Rank 000: Var sconco3 created (1/1)\n", + "Rank 000: Filling sconco3)\n", + "Rank 000: Var sconco3 data (1/1)\n", + "Rank 000: Var sconco3 completed (1/1)\n" ] } ], @@ -1246,7 +1246,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -1259,6 +1259,13 @@ "nessy_2" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, { "cell_type": "code", "execution_count": 15, @@ -1621,56 +1628,56 @@ "
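The LCC tutorial follows the same read–load–write cycle; a compact sketch of what this hunk shows, with the path taken from the top of the notebook (the open call is assumed to match the one the Mercator tutorial uses, since it is not itself visible in this hunk):

```python
import xarray as xr
from nes import open_netcdf

nc_path_1 = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/sconco3/sconco3_2021010100.nc'

nessy_1 = open_netcdf(nc_path_1, info=True)
nessy_1.load()  # log above: sconco3 loaded as (48, 1, 398, 478), units 'ppm'
nessy_1.to_netcdf('lcc_file_1.nc', info=True)

# Round trip: the Lambert_conformal grid mapping and the lat/lon
# coordinates survive the rewrite.
xr.open_dataset('lcc_file_1.nc')
```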
    <xarray.Dataset>\n",
            "Dimensions:            (time: 48, lev: 1, y: 398, x: 478)\n",
            "Coordinates:\n",
    -       "  * time               (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n",
    +       "  * time               (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n",
            "  * lev                (lev) float64 0.0\n",
            "    lat                (y, x) float64 ...\n",
            "    lon                (y, x) float64 ...\n",
            "  * y                  (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n",
            "  * x                  (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n",
            "Data variables:\n",
    -       "    pm10               (time, lev, y, x) float32 ...\n",
    +       "    sconco3            (time, lev, y, x) float32 ...\n",
            "    Lambert_conformal  |S1 b''\n",
            "Attributes:\n",
    -       "    Conventions:  CF-1.7
    " + " Conventions: CF-1.7" ], "text/plain": [ "\n", "Dimensions: (time: 48, lev: 1, y: 398, x: 478)\n", "Coordinates:\n", - " * time (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n", + " * time (time) datetime64[ns] 2021-01-01 ... 2021-01-02T23:00:00\n", " * lev (lev) float64 0.0\n", " lat (y, x) float64 ...\n", " lon (y, x) float64 ...\n", " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", "Data variables:\n", - " pm10 (time, lev, y, x) float32 ...\n", + " sconco3 (time, lev, y, x) float32 ...\n", " Lambert_conformal |S1 ...\n", "Attributes:\n", " Conventions: CF-1.7" diff --git a/Jupyter_notebooks/1.5-mercator_grids.ipynb b/tutorials/1.Introduction/1.5.Read_Write_Mercator.ipynb similarity index 64% rename from Jupyter_notebooks/1.5-mercator_grids.ipynb rename to tutorials/1.Introduction/1.5.Read_Write_Mercator.ipynb index ad8cc7fa57036fb76c1b25bebd93af76b950032f..d4cb3f39016ba56ef84ad85a5204515898be6e1d 100644 --- a/Jupyter_notebooks/1.5-mercator_grids.ipynb +++ b/tutorials/1.Introduction/1.5.Read_Write_Mercator.ipynb @@ -44,15 +44,6 @@ "cell_type": "code", "execution_count": 3, "metadata": {}, - "outputs": [], - "source": [ - "# xr.open_dataset(nc_path_1, decode_times=False).drop(['lat_bnds', 'lon_bnds']).to_netcdf('input/mercator_grid_example.nc')" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, "outputs": [ { "data": { @@ -409,17 +400,20 @@ " fill: currentColor;\n", "}\n", "
    <xarray.Dataset>\n",
    -       "Dimensions:    (time: 1, y: 236, x: 210)\n",
    +       "Dimensions:    (time: 1, y: 236, x: 210, nv: 4)\n",
            "Coordinates:\n",
            "  * time       (time) float64 0.0\n",
            "    lat        (y, x) float32 -43.52 -43.52 -43.52 -43.52 ... 49.6 49.6 49.6\n",
            "    lon        (y, x) float32 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n",
            "  * x          (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n",
            "  * y          (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
    +       "Dimensions without coordinates: nv\n",
            "Data variables:\n",
    +       "    lat_bnds   (y, x, nv) float32 ...\n",
    +       "    lon_bnds   (y, x, nv) float32 ...\n",
            "    var_aux    (time, y, x) float32 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0\n",
            "    mercator   int32 -2147483647\n",
    -       "    cell_area  (y, x) float32 1.316e+09 1.316e+09 ... 1.051e+09 1.051e+09
  • " ], "text/plain": [ "\n", - "Dimensions: (time: 1, y: 236, x: 210)\n", + "Dimensions: (time: 1, y: 236, x: 210, nv: 4)\n", "Coordinates:\n", " * time (time) float64 0.0\n", " lat (y, x) float32 ...\n", " lon (y, x) float32 ...\n", " * x (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n", " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", + "Dimensions without coordinates: nv\n", "Data variables:\n", + " lat_bnds (y, x, nv) float32 ...\n", + " lon_bnds (y, x, nv) float32 ...\n", " var_aux (time, y, x) float32 ...\n", " mercator int32 ...\n", " cell_area (y, x) float32 ..." ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "xr.open_dataset('input/mercator_grid_example.nc', decode_times=False)" + "xr.open_dataset(nc_path_1, decode_times=False)" ] }, { @@ -495,28 +492,28 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 5, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nessy_1 = open_netcdf(path='input/mercator_grid_example.nc', info=True)\n", + "nessy_1 = open_netcdf(nc_path_1, info=True)\n", "nessy_1" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -525,7 +522,7 @@ "[datetime.datetime(2000, 1, 1, 0, 0)]" ] }, - "execution_count": 6, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -536,7 +533,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -545,7 +542,7 @@ "{'data': array([0]), 'units': ''}" ] }, - "execution_count": 7, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } @@ -556,7 +553,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -640,7 +637,7 @@ " 'standard_name': 'projection_x_coordinate'}" ] }, - "execution_count": 8, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } @@ -651,7 +648,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -713,7 +710,7 @@ " 'standard_name': 'projection_y_coordinate'}" ] }, - "execution_count": 9, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -724,7 +721,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "metadata": {}, "outputs": [ { @@ -755,7 +752,7 @@ " 'bounds': 'lat_bnds'}" ] }, - "execution_count": 10, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -766,7 +763,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -797,7 +794,7 @@ " 'bounds': 'lon_bnds'}" ] }, - "execution_count": 11, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -806,10 +803,322 @@ "nessy_1.lon" ] }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "masked_array(\n", + " data=[[[-43.68109893798828, -43.68109893798828, -43.354862213134766,\n", + " -43.354862213134766],\n", + " [-43.68109893798828, -43.68109893798828, -43.354862213134766,\n", + " -43.354862213134766],\n", + " [-43.68109893798828, -43.68109893798828, -43.354862213134766,\n", + " -43.354862213134766],\n", + " ...,\n", + " [-43.68109893798828, -43.68109893798828, -43.354862213134766,\n", + " 
-43.354862213134766],\n", + " [-43.68109893798828, -43.68109893798828, -43.354862213134766,\n", + " -43.354862213134766],\n", + " [-43.68109893798828, -43.68109893798828, -43.354862213134766,\n", + " -43.354862213134766]],\n", + "\n", + " [[-43.354862213134766, -43.354862213134766, -43.02686309814453,\n", + " -43.02686309814453],\n", + " [-43.354862213134766, -43.354862213134766, -43.02686309814453,\n", + " -43.02686309814453],\n", + " [-43.354862213134766, -43.354862213134766, -43.02686309814453,\n", + " -43.02686309814453],\n", + " ...,\n", + " [-43.354862213134766, -43.354862213134766, -43.02686309814453,\n", + " -43.02686309814453],\n", + " [-43.354862213134766, -43.354862213134766, -43.02686309814453,\n", + " -43.02686309814453],\n", + " [-43.354862213134766, -43.354862213134766, -43.02686309814453,\n", + " -43.02686309814453]],\n", + "\n", + " [[-43.02686309814453, -43.02686309814453, -42.69709777832031,\n", + " -42.69709777832031],\n", + " [-43.02686309814453, -43.02686309814453, -42.69709777832031,\n", + " -42.69709777832031],\n", + " [-43.02686309814453, -43.02686309814453, -42.69709777832031,\n", + " -42.69709777832031],\n", + " ...,\n", + " [-43.02686309814453, -43.02686309814453, -42.69709777832031,\n", + " -42.69709777832031],\n", + " [-43.02686309814453, -43.02686309814453, -42.69709777832031,\n", + " -42.69709777832031],\n", + " [-43.02686309814453, -43.02686309814453, -42.69709777832031,\n", + " -42.69709777832031]],\n", + "\n", + " ...,\n", + "\n", + " [[48.86896514892578, 48.86896514892578, 49.16401672363281,\n", + " 49.16401672363281],\n", + " [48.86896514892578, 48.86896514892578, 49.16401672363281,\n", + " 49.16401672363281],\n", + " [48.86896514892578, 48.86896514892578, 49.16401672363281,\n", + " 49.16401672363281],\n", + " ...,\n", + " [48.86896514892578, 48.86896514892578, 49.16401672363281,\n", + " 49.16401672363281],\n", + " [48.86896514892578, 48.86896514892578, 49.16401672363281,\n", + " 49.16401672363281],\n", + " [48.86896514892578, 48.86896514892578, 49.16401672363281,\n", + " 49.16401672363281]],\n", + "\n", + " [[49.16401672363281, 49.16401672363281, 49.45732498168945,\n", + " 49.45732498168945],\n", + " [49.16401672363281, 49.16401672363281, 49.45732498168945,\n", + " 49.45732498168945],\n", + " [49.16401672363281, 49.16401672363281, 49.45732498168945,\n", + " 49.45732498168945],\n", + " ...,\n", + " [49.16401672363281, 49.16401672363281, 49.45732498168945,\n", + " 49.45732498168945],\n", + " [49.16401672363281, 49.16401672363281, 49.45732498168945,\n", + " 49.45732498168945],\n", + " [49.16401672363281, 49.16401672363281, 49.45732498168945,\n", + " 49.45732498168945]],\n", + "\n", + " [[49.45732498168945, 49.45732498168945, 49.74888229370117,\n", + " 49.74888229370117],\n", + " [49.45732498168945, 49.45732498168945, 49.74888229370117,\n", + " 49.74888229370117],\n", + " [49.45732498168945, 49.45732498168945, 49.74888229370117,\n", + " 49.74888229370117],\n", + " ...,\n", + " [49.45732498168945, 49.45732498168945, 49.74888229370117,\n", + " 49.74888229370117],\n", + " [49.45732498168945, 49.45732498168945, 49.74888229370117,\n", + " 49.74888229370117],\n", + " [49.45732498168945, 49.45732498168945, 49.74888229370117,\n", + " 49.74888229370117]]],\n", + " mask=[[[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, 
False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " ...,\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]]],\n", + " fill_value=1e+20,\n", + " dtype=float32)" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat_bnds" + ] + }, { "cell_type": "code", "execution_count": 12, "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "masked_array(\n", + " data=[[[-19.13384246826172, -18.683956146240234, -18.683956146240234,\n", + " -19.13384246826172],\n", + " [-18.683956146240234, -18.234071731567383, -18.234071731567383,\n", + " -18.683956146240234],\n", + " [-18.234071731567383, -17.7841854095459, -17.7841854095459,\n", + " -18.234071731567383],\n", + " ...,\n", + " [73.99246978759766, 74.44235229492188, 74.44235229492188,\n", + " 73.99246978759766],\n", + " [74.44235229492188, 74.89224243164062, 74.89224243164062,\n", + " 74.44235229492188],\n", + " [74.89224243164062, 75.34212493896484, 75.34212493896484,\n", + " 74.89224243164062]],\n", + "\n", + " [[-19.13384246826172, -18.683956146240234, -18.683956146240234,\n", + " -19.13384246826172],\n", + " [-18.683956146240234, -18.234071731567383, -18.234071731567383,\n", + " -18.683956146240234],\n", + " [-18.234071731567383, -17.7841854095459, -17.7841854095459,\n", + " -18.234071731567383],\n", + " ...,\n", + " [73.99246978759766, 74.44235229492188, 74.44235229492188,\n", + " 73.99246978759766],\n", + " [74.44235229492188, 74.89224243164062, 74.89224243164062,\n", + " 74.44235229492188],\n", + " [74.89224243164062, 75.34212493896484, 75.34212493896484,\n", + " 74.89224243164062]],\n", + "\n", + " [[-19.13384246826172, -18.683956146240234, -18.683956146240234,\n", + " -19.13384246826172],\n", + " [-18.683956146240234, -18.234071731567383, -18.234071731567383,\n", + " -18.683956146240234],\n", + " [-18.234071731567383, -17.7841854095459, -17.7841854095459,\n", + " -18.234071731567383],\n", + " ...,\n", + " [73.99246978759766, 74.44235229492188, 74.44235229492188,\n", + " 73.99246978759766],\n", + " [74.44235229492188, 74.89224243164062, 74.89224243164062,\n", + " 74.44235229492188],\n", + " [74.89224243164062, 75.34212493896484, 75.34212493896484,\n", + " 74.89224243164062]],\n", + "\n", + " ...,\n", + "\n", + " [[-19.13384246826172, -18.683956146240234, -18.683956146240234,\n", + " -19.13384246826172],\n", + " [-18.683956146240234, -18.234071731567383, -18.234071731567383,\n", + " 
-18.683956146240234],\n", + " [-18.234071731567383, -17.7841854095459, -17.7841854095459,\n", + " -18.234071731567383],\n", + " ...,\n", + " [73.99246978759766, 74.44235229492188, 74.44235229492188,\n", + " 73.99246978759766],\n", + " [74.44235229492188, 74.89224243164062, 74.89224243164062,\n", + " 74.44235229492188],\n", + " [74.89224243164062, 75.34212493896484, 75.34212493896484,\n", + " 74.89224243164062]],\n", + "\n", + " [[-19.13384246826172, -18.683956146240234, -18.683956146240234,\n", + " -19.13384246826172],\n", + " [-18.683956146240234, -18.234071731567383, -18.234071731567383,\n", + " -18.683956146240234],\n", + " [-18.234071731567383, -17.7841854095459, -17.7841854095459,\n", + " -18.234071731567383],\n", + " ...,\n", + " [73.99246978759766, 74.44235229492188, 74.44235229492188,\n", + " 73.99246978759766],\n", + " [74.44235229492188, 74.89224243164062, 74.89224243164062,\n", + " 74.44235229492188],\n", + " [74.89224243164062, 75.34212493896484, 75.34212493896484,\n", + " 74.89224243164062]],\n", + "\n", + " [[-19.13384246826172, -18.683956146240234, -18.683956146240234,\n", + " -19.13384246826172],\n", + " [-18.683956146240234, -18.234071731567383, -18.234071731567383,\n", + " -18.683956146240234],\n", + " [-18.234071731567383, -17.7841854095459, -17.7841854095459,\n", + " -18.234071731567383],\n", + " ...,\n", + " [73.99246978759766, 74.44235229492188, 74.44235229492188,\n", + " 73.99246978759766],\n", + " [74.44235229492188, 74.89224243164062, 74.89224243164062,\n", + " 74.44235229492188],\n", + " [74.89224243164062, 75.34212493896484, 75.34212493896484,\n", + " 74.89224243164062]]],\n", + " mask=[[[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " ...,\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]],\n", + "\n", + " [[False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " ...,\n", + " [False, False, False, False],\n", + " [False, False, False, False],\n", + " [False, False, False, False]]],\n", + " fill_value=1e+20,\n", + " dtype=float32)" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon_bnds" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, "outputs": [ { "name": "stdout", @@ -828,7 +1137,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 14, "metadata": 
{}, "outputs": [ { @@ -847,9 +1156,9 @@ " dtype=float32),\n", " 'dimensions': ('time', 'y', 'x'),\n", " 'units': '?',\n", + " 'coordinates': 'lat lon',\n", " 'cell_measures': 'area: cell_area',\n", - " 'grid_mapping': 'mercator',\n", - " 'coordinates': 'lat lon'},\n", + " 'grid_mapping': 'mercator'},\n", " 'cell_area': {'data': masked_array(\n", " data=[[[[1.31594240e+09, 1.31593690e+09, 1.31594240e+09, ...,\n", " 1.31593126e+09, 1.31595354e+09, 1.31593126e+09],\n", @@ -870,11 +1179,10 @@ " 'dimensions': ('y', 'x'),\n", " 'long_name': 'area of the grid cell',\n", " 'standard_name': 'cell_area',\n", - " 'units': 'm2',\n", - " 'coordinates': 'lat lon'}}" + " 'units': 'm2'}}" ] }, - "execution_count": 13, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } @@ -885,7 +1193,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -921,16 +1229,16 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 15, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -940,9 +1248,16 @@ "nessy_2" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 17, "metadata": {}, "outputs": [ { @@ -1300,7 +1615,7 @@ " fill: currentColor;\n", "}\n", "
    <xarray.Dataset>\n",
    -       "Dimensions:    (time: 1, lev: 1, y: 236, x: 210)\n",
    +       "Dimensions:    (time: 1, lev: 1, y: 236, x: 210, spatial_nv: 4)\n",
            "Coordinates:\n",
            "  * time       (time) datetime64[ns] 2000-01-01\n",
            "  * lev        (lev) float64 0.0\n",
    @@ -1308,12 +1623,15 @@
            "    lon        (y, x) float64 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n",
            "  * y          (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
            "  * x          (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n",
    +       "Dimensions without coordinates: spatial_nv\n",
            "Data variables:\n",
    +       "    lat_bnds   (y, x, spatial_nv) float64 ...\n",
    +       "    lon_bnds   (y, x, spatial_nv) float64 ...\n",
            "    var_aux    (time, lev, y, x) float32 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0\n",
            "    cell_area  (time, lev, y, x) float32 1.316e+09 1.316e+09 ... 1.051e+09\n",
            "    mercator   |S1 b''\n",
            "Attributes:\n",
    -       "    Conventions:  CF-1.7
    
  • " ], "text/plain": [ "\n", - "Dimensions: (time: 1, lev: 1, y: 236, x: 210)\n", + "Dimensions: (time: 1, lev: 1, y: 236, x: 210, spatial_nv: 4)\n", "Coordinates:\n", " * time (time) datetime64[ns] 2000-01-01\n", " * lev (lev) float64 0.0\n", @@ -1354,7 +1672,10 @@ " lon (y, x) float64 ...\n", " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", " * x (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n", + "Dimensions without coordinates: spatial_nv\n", "Data variables:\n", + " lat_bnds (y, x, spatial_nv) float64 ...\n", + " lon_bnds (y, x, spatial_nv) float64 ...\n", " var_aux (time, lev, y, x) float32 ...\n", " cell_area (time, lev, y, x) float32 ...\n", " mercator |S1 ...\n", @@ -1362,7 +1683,7 @@ " Conventions: CF-1.7" ] }, - "execution_count": 16, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } diff --git a/tutorials/1.Introduction/1.6.Read_Write_Providentia.ipynb b/tutorials/1.Introduction/1.6.Read_Write_Providentia.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..11985c0eb74d8c6ad76077bbeff0d5b9e08230c8 --- /dev/null +++ b/tutorials/1.Introduction/1.6.Read_Write_Providentia.ipynb @@ -0,0 +1,15409 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write Providentia files" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from netCDF4 import Dataset\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Observations" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "obs_path = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.3.3/hourly/sconco3/sconco3_201804.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.1. Read dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                                                           (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n",
    +       "Coordinates:\n",
    +       "  * time                                                              (time) datetime64[ns] ...\n",
    +       "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n",
    +       "Data variables: (12/179)\n",
    +       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_NMVOC_emissions                       (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_NOx_emissions                         (station) float32 ...\n",
    +       "    ...                                                                ...\n",
    +       "    station_timezone                                                  (station) object ...\n",
    +       "    street_type                                                       (station) object ...\n",
    +       "    street_width                                                      (station) float32 ...\n",
    +       "    terrain                                                           (station) object ...\n",
    +       "    vertical_datum                                                    (station) object ...\n",
    +       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
    +       "Attributes:\n",
    +       "    title:                     Surface ozone data in the EBAS network in 2018...\n",
    +       "    institution:               Barcelona Supercomputing Center\n",
    +       "    source:                    Surface observations\n",
    +       "    creator_name:              Dene R. Bowdalo\n",
    +       "    creator_email:             dene.bowdalo@bsc.es\n",
    +       "    conventions:               CF-1.7\n",
    +       "    data_version:              1.3.3\n",
    +       "    history:                   Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n",
    +       "    NCO:                       4.7.2\n",
    +       "    nco_openmp_thread_number:  1
    " + ], + "text/plain": [ + "\n", + "Dimensions: (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n", + "Data variables: (12/179)\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NMVOC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NOx_emissions (station) float32 ...\n", + " ... ...\n", + " station_timezone (station) object ...\n", + " street_type (station) object ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + "Attributes:\n", + " title: Surface ozone data in the EBAS network in 2018...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.3.3\n", + " history: Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n", + " NCO: 4.7.2\n", + " nco_openmp_thread_number: 1" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(obs_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "obs_nes = open_netcdf(path=obs_path, info=True, parallel_method='X')\n", + "obs_nes" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "obs_nes.time" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "obs_nes.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, 47.76666641, 46.677778 ,\n", + " 48.721111 , 47.529167 , 47.05407 , 47.348056 ,\n", + " 47.973056 , 48.878611 , 48.106111 , 48.371111 ,\n", + " 48.334722 , 48.050833 , 47.838611 , 47.040277 ,\n", + " 47.06694444, 49.877778 , 50.629421 , 50.503333 ,\n", + " 41.695833 , 32.27000046, 80.05000305, 46.5475 ,\n", + " 46.813056 , 47.479722 , 47.049722 , 47.0675 ,\n", + " 47.18961391, -30.17254 , 16.86403 , 35.0381 ,\n", + " 49.73508444, 49.573394 , 49.066667 , 54.925556 ,\n", + " 52.802222 , 47.914722 , 53.166667 , 50.65 ,\n", + " 54.4368 , 47.80149841, 47.4165 , -70.666 ,\n", + " 54.746495 , 81.6 , 55.693588 , 72.58000183,\n", + " 56.290424 , 59.5 , 58.383333 , 39.54694 ,\n", + " 42.72056 , 39.87528 , 37.23722 , 43.43917 ,\n", + " 41.27417 , 42.31917 , 38.47278 , 39.08278 ,\n", + " 41.23889 , 41.39389 , 42.63472 , 37.05194 ,\n", + " 28.309 , 59.779167 , 60.53002 , 66.320278 ,\n", + " 67.97333333, 48.5 , 49.9 , 47.266667 ,\n", + " 43.616667 , 47.3 , 46.65 , 45. 
,\n", + " 45.8 , 48.633333 , 42.936667 , 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , 51.149617 , 38.366667 ,\n", + " 35.316667 , 46.966667 , 46.91 , -0.20194 ,\n", + " 51.939722 , 53.32583 , 45.8 , 44.183333 ,\n", + " 37.571111 , 42.805462 , -69.005 , 39.0319 ,\n", + " 24.2883 , 24.466941 , 36.53833389, 33.293917 ,\n", + " 55.37611111, 56.161944 , 57.135278 , 36.0722 ,\n", + " 52.083333 , 53.333889 , 51.541111 , 52.3 ,\n", + " 51.974444 , 58.38853 , 65.833333 , 62.783333 ,\n", + " 78.90715 , 59. , 69.45 , 59.2 ,\n", + " 60.372386 , -72.0117 , 59.2 , -41.40819168,\n", + " -77.83200073, -45.0379982 , 51.814408 , 50.736444 ,\n", + " 54.753894 , 54.15 , 43.4 , 71.58616638,\n", + " 63.85 , 67.883333 , 57.394 , 57.1645 ,\n", + " 57.9525 , 56.0429 , 60.0858 , 57.816667 ,\n", + " 64.25 , 59.728 , 45.566667 , 46.428611 ,\n", + " 46.299444 , 48.933333 , 49.15 , 49.05 ,\n", + " 47.96 , 71.32301331, 40.12498 , 19.53623009,\n", + " -89.99694824, 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "obs_nes.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, 1.67666664e+01,\n", + " 1.29722220e+01, 1.59422220e+01, 9.92666700e+00,\n", + " 1.29579400e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 5.27897222e+00, 2.61000833e+00,\n", + " 2.96488600e+00, -3.20416700e+00, -7.87000000e+00,\n", + " -3.71305600e+00, -8.07500000e-01, -4.77444400e+00,\n", + " -3.03305600e+00, -3.20500000e+00, -1.75333300e+00,\n", + " 
1.79444000e-01, 1.46305600e+00, -4.69146200e+00,\n", + " 2.92778000e-01, -3.24290000e+00, 1.12194400e+00,\n", + " 1.08223000e+00, -1.18531900e+00, -1.43822800e+00,\n", + " 2.30833330e+01, 2.56666670e+01, 1.95833330e+01,\n", + " 1.63200000e+01, 1.00318100e+02, -1.02444440e+01,\n", + " -9.89944000e+00, 8.63333300e+00, 1.07000000e+01,\n", + " 1.26597220e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 1.42184000e+01,\n", + " 6.56666700e+00, 6.27722200e+00, 5.85361100e+00,\n", + " 4.50000000e+00, 4.92361100e+00, 8.25200000e+00,\n", + " 1.39166670e+01, 8.88333300e+00, 1.18866800e+01,\n", + " 1.15333330e+01, 3.00333330e+01, 5.20000000e+00,\n", + " 1.10781420e+01, 2.53510000e+00, 9.51666700e+00,\n", + " 1.74870804e+02, 1.66660004e+02, 1.69684006e+02,\n", + " 2.19724190e+01, 1.57395000e+01, 1.75342640e+01,\n", + " 2.20666670e+01, 2.19500000e+01, 1.28918823e+02,\n", + " 1.53333330e+01, 2.10666670e+01, 1.19140000e+01,\n", + " 1.47825000e+01, 1.24030000e+01, 1.31480000e+01,\n", + " 1.75052800e+01, 1.55666670e+01, 1.97666670e+01,\n", + " 1.54720000e+01, 1.48666670e+01, 1.50033330e+01,\n", + " 1.45386110e+01, 1.95833330e+01, 2.02833330e+01,\n", + " 2.22666670e+01, 1.78605560e+01, -1.56611465e+02,\n", + " -1.05236800e+02, -1.55576157e+02, -2.47999992e+01,\n", + " -1.24151001e+02, 1.03515700e+02, 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "obs_nes.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading ASTER_v3_altitude var (1/175)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var 
(10/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((168,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (12/175)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((168,))\n", + "Rank 000: Loading ESDAC_Meybeck_landform_classification var (13/175)\n", + "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_25km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((168,))\n", + "Rank 000: Loading ETOPO1_altitude var (18/175)\n", + "Rank 000: Loaded ETOPO1_altitude var ((168,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (19/175)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((168,))\n", + "Rank 000: Loading GHOST_version var (20/175)\n", + "Rank 000: Loaded GHOST_version var ((168,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (21/175)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (22/175)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (23/175)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (24/175)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (25/175)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((168,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (26/175)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (27/175)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (28/175)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (29/175)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (30/175)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((168,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (31/175)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((168,))\n", + "Rank 000: Loading GHSL_population_density var (32/175)\n", + "Rank 000: Loaded GHSL_population_density var ((168,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (33/175)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var 
((168,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (34/175)\n", + "Rank 000: Loaded GPW_average_population_density_25km var ((168,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (35/175)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((168,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (36/175)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((168,))\n", + "Rank 000: Loading GPW_max_population_density_5km var (37/175)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((168,))\n", + "Rank 000: Loading GPW_population_density var (38/175)\n", + "Rank 000: Loaded GPW_population_density var ((168,))\n", + "Rank 000: Loading GSFC_coastline_proximity var (39/175)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((168,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (40/175)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (41/175)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (42/175)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (43/175)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (45/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var 
((168,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (58/175)\n", + "Rank 000: Loaded OMI_level3_column_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (62/175)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((168,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (63/175)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((168,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (64/175)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((168,))\n", + "Rank 000: Loading WMO_region var (65/175)\n", + "Rank 000: Loaded WMO_region var ((168,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (66/175)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((168,))\n", + "Rank 000: Loading WWF_TEOW_biome var (67/175)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((168,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (68/175)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((168,))\n", + "Rank 000: Loading administrative_country_division_1 var (69/175)\n", + "Rank 000: Loaded administrative_country_division_1 var ((168,))\n", + "Rank 000: Loading administrative_country_division_2 var (70/175)\n", + "Rank 000: Loaded administrative_country_division_2 var ((168,))\n", + "Rank 000: Loading altitude var (71/175)\n", + "Rank 000: Loaded altitude var ((168,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (72/175)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading annual_native_representativity_percent var (73/175)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading area_classification var (74/175)\n", + "Rank 000: Loaded area_classification var ((168,))\n", + "Rank 000: Loading associated_networks var (75/175)\n", + "Rank 000: Loaded associated_networks var ((168,))\n", + "Rank 000: Loading city var (76/175)\n", + "Rank 000: Loaded city var ((168,))\n", + "Rank 000: Loading climatology var (77/175)\n", + "Rank 000: Loaded climatology var ((168,))\n", + "Rank 000: Loading contact_email_address var (78/175)\n", + "Rank 000: Loaded contact_email_address var ((168,))\n", + "Rank 000: Loading contact_institution var (79/175)\n", + "Rank 000: Loaded contact_institution var ((168,))\n", + "Rank 000: Loading contact_name var (80/175)\n", + "Rank 000: Loaded contact_name var ((168,))\n", + "Rank 000: Loading country var (81/175)\n", + "Rank 000: Loaded country var ((168,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (82/175)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading daily_native_representativity_percent var (83/175)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading daily_passing_vehicles var (84/175)\n", + "Rank 000: Loaded daily_passing_vehicles var 
((168,))\n", + "Rank 000: Loading data_level var (85/175)\n", + "Rank 000: Loaded data_level var ((168,))\n", + "Rank 000: Loading data_licence var (86/175)\n", + "Rank 000: Loaded data_licence var ((168,))\n", + "Rank 000: Loading day_night_code var (87/175)\n", + "Rank 000: Loaded day_night_code var ((168, 720))\n", + "Rank 000: Loading daytime_traffic_speed var (88/175)\n", + "Rank 000: Loaded daytime_traffic_speed var ((168,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (89/175)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((168, 720))\n", + "Rank 000: Loading distance_to_building var (90/175)\n", + "Rank 000: Loaded distance_to_building var ((168,))\n", + "Rank 000: Loading distance_to_junction var (91/175)\n", + "Rank 000: Loaded distance_to_junction var ((168,))\n", + "Rank 000: Loading distance_to_kerb var (92/175)\n", + "Rank 000: Loaded distance_to_kerb var ((168,))\n", + "Rank 000: Loading distance_to_source var (93/175)\n", + "Rank 000: Loaded distance_to_source var ((168,))\n", + "Rank 000: Loading ellipsoid var (94/175)\n", + "Rank 000: Loaded ellipsoid var ((168,))\n", + "Rank 000: Loading horizontal_datum var (95/175)\n", + "Rank 000: Loaded horizontal_datum var ((168,))\n", + "Rank 000: Loading hourly_native_max_gap_percent var (96/175)\n", + "Rank 000: Loaded hourly_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading hourly_native_representativity_percent var (97/175)\n", + "Rank 000: Loaded hourly_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading land_use var (98/175)\n", + "Rank 000: Loaded land_use var ((168,))\n", + "Rank 000: Loading local_time var (99/175)\n", + "Rank 000: Loaded local_time var ((168, 720))\n", + "Rank 000: Loading main_emission_source var (100/175)\n", + "Rank 000: Loaded main_emission_source var ((168,))\n", + "Rank 000: Loading mean_solar_time var (101/175)\n", + "Rank 000: Loaded mean_solar_time var ((168, 720))\n", + "Rank 000: Loading measurement_altitude var (102/175)\n", + "Rank 000: Loaded measurement_altitude var ((168,))\n", + "Rank 000: Loading measurement_methodology var (103/175)\n", + "Rank 000: Loaded measurement_methodology var ((168,))\n", + "Rank 000: Loading measurement_scale var (104/175)\n", + "Rank 000: Loaded measurement_scale var ((168,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (105/175)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (106/175)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (107/175)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (108/175)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (110/175)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (111/175)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((168,))\n", + "Rank 000: Loading 
measuring_instrument_documented_span_drift var (112/175)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (113/175)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (115/175)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_zonal_drift var (116/175)\n", + "Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_further_details var (117/175)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((168,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (118/175)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((168,))\n", + "Rank 000: Loading measuring_instrument_manual_name var (119/175)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((168,))\n", + "Rank 000: Loading measuring_instrument_name var (120/175)\n", + "Rank 000: Loaded measuring_instrument_name var ((168,))\n", + "Rank 000: Loading measuring_instrument_process_details var (121/175)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (122/175)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (123/175)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (124/175)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (126/175)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (127/175)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (128/175)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (129/175)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (130/175)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (132/175)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (133/175)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((168,))\n", + "Rank 000: 
Loading measuring_instrument_sampling_type var (134/175)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((168,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (135/175)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading monthly_native_representativity_percent var (136/175)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading network var (137/175)\n", + "Rank 000: Loaded network var ((168,))\n", + "Rank 000: Loading network_maintenance_details var (138/175)\n", + "Rank 000: Loaded network_maintenance_details var ((168,))\n", + "Rank 000: Loading network_miscellaneous_details var (139/175)\n", + "Rank 000: Loaded network_miscellaneous_details var ((168,))\n", + "Rank 000: Loading network_provided_volume_standard_pressure var (140/175)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((168,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (141/175)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((168,))\n", + "Rank 000: Loading network_qa_details var (142/175)\n", + "Rank 000: Loaded network_qa_details var ((168,))\n", + "Rank 000: Loading network_sampling_details var (143/175)\n", + "Rank 000: Loaded network_sampling_details var ((168,))\n", + "Rank 000: Loading network_uncertainty_details var (144/175)\n", + "Rank 000: Loaded network_uncertainty_details var ((168,))\n", + "Rank 000: Loading population var (145/175)\n", + "Rank 000: Loaded population var ((168,))\n", + "Rank 000: Loading primary_sampling_further_details var (146/175)\n", + "Rank 000: Loaded primary_sampling_further_details var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (147/175)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (148/175)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (149/175)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (150/175)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((168,))\n", + "Rank 000: Loading primary_sampling_process_details var (151/175)\n", + "Rank 000: Loaded primary_sampling_process_details var ((168,))\n", + "Rank 000: Loading primary_sampling_type var (152/175)\n", + "Rank 000: Loaded primary_sampling_type var ((168,))\n", + "Rank 000: Loading principal_investigator_email_address var (153/175)\n", + "Rank 000: Loaded principal_investigator_email_address var ((168,))\n", + "Rank 000: Loading principal_investigator_institution var (154/175)\n", + "Rank 000: Loaded principal_investigator_institution var ((168,))\n", + "Rank 000: Loading principal_investigator_name var (155/175)\n", + "Rank 000: Loaded principal_investigator_name var ((168,))\n", + "Rank 000: Loading process_warnings var (156/175)\n", + "Rank 000: Loaded process_warnings var ((168,))\n", + "Rank 000: Loading projection var (157/175)\n", + "Rank 000: Loaded projection var ((168,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (158/175)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((168, 720))\n", + "Rank 000: Loading representative_radius var (159/175)\n", + "Rank 000: Loaded representative_radius var ((168,))\n", + "Rank 
000: Loading sample_preparation_further_details var (160/175)\n", + "Rank 000: Loaded sample_preparation_further_details var ((168,))\n", + "Rank 000: Loading sample_preparation_process_details var (161/175)\n", + "Rank 000: Loaded sample_preparation_process_details var ((168,))\n", + "Rank 000: Loading sample_preparation_techniques var (162/175)\n", + "Rank 000: Loaded sample_preparation_techniques var ((168,))\n", + "Rank 000: Loading sample_preparation_types var (163/175)\n", + "Rank 000: Loaded sample_preparation_types var ((168,))\n", + "Rank 000: Loading sampling_height var (164/175)\n", + "Rank 000: Loaded sampling_height var ((168,))\n", + "Rank 000: Loading sconco3 var (165/175)\n", + "Rank 000: Loaded sconco3 var ((168, 720))\n", + "Rank 000: Loading season_code var (166/175)\n", + "Rank 000: Loaded season_code var ((168, 720))\n", + "Rank 000: Loading station_classification var (167/175)\n", + "Rank 000: Loaded station_classification var ((168,))\n", + "Rank 000: Loading station_name var (168/175)\n", + "Rank 000: Loaded station_name var ((168,))\n", + "Rank 000: Loading station_reference var (169/175)\n", + "Rank 000: Loaded station_reference var ((168,))\n", + "Rank 000: Loading station_timezone var (170/175)\n", + "Rank 000: Loaded station_timezone var ((168,))\n", + "Rank 000: Loading street_type var (171/175)\n", + "Rank 000: Loaded street_type var ((168,))\n", + "Rank 000: Loading street_width var (172/175)\n", + "Rank 000: Loaded street_width var ((168,))\n", + "Rank 000: Loading terrain var (173/175)\n", + "Rank 000: Loaded terrain var ((168,))\n", + "Rank 000: Loading vertical_datum var (174/175)\n", + "Rank 000: Loaded vertical_datum var ((168,))\n", + "Rank 000: Loading weekday_weekend_code var (175/175)\n", + "Rank 000: Loaded weekday_weekend_code var ((168, 720))\n" + ] + } + ], + "source": [ + "obs_nes.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.2. 
Write dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating prv_obs_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing ASTER_v3_altitude var (1/175)\n", + "Rank 000: Var ASTER_v3_altitude created (1/175)\n", + "Rank 000: Var ASTER_v3_altitude data (1/175)\n", + "Rank 000: Var ASTER_v3_altitude completed (1/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions created (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions data (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions completed (2/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions created (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions data (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions completed (3/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions created (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions data (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions completed (4/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions created (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions data (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions completed (5/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions created (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions data (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions completed (6/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions created (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions data (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions completed (7/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions created (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions data (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions completed (8/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions created (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions data (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions completed (9/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions created (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions data (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions completed (10/175)\n", + "Rank 000: Writing 
EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions created (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions data (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions completed (11/175)\n", + "Rank 000: Writing ESDAC_Iwahashi_landform_classification var (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification created (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification data (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification completed (12/175)\n", + "Rank 000: Writing ESDAC_Meybeck_landform_classification var (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification created (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification data (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification completed (13/175)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km created (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km data (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km completed (14/175)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km created (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km data (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km completed (15/175)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km created (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km data (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km completed (16/175)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km created (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km data (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km completed (17/175)\n", + "Rank 000: Writing ETOPO1_altitude var (18/175)\n", + "Rank 000: Var ETOPO1_altitude created (18/175)\n", + "Rank 000: Var ETOPO1_altitude data (18/175)\n", + "Rank 000: Var ETOPO1_altitude completed (18/175)\n", + "Rank 000: Writing ETOPO1_max_altitude_difference_5km var (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km created (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km data (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km completed (19/175)\n", + "Rank 000: Writing GHOST_version var (20/175)\n", + "Rank 000: Var GHOST_version created (20/175)\n", + "Rank 000: Var GHOST_version data (20/175)\n", + "Rank 000: Var GHOST_version completed (20/175)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_25km var (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km created (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km data (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km completed (21/175)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_5km var (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km created (22/175)\n", + "Rank 000: Var 
GHSL_average_built_up_area_density_5km data (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km completed (22/175)\n", + "Rank 000: Writing GHSL_average_population_density_25km var (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km created (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km data (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km completed (23/175)\n", + "Rank 000: Writing GHSL_average_population_density_5km var (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km created (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km data (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km completed (24/175)\n", + "Rank 000: Writing GHSL_built_up_area_density var (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density created (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density data (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density completed (25/175)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_25km var (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km created (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km data (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km completed (26/175)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_5km var (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km created (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km data (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km completed (27/175)\n", + "Rank 000: Writing GHSL_max_population_density_25km var (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km created (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km data (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km completed (28/175)\n", + "Rank 000: Writing GHSL_max_population_density_5km var (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km created (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km data (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km completed (29/175)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_25km var (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km created (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km data (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km completed (30/175)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_5km var (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km created (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km data (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km completed (31/175)\n", + "Rank 000: Writing GHSL_population_density var (32/175)\n", + "Rank 000: Var GHSL_population_density created (32/175)\n", + "Rank 000: Var GHSL_population_density data (32/175)\n", + "Rank 000: Var GHSL_population_density completed (32/175)\n", + "Rank 000: Writing GHSL_settlement_model_classification var (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification created (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification data (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification completed (33/175)\n", + "Rank 000: Writing GPW_average_population_density_25km var (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km created (34/175)\n", + "Rank 000: Var 
GPW_average_population_density_25km data (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km completed (34/175)\n", + "Rank 000: Writing GPW_average_population_density_5km var (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km created (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km data (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km completed (35/175)\n", + "Rank 000: Writing GPW_max_population_density_25km var (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km created (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km data (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km completed (36/175)\n", + "Rank 000: Writing GPW_max_population_density_5km var (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km created (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km data (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km completed (37/175)\n", + "Rank 000: Writing GPW_population_density var (38/175)\n", + "Rank 000: Var GPW_population_density created (38/175)\n", + "Rank 000: Var GPW_population_density data (38/175)\n", + "Rank 000: Var GPW_population_density completed (38/175)\n", + "Rank 000: Writing GSFC_coastline_proximity var (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity created (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity data (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity completed (39/175)\n", + "Rank 000: Writing Joly-Peuch_classification_code var (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code created (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code data (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code completed (40/175)\n", + "Rank 000: Writing Koppen-Geiger_classification var (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification created (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification data (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification completed (41/175)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_25km var (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km created (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km data (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km completed (42/175)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_5km var (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km created (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km data (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km completed (43/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use created (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use data (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use completed (44/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_LAI var (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI created (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI data (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI completed (45/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use created (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use data (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use completed (46/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km 
created (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km data (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km completed (47/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km created (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km data (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km completed (48/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km created (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km data (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km completed (49/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km created (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km data (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km completed (50/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km data (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km completed (51/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km created (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km data (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km completed (52/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km created (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km data (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km completed (53/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km created (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km data (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km completed (54/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km created (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km data (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km completed (55/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km created (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km data (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km completed (56/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights created (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights data (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights completed (57/175)\n", + "Rank 000: Writing OMI_level3_column_annual_average_NO2 var (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 created 
(58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 data (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 completed (58/175)\n", + "Rank 000: Writing OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 created (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 data (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 completed (59/175)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 created (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 data (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 completed (60/175)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 created (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 data (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 completed (61/175)\n", + "Rank 000: Writing UMBC_anthrome_classification var (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification created (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification data (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification completed (62/175)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_25km var (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km created (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km data (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km completed (63/175)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_5km var (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km created (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km data (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km completed (64/175)\n", + "Rank 000: Writing WMO_region var (65/175)\n", + "Rank 000: Var WMO_region created (65/175)\n", + "Rank 000: Var WMO_region data (65/175)\n", + "Rank 000: Var WMO_region completed (65/175)\n", + "Rank 000: Writing WWF_TEOW_biogeographical_realm var (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm created (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm data (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm completed (66/175)\n", + "Rank 000: Writing WWF_TEOW_biome var (67/175)\n", + "Rank 000: Var WWF_TEOW_biome created (67/175)\n", + "Rank 000: Var WWF_TEOW_biome data (67/175)\n", + "Rank 000: Var WWF_TEOW_biome completed (67/175)\n", + "Rank 000: Writing WWF_TEOW_terrestrial_ecoregion var (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion created (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion data (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion completed (68/175)\n", + "Rank 000: Writing administrative_country_division_1 var (69/175)\n", + "Rank 000: Var administrative_country_division_1 created (69/175)\n", + "Rank 000: Var administrative_country_division_1 data (69/175)\n", + "Rank 000: Var administrative_country_division_1 completed (69/175)\n", + "Rank 000: Writing administrative_country_division_2 var (70/175)\n", + "Rank 000: Var administrative_country_division_2 created (70/175)\n", 
+ "Rank 000: Var administrative_country_division_2 data (70/175)\n", + "Rank 000: Var administrative_country_division_2 completed (70/175)\n", + "Rank 000: Writing altitude var (71/175)\n", + "Rank 000: Var altitude created (71/175)\n", + "Rank 000: Var altitude data (71/175)\n", + "Rank 000: Var altitude completed (71/175)\n", + "Rank 000: Writing annual_native_max_gap_percent var (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent created (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent data (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent completed (72/175)\n", + "Rank 000: Writing annual_native_representativity_percent var (73/175)\n", + "Rank 000: Var annual_native_representativity_percent created (73/175)\n", + "Rank 000: Var annual_native_representativity_percent data (73/175)\n", + "Rank 000: Var annual_native_representativity_percent completed (73/175)\n", + "Rank 000: Writing area_classification var (74/175)\n", + "Rank 000: Var area_classification created (74/175)\n", + "Rank 000: Var area_classification data (74/175)\n", + "Rank 000: Var area_classification completed (74/175)\n", + "Rank 000: Writing associated_networks var (75/175)\n", + "Rank 000: Var associated_networks created (75/175)\n", + "Rank 000: Var associated_networks data (75/175)\n", + "Rank 000: Var associated_networks completed (75/175)\n", + "Rank 000: Writing city var (76/175)\n", + "Rank 000: Var city created (76/175)\n", + "Rank 000: Var city data (76/175)\n", + "Rank 000: Var city completed (76/175)\n", + "Rank 000: Writing climatology var (77/175)\n", + "Rank 000: Var climatology created (77/175)\n", + "Rank 000: Var climatology data (77/175)\n", + "Rank 000: Var climatology completed (77/175)\n", + "Rank 000: Writing contact_email_address var (78/175)\n", + "Rank 000: Var contact_email_address created (78/175)\n", + "Rank 000: Var contact_email_address data (78/175)\n", + "Rank 000: Var contact_email_address completed (78/175)\n", + "Rank 000: Writing contact_institution var (79/175)\n", + "Rank 000: Var contact_institution created (79/175)\n", + "Rank 000: Var contact_institution data (79/175)\n", + "Rank 000: Var contact_institution completed (79/175)\n", + "Rank 000: Writing contact_name var (80/175)\n", + "Rank 000: Var contact_name created (80/175)\n", + "Rank 000: Var contact_name data (80/175)\n", + "Rank 000: Var contact_name completed (80/175)\n", + "Rank 000: Writing country var (81/175)\n", + "Rank 000: Var country created (81/175)\n", + "Rank 000: Var country data (81/175)\n", + "Rank 000: Var country completed (81/175)\n", + "Rank 000: Writing daily_native_max_gap_percent var (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent created (82/175)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_ghost.py:570: UserWarning: WARNING!!! GHOST datasets cannot be written in parallel yet. 
Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Var daily_native_max_gap_percent data (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent completed (82/175)\n", + "Rank 000: Writing daily_native_representativity_percent var (83/175)\n", + "Rank 000: Var daily_native_representativity_percent created (83/175)\n", + "Rank 000: Var daily_native_representativity_percent data (83/175)\n", + "Rank 000: Var daily_native_representativity_percent completed (83/175)\n", + "Rank 000: Writing daily_passing_vehicles var (84/175)\n", + "Rank 000: Var daily_passing_vehicles created (84/175)\n", + "Rank 000: Var daily_passing_vehicles data (84/175)\n", + "Rank 000: Var daily_passing_vehicles completed (84/175)\n", + "Rank 000: Writing data_level var (85/175)\n", + "Rank 000: Var data_level created (85/175)\n", + "Rank 000: Var data_level data (85/175)\n", + "Rank 000: Var data_level completed (85/175)\n", + "Rank 000: Writing data_licence var (86/175)\n", + "Rank 000: Var data_licence created (86/175)\n", + "Rank 000: Var data_licence data (86/175)\n", + "Rank 000: Var data_licence completed (86/175)\n", + "Rank 000: Writing day_night_code var (87/175)\n", + "Rank 000: Var day_night_code created (87/175)\n", + "Rank 000: Var day_night_code data (87/175)\n", + "Rank 000: Var day_night_code completed (87/175)\n", + "Rank 000: Writing daytime_traffic_speed var (88/175)\n", + "Rank 000: Var daytime_traffic_speed created (88/175)\n", + "Rank 000: Var daytime_traffic_speed data (88/175)\n", + "Rank 000: Var daytime_traffic_speed completed (88/175)\n", + "Rank 000: Writing derived_uncertainty_per_measurement var (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement created (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement data (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement completed (89/175)\n", + "Rank 000: Writing distance_to_building var (90/175)\n", + "Rank 000: Var distance_to_building created (90/175)\n", + "Rank 000: Var distance_to_building data (90/175)\n", + "Rank 000: Var distance_to_building completed (90/175)\n", + "Rank 000: Writing distance_to_junction var (91/175)\n", + "Rank 000: Var distance_to_junction created (91/175)\n", + "Rank 000: Var distance_to_junction data (91/175)\n", + "Rank 000: Var distance_to_junction completed (91/175)\n", + "Rank 000: Writing distance_to_kerb var (92/175)\n", + "Rank 000: Var distance_to_kerb created (92/175)\n", + "Rank 000: Var distance_to_kerb data (92/175)\n", + "Rank 000: Var distance_to_kerb completed (92/175)\n", + "Rank 000: Writing distance_to_source var (93/175)\n", + "Rank 000: Var distance_to_source created (93/175)\n", + "Rank 000: Var distance_to_source data (93/175)\n", + "Rank 000: Var distance_to_source completed (93/175)\n", + "Rank 000: Writing ellipsoid var (94/175)\n", + "Rank 000: Var ellipsoid created (94/175)\n", + "Rank 000: Var ellipsoid data (94/175)\n", + "Rank 000: Var ellipsoid completed (94/175)\n", + "Rank 000: Writing horizontal_datum var (95/175)\n", + "Rank 000: Var horizontal_datum created (95/175)\n", + "Rank 000: Var horizontal_datum data (95/175)\n", + "Rank 000: Var horizontal_datum completed (95/175)\n", + "Rank 000: Writing hourly_native_max_gap_percent var (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent created (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent data (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent completed (96/175)\n", + "Rank 
000: Writing hourly_native_representativity_percent var (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent created (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent data (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent completed (97/175)\n", + "Rank 000: Writing land_use var (98/175)\n", + "Rank 000: Var land_use created (98/175)\n", + "Rank 000: Var land_use data (98/175)\n", + "Rank 000: Var land_use completed (98/175)\n", + "Rank 000: Writing local_time var (99/175)\n", + "Rank 000: Var local_time created (99/175)\n", + "Rank 000: Var local_time data (99/175)\n", + "Rank 000: Var local_time completed (99/175)\n", + "Rank 000: Writing main_emission_source var (100/175)\n", + "Rank 000: Var main_emission_source created (100/175)\n", + "Rank 000: Var main_emission_source data (100/175)\n", + "Rank 000: Var main_emission_source completed (100/175)\n", + "Rank 000: Writing mean_solar_time var (101/175)\n", + "Rank 000: Var mean_solar_time created (101/175)\n", + "Rank 000: Var mean_solar_time data (101/175)\n", + "Rank 000: Var mean_solar_time completed (101/175)\n", + "Rank 000: Writing measurement_altitude var (102/175)\n", + "Rank 000: Var measurement_altitude created (102/175)\n", + "Rank 000: Var measurement_altitude data (102/175)\n", + "Rank 000: Var measurement_altitude completed (102/175)\n", + "Rank 000: Writing measurement_methodology var (103/175)\n", + "Rank 000: Var measurement_methodology created (103/175)\n", + "Rank 000: Var measurement_methodology data (103/175)\n", + "Rank 000: Var measurement_methodology completed (103/175)\n", + "Rank 000: Writing measurement_scale var (104/175)\n", + "Rank 000: Var measurement_scale created (104/175)\n", + "Rank 000: Var measurement_scale data (104/175)\n", + "Rank 000: Var measurement_scale completed (104/175)\n", + "Rank 000: Writing measuring_instrument_calibration_scale var (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale created (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale data (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale completed (105/175)\n", + "Rank 000: Writing measuring_instrument_documented_absorption_cross_section var (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section created (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section data (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section completed (106/175)\n", + "Rank 000: Writing measuring_instrument_documented_accuracy var (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy created (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy data (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy completed (107/175)\n", + "Rank 000: Writing measuring_instrument_documented_flow_rate var (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate created (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate data (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate completed (108/175)\n", + "Rank 000: Writing measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection created (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection data (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection 
completed (109/175)\n", + "Rank 000: Writing measuring_instrument_documented_measurement_resolution var (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution created (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution data (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution completed (110/175)\n", + "Rank 000: Writing measuring_instrument_documented_precision var (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision created (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision data (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision completed (111/175)\n", + "Rank 000: Writing measuring_instrument_documented_span_drift var (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift created (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift data (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift completed (112/175)\n", + "Rank 000: Writing measuring_instrument_documented_uncertainty var (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty created (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty data (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty completed (113/175)\n", + "Rank 000: Writing measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection created (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection data (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection completed (114/175)\n", + "Rank 000: Writing measuring_instrument_documented_zero_drift var (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift created (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift data (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift completed (115/175)\n", + "Rank 000: Writing measuring_instrument_documented_zonal_drift var (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift created (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift data (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift completed (116/175)\n", + "Rank 000: Writing measuring_instrument_further_details var (117/175)\n", + "Rank 000: Var measuring_instrument_further_details created (117/175)\n", + "Rank 000: Var measuring_instrument_further_details data (117/175)\n", + "Rank 000: Var measuring_instrument_further_details completed (117/175)\n", + "Rank 000: Writing measuring_instrument_inlet_information var (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information created (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information data (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information completed (118/175)\n", + "Rank 000: Writing measuring_instrument_manual_name var (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name created (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name data (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name completed (119/175)\n", + "Rank 000: Writing measuring_instrument_name var (120/175)\n", + "Rank 000: Var measuring_instrument_name created (120/175)\n", + "Rank 000: Var measuring_instrument_name data (120/175)\n", + "Rank 000: Var 
measuring_instrument_name completed (120/175)\n", + "Rank 000: Writing measuring_instrument_process_details var (121/175)\n", + "Rank 000: Var measuring_instrument_process_details created (121/175)\n", + "Rank 000: Var measuring_instrument_process_details data (121/175)\n", + "Rank 000: Var measuring_instrument_process_details completed (121/175)\n", + "Rank 000: Writing measuring_instrument_reported_absorption_cross_section var (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section created (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section data (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section completed (122/175)\n", + "Rank 000: Writing measuring_instrument_reported_accuracy var (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy created (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy data (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy completed (123/175)\n", + "Rank 000: Writing measuring_instrument_reported_flow_rate var (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate created (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate data (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate completed (124/175)\n", + "Rank 000: Writing measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection created (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection data (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection completed (125/175)\n", + "Rank 000: Writing measuring_instrument_reported_measurement_resolution var (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution created (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution data (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution completed (126/175)\n", + "Rank 000: Writing measuring_instrument_reported_precision var (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision created (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision data (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision completed (127/175)\n", + "Rank 000: Writing measuring_instrument_reported_span_drift var (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift created (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift data (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift completed (128/175)\n", + "Rank 000: Writing measuring_instrument_reported_uncertainty var (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty created (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty data (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty completed (129/175)\n", + "Rank 000: Writing measuring_instrument_reported_units var (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units created (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units data (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units completed (130/175)\n", + "Rank 000: Writing measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection created (131/175)\n", + "Rank 000: 
Var measuring_instrument_reported_upper_limit_of_detection data (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection completed (131/175)\n", + "Rank 000: Writing measuring_instrument_reported_zero_drift var (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift created (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift data (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift completed (132/175)\n", + "Rank 000: Writing measuring_instrument_reported_zonal_drift var (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift created (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift data (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift completed (133/175)\n", + "Rank 000: Writing measuring_instrument_sampling_type var (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type created (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type data (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type completed (134/175)\n", + "Rank 000: Writing monthly_native_max_gap_percent var (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent created (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent data (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent completed (135/175)\n", + "Rank 000: Writing monthly_native_representativity_percent var (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent created (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent data (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent completed (136/175)\n", + "Rank 000: Writing network var (137/175)\n", + "Rank 000: Var network created (137/175)\n", + "Rank 000: Var network data (137/175)\n", + "Rank 000: Var network completed (137/175)\n", + "Rank 000: Writing network_maintenance_details var (138/175)\n", + "Rank 000: Var network_maintenance_details created (138/175)\n", + "Rank 000: Var network_maintenance_details data (138/175)\n", + "Rank 000: Var network_maintenance_details completed (138/175)\n", + "Rank 000: Writing network_miscellaneous_details var (139/175)\n", + "Rank 000: Var network_miscellaneous_details created (139/175)\n", + "Rank 000: Var network_miscellaneous_details data (139/175)\n", + "Rank 000: Var network_miscellaneous_details completed (139/175)\n", + "Rank 000: Writing network_provided_volume_standard_pressure var (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure created (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure data (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure completed (140/175)\n", + "Rank 000: Writing network_provided_volume_standard_temperature var (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature created (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature data (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature completed (141/175)\n", + "Rank 000: Writing network_qa_details var (142/175)\n", + "Rank 000: Var network_qa_details created (142/175)\n", + "Rank 000: Var network_qa_details data (142/175)\n", + "Rank 000: Var network_qa_details completed (142/175)\n", + "Rank 000: Writing network_sampling_details var (143/175)\n", + "Rank 000: Var network_sampling_details created (143/175)\n", + "Rank 000: Var network_sampling_details data (143/175)\n", + "Rank 000: Var network_sampling_details 
completed (143/175)\n", + "Rank 000: Writing network_uncertainty_details var (144/175)\n", + "Rank 000: Var network_uncertainty_details created (144/175)\n", + "Rank 000: Var network_uncertainty_details data (144/175)\n", + "Rank 000: Var network_uncertainty_details completed (144/175)\n", + "Rank 000: Writing population var (145/175)\n", + "Rank 000: Var population created (145/175)\n", + "Rank 000: Var population data (145/175)\n", + "Rank 000: Var population completed (145/175)\n", + "Rank 000: Writing primary_sampling_further_details var (146/175)\n", + "Rank 000: Var primary_sampling_further_details created (146/175)\n", + "Rank 000: Var primary_sampling_further_details data (146/175)\n", + "Rank 000: Var primary_sampling_further_details completed (146/175)\n", + "Rank 000: Writing primary_sampling_instrument_documented_flow_rate var (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate created (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate data (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate completed (147/175)\n", + "Rank 000: Writing primary_sampling_instrument_manual_name var (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name created (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name data (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name completed (148/175)\n", + "Rank 000: Writing primary_sampling_instrument_name var (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name created (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name data (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name completed (149/175)\n", + "Rank 000: Writing primary_sampling_instrument_reported_flow_rate var (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate created (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate data (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate completed (150/175)\n", + "Rank 000: Writing primary_sampling_process_details var (151/175)\n", + "Rank 000: Var primary_sampling_process_details created (151/175)\n", + "Rank 000: Var primary_sampling_process_details data (151/175)\n", + "Rank 000: Var primary_sampling_process_details completed (151/175)\n", + "Rank 000: Writing primary_sampling_type var (152/175)\n", + "Rank 000: Var primary_sampling_type created (152/175)\n", + "Rank 000: Var primary_sampling_type data (152/175)\n", + "Rank 000: Var primary_sampling_type completed (152/175)\n", + "Rank 000: Writing principal_investigator_email_address var (153/175)\n", + "Rank 000: Var principal_investigator_email_address created (153/175)\n", + "Rank 000: Var principal_investigator_email_address data (153/175)\n", + "Rank 000: Var principal_investigator_email_address completed (153/175)\n", + "Rank 000: Writing principal_investigator_institution var (154/175)\n", + "Rank 000: Var principal_investigator_institution created (154/175)\n", + "Rank 000: Var principal_investigator_institution data (154/175)\n", + "Rank 000: Var principal_investigator_institution completed (154/175)\n", + "Rank 000: Writing principal_investigator_name var (155/175)\n", + "Rank 000: Var principal_investigator_name created (155/175)\n", + "Rank 000: Var principal_investigator_name data (155/175)\n", + "Rank 000: Var principal_investigator_name completed (155/175)\n", + "Rank 000: Writing process_warnings var (156/175)\n", + "Rank 000: Var 
process_warnings created (156/175)\n", + "Rank 000: Var process_warnings data (156/175)\n", + "Rank 000: Var process_warnings completed (156/175)\n", + "Rank 000: Writing projection var (157/175)\n", + "Rank 000: Var projection created (157/175)\n", + "Rank 000: Var projection data (157/175)\n", + "Rank 000: Var projection completed (157/175)\n", + "Rank 000: Writing reported_uncertainty_per_measurement var (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement created (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement data (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement completed (158/175)\n", + "Rank 000: Writing representative_radius var (159/175)\n", + "Rank 000: Var representative_radius created (159/175)\n", + "Rank 000: Var representative_radius data (159/175)\n", + "Rank 000: Var representative_radius completed (159/175)\n", + "Rank 000: Writing sample_preparation_further_details var (160/175)\n", + "Rank 000: Var sample_preparation_further_details created (160/175)\n", + "Rank 000: Var sample_preparation_further_details data (160/175)\n", + "Rank 000: Var sample_preparation_further_details completed (160/175)\n", + "Rank 000: Writing sample_preparation_process_details var (161/175)\n", + "Rank 000: Var sample_preparation_process_details created (161/175)\n", + "Rank 000: Var sample_preparation_process_details data (161/175)\n", + "Rank 000: Var sample_preparation_process_details completed (161/175)\n", + "Rank 000: Writing sample_preparation_techniques var (162/175)\n", + "Rank 000: Var sample_preparation_techniques created (162/175)\n", + "Rank 000: Var sample_preparation_techniques data (162/175)\n", + "Rank 000: Var sample_preparation_techniques completed (162/175)\n", + "Rank 000: Writing sample_preparation_types var (163/175)\n", + "Rank 000: Var sample_preparation_types created (163/175)\n", + "Rank 000: Var sample_preparation_types data (163/175)\n", + "Rank 000: Var sample_preparation_types completed (163/175)\n", + "Rank 000: Writing sampling_height var (164/175)\n", + "Rank 000: Var sampling_height created (164/175)\n", + "Rank 000: Var sampling_height data (164/175)\n", + "Rank 000: Var sampling_height completed (164/175)\n", + "Rank 000: Writing sconco3 var (165/175)\n", + "Rank 000: Var sconco3 created (165/175)\n", + "Rank 000: Var sconco3 data (165/175)\n", + "Rank 000: Var sconco3 completed (165/175)\n", + "Rank 000: Writing season_code var (166/175)\n", + "Rank 000: Var season_code created (166/175)\n", + "Rank 000: Var season_code data (166/175)\n", + "Rank 000: Var season_code completed (166/175)\n", + "Rank 000: Writing station_classification var (167/175)\n", + "Rank 000: Var station_classification created (167/175)\n", + "Rank 000: Var station_classification data (167/175)\n", + "Rank 000: Var station_classification completed (167/175)\n", + "Rank 000: Writing station_name var (168/175)\n", + "Rank 000: Var station_name created (168/175)\n", + "Rank 000: Var station_name data (168/175)\n", + "Rank 000: Var station_name completed (168/175)\n", + "Rank 000: Writing station_reference var (169/175)\n", + "Rank 000: Var station_reference created (169/175)\n", + "Rank 000: Var station_reference data (169/175)\n", + "Rank 000: Var station_reference completed (169/175)\n", + "Rank 000: Writing station_timezone var (170/175)\n", + "Rank 000: Var station_timezone created (170/175)\n", + "Rank 000: Var station_timezone data (170/175)\n", + "Rank 000: Var station_timezone completed (170/175)\n", + "Rank 000: Writing 
street_type var (171/175)\n", + "Rank 000: Var street_type created (171/175)\n", + "Rank 000: Var street_type data (171/175)\n", + "Rank 000: Var street_type completed (171/175)\n", + "Rank 000: Writing street_width var (172/175)\n", + "Rank 000: Var street_width created (172/175)\n", + "Rank 000: Var street_width data (172/175)\n", + "Rank 000: Var street_width completed (172/175)\n", + "Rank 000: Writing terrain var (173/175)\n", + "Rank 000: Var terrain created (173/175)\n", + "Rank 000: Var terrain data (173/175)\n", + "Rank 000: Var terrain completed (173/175)\n", + "Rank 000: Writing vertical_datum var (174/175)\n", + "Rank 000: Var vertical_datum created (174/175)\n", + "Rank 000: Var vertical_datum data (174/175)\n", + "Rank 000: Var vertical_datum completed (174/175)\n", + "Rank 000: Writing weekday_weekend_code var (175/175)\n", + "Rank 000: Var weekday_weekend_code created (175/175)\n", + "Rank 000: Var weekday_weekend_code data (175/175)\n", + "Rank 000: Var weekday_weekend_code completed (175/175)\n" + ] + } + ], + "source": [ + "obs_nes.to_netcdf('prv_obs_file.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Reopen with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
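+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Reopen with NES (sketch)\n", + "\n", + "A minimal sketch, not part of the original run: the file written above can presumably be reopened with NES's own `open_netcdf` (used later in this notebook); the `info=False` kwarg is an assumption." + ] + },
+ { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sketch (assumed kwargs): reopen the GHOST file written above and load it into memory\n", + "obs_check = open_netcdf(path='prv_obs_file.nc', info=False)\n", + "obs_check.load()" + ] + },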
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                                                           (time: 720, station: 168, N_flag_codes: 186, N_qa_codes: 77)\n",
    +       "Coordinates:\n",
    +       "  * time                                                              (time) datetime64[ns] ...\n",
    +       "  * station                                                           (station) float64 ...\n",
    +       "Dimensions without coordinates: N_flag_codes, N_qa_codes\n",
    +       "Data variables: (12/179)\n",
    +       "    latitude                                                          (station) float64 ...\n",
    +       "    longitude                                                         (station) float64 ...\n",
    +       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
    +       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
    +       "    ...                                                                ...\n",
    +       "    street_width                                                      (station) float32 ...\n",
    +       "    terrain                                                           (station) object ...\n",
    +       "    vertical_datum                                                    (station) object ...\n",
    +       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
    +       "    flag                                                              (station, time, N_flag_codes) int64 ...\n",
    +       "    qa                                                                (station, time, N_qa_codes) int64 ...\n",
    +       "Attributes:\n",
    +       "    title:                     Surface ozone data in the EBAS network in 2018...\n",
    +       "    institution:               Barcelona Supercomputing Center\n",
    +       "    source:                    Surface observations\n",
    +       "    creator_name:              Dene R. Bowdalo\n",
    +       "    creator_email:             dene.bowdalo@bsc.es\n",
    +       "    conventions:               CF-1.7\n",
    +       "    data_version:              1.3.3\n",
    +       "    history:                   Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n",
    +       "    NCO:                       4.7.2\n",
    +       "    nco_openmp_thread_number:  1\n",
    +       "    Conventions:               CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 720, station: 168, N_flag_codes: 186, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + " * station (station) float64 ...\n", + "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", + "Data variables: (12/179)\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " ... ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", + "Attributes:\n", + " title: Surface ozone data in the EBAS network in 2018...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.3.3\n", + " history: Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n", + " NCO: 4.7.2\n", + " nco_openmp_thread_number: 1\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('prv_obs_file.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Experiments" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "exp_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "### 2.1. Read dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
    +       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
    +       "Data variables:\n",
    +       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
    +       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment cams61_chimere_ph2\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            4.7.2
    " + ], + "text/plain": [ + "\n", + "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes = open_netcdf(path=exp_path, info=True, parallel_method='X')\n", + "exp_interp_nes" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "exp_interp_nes.time" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, -22.10333333, -31.66861111,\n", + " 47.76666641, 46.677778 , 48.721111 , 47.529167 ,\n", + " 47.05407 , 46.693611 , 47.348056 , 47.973056 ,\n", + " 48.878611 , 48.106111 , 48.371111 , 48.334722 ,\n", + " 48.050833 , 47.838611 , 47.040277 , 47.06694444,\n", + " 49.877778 , 50.629421 , 50.503333 , 41.695833 ,\n", + " 32.27000046, 80.05000305, 46.5475 , 46.813056 ,\n", + " 47.479722 , 47.049722 , 47.0675 , 47.18961391,\n", + " -30.17254 , 16.86403 , 35.0381 , 49.73508444,\n", + " 49.573394 , 49.066667 , 54.925556 , 52.802222 ,\n", + " 47.914722 , 53.166667 , 50.65 , 54.4368 ,\n", + " 47.80149841, 47.4165 , -70.666 , 54.746495 ,\n", + " 81.6 , 55.693588 , 72.58000183, 56.290424 ,\n", + " 59.5 , 58.383333 , 39.54694 , 42.72056 ,\n", + " 39.87528 , 37.23722 , 43.43917 , 41.27417 ,\n", + " 42.31917 , 38.47278 , 39.08278 , 41.23889 ,\n", + " 41.39389 , 42.63472 , 37.05194 , 28.309 ,\n", + " 59.779167 , 60.53002 , 66.320278 , 67.97333333,\n", + " 48.5 , 49.9 , 47.266667 , 43.616667 ,\n", + " 47.3 , 46.65 , 45. 
, 45.8 ,\n", + " 48.633333 , 42.936667 , 48.70861111, 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , -75.62 , 51.149617 ,\n", + " 38.366667 , 35.316667 , 46.966667 , 46.91 ,\n", + " -0.20194 , 51.939722 , 53.32583 , 45.8 ,\n", + " 44.183333 , 37.571111 , 35.5182 , 42.805462 ,\n", + " -69.005 , 39.0319 , 24.2883 , 24.466941 ,\n", + " 36.53833389, 33.293917 , 55.37611111, 56.161944 ,\n", + " 57.135278 , 41.536111 , 36.0722 , 52.083333 ,\n", + " 53.333889 , 51.541111 , 52.3 , 51.974444 ,\n", + " 58.38853 , 65.833333 , 62.783333 , 78.90715 ,\n", + " 59. , 69.45 , 59.2 , 60.372386 ,\n", + " -72.0117 , 59.2 , -41.40819168, -77.83200073,\n", + " -45.0379982 , 51.814408 , 50.736444 , 54.753894 ,\n", + " 54.15 , 43.4 , 71.58616638, 63.85 ,\n", + " 67.883333 , 57.394 , 57.1645 , 57.9525 ,\n", + " 56.0429 , 60.0858 , 57.816667 , 64.25 ,\n", + " 59.728 , 45.566667 , 46.428611 , 46.299444 ,\n", + " 48.933333 , 49.15 , 49.05 , 47.96 ,\n", + " 71.32301331, 40.12498 , 19.53623009, -89.99694824,\n", + " 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'long_name': 'latitude',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, -6.56008333e+01,\n", + " -6.38819444e+01, 1.67666664e+01, 1.29722220e+01,\n", + " 1.59422220e+01, 9.92666700e+00, 1.29579400e+01,\n", + " 1.39150000e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 2.15888889e+00, 5.27897222e+00,\n", + " 2.61000833e+00, 2.96488600e+00, -3.20416700e+00,\n", + " 
-7.87000000e+00, -3.71305600e+00, -8.07500000e-01,\n", + " -4.77444400e+00, -3.03305600e+00, -3.20500000e+00,\n", + " -1.75333300e+00, 1.79444000e-01, 1.46305600e+00,\n", + " -4.69146200e+00, 2.92778000e-01, -3.24290000e+00,\n", + " 1.12194400e+00, 1.08223000e+00, -1.18531900e+00,\n", + " -2.61800000e+01, -1.43822800e+00, 2.30833330e+01,\n", + " 2.56666670e+01, 1.95833330e+01, 1.63200000e+01,\n", + " 1.00318100e+02, -1.02444440e+01, -9.89944000e+00,\n", + " 8.63333300e+00, 1.07000000e+01, 1.26597220e+01,\n", + " 1.26305000e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 2.06938900e+01,\n", + " 1.42184000e+01, 6.56666700e+00, 6.27722200e+00,\n", + " 5.85361100e+00, 4.50000000e+00, 4.92361100e+00,\n", + " 8.25200000e+00, 1.39166670e+01, 8.88333300e+00,\n", + " 1.18866800e+01, 1.15333330e+01, 3.00333330e+01,\n", + " 5.20000000e+00, 1.10781420e+01, 2.53510000e+00,\n", + " 9.51666700e+00, 1.74870804e+02, 1.66660004e+02,\n", + " 1.69684006e+02, 2.19724190e+01, 1.57395000e+01,\n", + " 1.75342640e+01, 2.20666670e+01, 2.19500000e+01,\n", + " 1.28918823e+02, 1.53333330e+01, 2.10666670e+01,\n", + " 1.19140000e+01, 1.47825000e+01, 1.24030000e+01,\n", + " 1.31480000e+01, 1.75052800e+01, 1.55666670e+01,\n", + " 1.97666670e+01, 1.54720000e+01, 1.48666670e+01,\n", + " 1.50033330e+01, 1.45386110e+01, 1.95833330e+01,\n", + " 2.02833330e+01, 2.22666670e+01, 1.78605560e+01,\n", + " -1.56611465e+02, -1.05236800e+02, -1.55576157e+02,\n", + " -2.47999992e+01, -1.24151001e+02, 1.03515700e+02,\n", + " 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'long_name': 'longitude',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp_interp_nes.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading grid_edge_latitude var (1/6)\n", + "Rank 000: Loaded grid_edge_latitude var ((1125,))\n", + "Rank 000: Loading grid_edge_longitude var (2/6)\n", + "Rank 000: Loaded grid_edge_longitude var ((1125,))\n", + "Rank 000: Loading model_centre_latitude var (3/6)\n", + "Rank 000: Loaded model_centre_latitude var ((211, 351))\n", + "Rank 000: Loading model_centre_longitude var (4/6)\n", + "Rank 000: Loaded model_centre_longitude var ((211, 351))\n", + "Rank 000: Loading sconco3 var (5/6)\n", + "Rank 000: Loaded sconco3 var ((175, 720))\n", + "Rank 000: Loading station_reference var (6/6)\n", + "Rank 000: Loaded station_reference var ((175,))\n" + ] + } + ], + "source": [ + "exp_interp_nes.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2. 
Write dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating prv_exp_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing sconco3 var (1/2)\n", + "Rank 000: Var sconco3 created (1/2)\n", + "Rank 000: Var sconco3 data (1/2)\n", + "Rank 000: Var sconco3 completed (1/2)\n", + "Rank 000: Writing station_reference var (2/2)\n", + "Rank 000: Var station_reference created (2/2)\n", + "Rank 000: Var station_reference data (2/2)\n", + "Rank 000: Var station_reference completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_providentia.py:587: UserWarning: WARNING!!! Providentia datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "exp_interp_nes.to_netcdf('prv_exp_file.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Reopen with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:                 (time: 720, station: 175, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n",
    +       "Coordinates:\n",
    +       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
    +       "  * station                 (station) float64 0.0 1.0 2.0 ... 172.0 173.0 174.0\n",
    +       "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n",
    +       "Data variables:\n",
    +       "    lat                     (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
    +       "    lon                     (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
    +       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
    +       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
    +       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
    +       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
    +       "    sconco3                 (station, time) float32 ...\n",
    +       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
    +       "Attributes:\n",
    +       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
    +       "    institution:    Barcelona Supercomputing Center\n",
    +       "    source:         Experiment cams61_chimere_ph2\n",
    +       "    creator_name:   Dene R. Bowdalo\n",
    +       "    creator_email:  dene.bowdalo@bsc.es\n",
    +       "    conventions:    CF-1.7\n",
    +       "    data_version:   1.0\n",
    +       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
    +       "    NCO:            4.7.2\n",
    +       "    Conventions:    CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 720, station: 175, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + " * station (station) float64 0.0 1.0 2.0 ... 172.0 173.0 174.0\n", + "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('prv_exp_file.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/2.Creation/2.1.Create_Regular.ipynb b/tutorials/2.Creation/2.1.Create_Regular.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..f2d3455856d85d686d6f9af1d17ae7136f5dfa16 --- /dev/null +++ b/tutorials/2.Creation/2.1.Create_Regular.ipynb @@ -0,0 +1,472 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create regular grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lat_orig = 41.1\n", + "lon_orig = 1.8\n", + "inc_lat = 0.1\n", + "inc_lon = 0.1\n", + "n_lat = 10\n", + "n_lon = 10\n", + "regular_grid = create_nes(comm=None, info=False, projection='regular',\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating regular_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "regular_grid.to_netcdf('regular_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:  (time: 1, lev: 1, lat: 10, lon: 10)\n",
    +       "Coordinates:\n",
    +       "  * time     (time) datetime64[ns] 1996-12-31\n",
    +       "  * lev      (lev) float64 0.0\n",
    +       "  * lat      (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n",
    +       "  * lon      (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n",
    +       "Data variables:\n",
    +       "    crs      |S1 b''\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, lat: 10, lon: 10)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * lat (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n", + " * lon (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n", + "Data variables:\n", + " crs |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('regular_grid.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/2.Creation/2.2.Create_Rotated.ipynb b/tutorials/2.Creation/2.2.Create_Rotated.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5493bd5cd55484a20f4a78cd6f9c038515d2fb4d --- /dev/null +++ b/tutorials/2.Creation/2.2.Create_Rotated.ipynb @@ -0,0 +1,495 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create rotated grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "centre_lat = 51\n", + "centre_lon = 10\n", + "west_boundary = -35\n", + "south_boundary = -27\n", + "inc_rlat = 0.2\n", + "inc_rlon = 0.2\n", + "rotated_grid = create_nes(comm=None, info=False, projection='rotated',\n", + " centre_lat=centre_lat, centre_lon=centre_lon,\n", + " west_boundary=west_boundary, south_boundary=south_boundary,\n", + " inc_rlat=inc_rlat, inc_rlon=inc_rlon)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating rotated_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "rotated_grid.to_netcdf('rotated_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:       (time: 1, lev: 1, rlat: 271, rlon: 351)\n",
    +       "Coordinates:\n",
    +       "  * time          (time) datetime64[ns] 1996-12-31\n",
    +       "  * lev           (lev) float64 0.0\n",
    +       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
    +       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
    +       "Data variables:\n",
    +       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
    +       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
    +       "    rotated_pole  |S1 b''\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Data variables:\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + " rotated_pole |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('rotated_grid.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/2.Creation/2.3.Create-Points.ipynb b/tutorials/2.Creation/2.3.Create-Points.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e55ef7abbc0c5a0723abbddb4f500e1c2acce92f --- /dev/null +++ b/tutorials/2.Creation/2.3.Create-Points.ipynb @@ -0,0 +1,1845 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create points grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import pandas as pd\n", + "import numpy as np\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example 1" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    station.codelatlonstandardised_network_provided_area_classification
    0ES0266A41.3793222.086140urban-centre
    1ES0392A41.7277041.838531urban-suburban
    2ES0395A41.5678242.014598urban-centre
    3ES0559A41.3874242.164918urban-centre
    4ES0567A41.3849062.119574urban-centre
    ...............
    129ES2087A41.9292832.257302NaN
    130ES2091A40.5799000.553500NaN
    131ES2088A41.7710602.250647NaN
    132ES1908A41.2390691.856564NaN
    133ES9994A42.3583631.459455NaN
    \n", + "

    134 rows × 4 columns

    \n", + "
    " + ], + "text/plain": [ + " station.code lat lon \\\n", + "0 ES0266A 41.379322 2.086140 \n", + "1 ES0392A 41.727704 1.838531 \n", + "2 ES0395A 41.567824 2.014598 \n", + "3 ES0559A 41.387424 2.164918 \n", + "4 ES0567A 41.384906 2.119574 \n", + ".. ... ... ... \n", + "129 ES2087A 41.929283 2.257302 \n", + "130 ES2091A 40.579900 0.553500 \n", + "131 ES2088A 41.771060 2.250647 \n", + "132 ES1908A 41.239069 1.856564 \n", + "133 ES9994A 42.358363 1.459455 \n", + "\n", + " standardised_network_provided_area_classification \n", + "0 urban-centre \n", + "1 urban-suburban \n", + "2 urban-centre \n", + "3 urban-centre \n", + "4 urban-centre \n", + ".. ... \n", + "129 NaN \n", + "130 NaN \n", + "131 NaN \n", + "132 NaN \n", + "133 NaN \n", + "\n", + "[134 rows x 4 columns]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/XVPCA_info.csv'\n", + "df = pd.read_csv(file_path)\n", + "df" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "lat = df['lat']\n", + "lon = df['lon']" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "metadata = {'station_code': {'data': df['station.code'],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'area_classification': {'data': df['standardised_network_provided_area_classification'],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str}}" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_code var (1/2)\n", + "Rank 000: Var station_code created (1/2)\n", + "Rank 000: Var station_code data (1/2)\n", + "Rank 000: Var station_code completed (1/2)\n", + "Rank 000: Writing area_classification var (2/2)\n", + "Rank 000: Var area_classification created (2/2)\n", + "Rank 000: Var area_classification data (2/2)\n", + "Rank 000: Var area_classification completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/gpfs/projects/bsc32/software/suselinux/11/software/NES/0.9.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/NES/0.9.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! Different data types for variable area_classificationInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_1.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:              (time: 1, station: 134)\n",
    +       "Coordinates:\n",
    +       "  * time                 (time) datetime64[ns] 1996-12-31\n",
    +       "  * station              (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n",
    +       "Data variables:\n",
    +       "    station_code         (station) object 'ES0266A' 'ES0392A' ... 'ES9994A'\n",
    +       "    area_classification  (station) object 'urban-centre' ... 'nan'\n",
    +       "    lat                  (station) float64 41.38 41.73 41.57 ... 41.24 42.36\n",
    +       "    lon                  (station) float64 2.086 1.839 2.015 ... 1.857 1.459\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, station: 134)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * station (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n", + "Data variables:\n", + " station_code (station) object ...\n", + " area_classification (station) object ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_1.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example 2" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    EstacióPM10 Barcelona (Eixample)PM10 Badalona (guàrdia urbana)PM10 Badalona (Assamblea de Catalunya)PM10 Barcelona (Pl. de la Universitat)PM10 Barcelona (Poblenou)PM10 Barcelona (Zona Universitària)PM10 Barcelona (el Port Vell)PM10 Barcelona (IES Goya)PM10 Barcelona (IES Verdaguer)...PM10 Constantí (Gaudí)PM10 Vila-seca (RENFE)PM10 Sitges (Vallcarca-oficines)PM10 Sant Vicenç dels Horts (Àlaba)PM10 Montsec (OAM)PM10 Montseny (la Castanya)PM10 Caldes de Montbui (Ajuntament)PM10 Sant Feliu de Llobregat (Eugeni d'Ors)PM 10 La Seu d'Urgell (CC Les Monges)PM10 Vic (Centre Cívic Santa Anna)
    0Codi europeuES1438AES1928AES2027AES0559AES0691AES0567AES1870AES1852AES1900A...ES1123AES1117AES2033AES2011AES1982AES1778AES1680AES1362AES9994AES1874A
    1DiaValor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)...Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)
    22017-01-01 00:00:0019.6NaN2020.225.616.529NaN23.8...12.97NaN1122.499.5002997.936455NaNNaNNaNNaN
    32017-01-02 00:00:0027.220.862331.63522.82817.232.4...NaN25.382625.391.8296189.7870043222.06NaNNaN
    42017-01-03 00:00:0035.7NaN323736.230.931NaN35.8...21.836.494830.658.09460716.978294335.84NaNNaN
    ..................................................................
    3622017-12-27 00:00:0017.57.591016.91413.121NaN20.8...1222.95NaNNaN13.066751NaN10.3NaNNaN
    3632017-12-28 00:00:0017NaN1417.915NaN1314.516...NaN6.5NaN9.97613.351872NaN26.81NaNNaN
    3642017-12-29 00:00:0024.6212423.225.815.321NaN25.9...8.869.56NaN23.7614.219732NaN14.09NaNNaN
    3652017-12-30 00:00:0027.4NaN1522.316.611.21610.718.8...NaNNaNNaN19.041.0911874.713029NaNNaNNaNNaN
    3662017-12-31 00:00:0017.312.51316.317.69.914NaN17.4...12.77NaNNaN15.232.156595.024302NaNNaNNaNNaN
    \n", + "

    367 rows × 84 columns

    \n", + "
    " + ], + "text/plain": [ + " Estació PM10 Barcelona (Eixample) \\\n", + "0 Codi europeu ES1438A \n", + "1 Dia Valor (µg/m3) \n", + "2 2017-01-01 00:00:00 19.6 \n", + "3 2017-01-02 00:00:00 27.2 \n", + "4 2017-01-03 00:00:00 35.7 \n", + ".. ... ... \n", + "362 2017-12-27 00:00:00 17.5 \n", + "363 2017-12-28 00:00:00 17 \n", + "364 2017-12-29 00:00:00 24.6 \n", + "365 2017-12-30 00:00:00 27.4 \n", + "366 2017-12-31 00:00:00 17.3 \n", + "\n", + " PM10 Badalona (guàrdia urbana) PM10 Badalona (Assamblea de Catalunya) \\\n", + "0 ES1928A ES2027A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 NaN 20 \n", + "3 20.86 23 \n", + "4 NaN 32 \n", + ".. ... ... \n", + "362 7.59 10 \n", + "363 NaN 14 \n", + "364 21 24 \n", + "365 NaN 15 \n", + "366 12.5 13 \n", + "\n", + " PM10 Barcelona (Pl. de la Universitat) PM10 Barcelona (Poblenou) \\\n", + "0 ES0559A ES0691A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 20.2 25.6 \n", + "3 31.6 35 \n", + "4 37 36.2 \n", + ".. ... ... \n", + "362 16.9 14 \n", + "363 17.9 15 \n", + "364 23.2 25.8 \n", + "365 22.3 16.6 \n", + "366 16.3 17.6 \n", + "\n", + " PM10 Barcelona (Zona Universitària) PM10 Barcelona (el Port Vell) \\\n", + "0 ES0567A ES1870A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 16.5 29 \n", + "3 22.8 28 \n", + "4 30.9 31 \n", + ".. ... ... \n", + "362 13.1 21 \n", + "363 NaN 13 \n", + "364 15.3 21 \n", + "365 11.2 16 \n", + "366 9.9 14 \n", + "\n", + " PM10 Barcelona (IES Goya) PM10 Barcelona (IES Verdaguer) ... \\\n", + "0 ES1852A ES1900A ... \n", + "1 Valor (µg/m3) Valor (µg/m3) ... \n", + "2 NaN 23.8 ... \n", + "3 17.2 32.4 ... \n", + "4 NaN 35.8 ... \n", + ".. ... ... ... \n", + "362 NaN 20.8 ... \n", + "363 14.5 16 ... \n", + "364 NaN 25.9 ... \n", + "365 10.7 18.8 ... \n", + "366 NaN 17.4 ... \n", + "\n", + " PM10 Constantí (Gaudí) PM10 Vila-seca (RENFE) \\\n", + "0 ES1123A ES1117A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 12.97 NaN \n", + "3 NaN 25.38 \n", + "4 21.8 36.49 \n", + ".. ... ... \n", + "362 12 22.95 \n", + "363 NaN 6.5 \n", + "364 8.86 9.56 \n", + "365 NaN NaN \n", + "366 12.77 NaN \n", + "\n", + " PM10 Sitges (Vallcarca-oficines) PM10 Sant Vicenç dels Horts (Àlaba) \\\n", + "0 ES2033A ES2011A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 11 22.49 \n", + "3 26 25.39 \n", + "4 48 30.65 \n", + ".. ... ... \n", + "362 NaN NaN \n", + "363 NaN 9.976 \n", + "364 NaN 23.76 \n", + "365 NaN 19.04 \n", + "366 NaN 15.23 \n", + "\n", + " PM10 Montsec (OAM) PM10 Montseny (la Castanya) \\\n", + "0 ES1982A ES1778A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 9.500299 7.936455 \n", + "3 1.829618 9.787004 \n", + "4 8.094607 16.97829 \n", + ".. ... ... \n", + "362 1 3.066751 \n", + "363 1 3.351872 \n", + "364 1 4.219732 \n", + "365 1.091187 4.713029 \n", + "366 2.15659 5.024302 \n", + "\n", + " PM10 Caldes de Montbui (Ajuntament) \\\n", + "0 ES1680A \n", + "1 Valor (µg/m3) \n", + "2 NaN \n", + "3 32 \n", + "4 43 \n", + ".. ... \n", + "362 NaN \n", + "363 NaN \n", + "364 NaN \n", + "365 NaN \n", + "366 NaN \n", + "\n", + " PM10 Sant Feliu de Llobregat (Eugeni d'Ors) \\\n", + "0 ES1362A \n", + "1 Valor (µg/m3) \n", + "2 NaN \n", + "3 22.06 \n", + "4 35.84 \n", + ".. ... \n", + "362 10.3 \n", + "363 26.81 \n", + "364 14.09 \n", + "365 NaN \n", + "366 NaN \n", + "\n", + " PM 10 La Seu d'Urgell (CC Les Monges) PM10 Vic (Centre Cívic Santa Anna) \n", + "0 ES9994A ES1874A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 NaN NaN \n", + "3 NaN NaN \n", + "4 NaN NaN \n", + ".. ... ... 
\n", + "362 NaN NaN \n", + "363 NaN NaN \n", + "364 NaN NaN \n", + "365 NaN NaN \n", + "366 NaN NaN \n", + "\n", + "[367 rows x 84 columns]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path_2 = '/esarchive/scratch/avilanova/software/NES/tutorials/data/Dades_2017.xlsx'\n", + "df_2 = pd.read_excel(file_path_2)\n", + "df_2" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "times = df_2['Estació'].iloc[2:]\n", + "lat = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", + "lon = np.full(len(df_2.iloc[0].values[1:]), np.nan)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon, times=times)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "metadata = {'station_name': {'data': df_2.columns.str.replace('PM10 ', '').str.replace('PM 10 ', '').to_numpy()[1:],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'station_code': {'data': df_2.iloc[0].values[1:],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'pm10': {'data': df_2.iloc[2:, 1:].to_numpy().T,\n", + " 'dimensions': ('station', 'time',),\n", + " 'dtype': float}}" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_name var (1/3)\n", + "Rank 000: Var station_name created (1/3)\n", + "Rank 000: Var station_name data (1/3)\n", + "Rank 000: Var station_name completed (1/3)\n", + "Rank 000: Writing station_code var (2/3)\n", + "Rank 000: Var station_code created (2/3)\n", + "Rank 000: Var station_code data (2/3)\n", + "Rank 000: Var station_code completed (2/3)\n", + "Rank 000: Writing pm10 var (3/3)\n", + "Rank 000: Var pm10 created (3/3)\n", + "Rank 000: Var pm10 data (3/3)\n", + "Rank 000: Var pm10 completed (3/3)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/gpfs/projects/bsc32/software/suselinux/11/software/NES/0.9.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/NES/0.9.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/NES/0.9.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! Different data types for variable pm10Input dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_2.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:       (time: 365, station: 83)\n",
    +       "Coordinates:\n",
    +       "  * time          (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n",
    +       "  * station       (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n",
    +       "Data variables:\n",
    +       "    station_name  (station) object 'Barcelona (Eixample)' ... 'Vic (Centre Cí...\n",
    +       "    station_code  (station) object 'ES1438A' 'ES1928A' ... 'ES9994A' 'ES1874A'\n",
    +       "    pm10          (station, time) float64 19.6 27.2 35.7 30.9 ... nan nan nan\n",
    +       "    lat           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
    +       "    lon           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 365, station: 83)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n", + " * station (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n", + "Data variables:\n", + " station_name (station) object ...\n", + " station_code (station) object ...\n", + " pm10 (station, time) float64 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_2.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/2.1-create_nes_port.ipynb b/tutorials/2.Creation/2.4.Create_Points_Port_Barcelona.ipynb similarity index 73% rename from Jupyter_notebooks/2.1-create_nes_port.ipynb rename to tutorials/2.Creation/2.4.Create_Points_Port_Barcelona.ipynb index e66b4f8812f1c0195b9adfe46cb635773e2e1e52..2e7020e0ed430fca8de3de6a809a289ec3e8286d 100644 --- a/Jupyter_notebooks/2.1-create_nes_port.ipynb +++ b/tutorials/2.Creation/2.4.Create_Points_Port_Barcelona.ipynb @@ -17,7 +17,8 @@ "import pandas as pd\n", "import numpy as np\n", "from datetime import datetime, timedelta\n", - "from nes import *" + "from nes import *\n", + "import os" ] }, { @@ -166,7 +167,7 @@ } ], "source": [ - "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx'\n", + "file_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/Dades_Port_Barcelona_2017-2021_corr.xlsx'\n", "df_data = pd.read_excel(file_path, header=3, index_col='Horario: UTC')\n", "df_data" ] @@ -279,7 +280,7 @@ } ], "source": [ - "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/estaciones.csv'\n", + "file_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/estaciones.csv'\n", "df_stations = pd.read_csv(file_path)\n", "df_stations" ] @@ -404,10 +405,14 @@ "Rank 000: Creating points_grid_no2.nc\n", "Rank 000: NetCDF ready to write\n", "Rank 000: Dimensions done\n", + "|S1\n", + "S75\n", "Rank 000: Writing station_name var (1/2)\n", "Rank 000: Var station_name created (1/2)\n", "Rank 000: Var station_name data (1/2)\n", "Rank 000: Var station_name completed (1/2)\n", + "float64\n", + "\n", "Rank 000: Writing sconcno2 var (2/2)\n", "Rank 000: Var sconcno2 created (2/2)\n" ] @@ -416,7 +421,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:336: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", " warnings.warn(msg)\n" ] }, @@ -794,38 +799,49 @@ " fill: currentColor;\n", "}\n", "
    <xarray.Dataset>\n",
    -       "Dimensions:       (time: 43818, station: 2)\n",
    +       "Dimensions:       (time: 43818, station: 2, strlen: 75)\n",
            "Coordinates:\n",
            "  * time          (time) datetime64[ns] 2017-01-01 ... 2021-12-31T17:00:00\n",
            "  * station       (station) float64 0.0 1.0\n",
    +       "Dimensions without coordinates: strlen\n",
            "Data variables:\n",
    -       "    station_name  (station) object 'NO2-UM' 'NO2-ZAL Prat'\n",
    -       "    sconcno2      (time, station) float64 64.64 49.08 68.16 ... 12.76 28.66\n",
            "    lat           (station) float64 41.37 41.32\n",
            "    lon           (station) float64 2.185 2.135\n",
    +       "    station_name  (station, strlen) object 'N' 'O' '2' '-' 'U' ... '' '' '' ''\n",
    +       "    sconcno2      (time, station) float64 64.64 49.08 68.16 ... 12.76 28.66\n",
            "Attributes:\n",
    -       "    Conventions:  CF-1.7
    +       "    Conventions:  CF-1.7
    " 
\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 36\u001b[0m \u001b[0;31m# Save files\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 37\u001b[0;31m \u001b[0mpoints_grid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_netcdf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetcdf_path\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'/sconcno2_{0}{1}.nc'\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0myear\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmonth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzfill\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 38\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0;32mdel\u001b[0m \u001b[0mpoints_grid\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36mto_netcdf\u001b[0;34m(self, path, compression_level, serial, info, chunking)\u001b[0m\n\u001b[1;32m 1946\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1947\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1948\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__to_netcdf_py\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1949\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1950\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mold_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36m__to_netcdf_py\u001b[0;34m(self, path, chunking)\u001b[0m\n\u001b[1;32m 1882\u001b[0m \u001b[0mnetcdf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mDataset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mformat\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"NETCDF4\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmode\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'w'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparallel\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcomm\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcomm\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mMPI\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mInfo\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1883\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1884\u001b[0;31m \u001b[0mnetcdf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mDataset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mformat\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"NETCDF4\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmode\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'w'\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mparallel\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1885\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1886\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Rank {0:03d}: NetCDF ready to write\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrank\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Dataset.__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4._ensure_nc_success\u001b[0;34m()\u001b[0m\n", + "\u001b[0;31mPermissionError\u001b[0m: [Errno 13] Permission denied: b'/esarchive/obs/port_barcelona/port-barcelona/hourly/sconcno2/sconcno2_201701.nc'" ] } ], @@ -1122,9 +1087,15 @@ " \n", " # Assign metadata\n", " points_grid.variables = metadata\n", - " \n", + " \n", + "\n", + " # Making directory\n", + " netcdf_path = '/esarchive/obs/port_barcelona/port-barcelona/hourly/sconcno2'\n", + " if not os.path.exists(os.path.dirname(netcdf_path)):\n", + " os.makedirs(os.path.dirname(netcdf_path))\n", + " \n", " # Save files\n", - " points_grid.to_netcdf('/esarchive/obs/generalitat/port-barcelona/hourly/sconcno2/sconcno2_{0}{1}.nc'.format(year, str(month).zfill(2)))\n", + " points_grid.to_netcdf(netcdf_path + '/sconcno2_{0}{1}.nc'.format(year, str(month).zfill(2)))\n", " \n", " del points_grid\n", " print('Done sconcno2_{0}{1}.nc'.format(year, str(month).zfill(2)))" @@ -1132,7 +1103,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -1495,22 +1466,22 @@ " * time (time) datetime64[ns] 2017-05-01 ... 2017-05-31T23:00:00\n", " * station (station) float64 0.0 1.0\n", "Data variables:\n", - " station_name (station) object 'NO2-UM' 'NO2-ZAL Prat'\n", + " station_name (station) |S75 b'NO2-UM' b'NO2-ZAL Prat'\n", " altitude (station) float64 nan nan\n", " sconcno2 (time, station) float64 8.77 23.49 1.76 ... 27.11 13.87 30.07\n", " lat (station) float64 41.37 41.32\n", " lon (station) float64 2.185 2.135\n", "Attributes:\n", - " Conventions: CF-1.7
 " 
 " 
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    traffic_siteurban_site
    Date-hour in
    2019-01-014.9889882.553235
    2019-02-013.4225351.556226
    2019-03-012.6750651.686355
    2019-04-013.4255221.975486
    2019-05-015.3148091.119245
    2019-06-013.1394951.626567
    2019-07-010.0000002.226856
    2019-08-010.0000002.469638
    2019-09-010.0000003.727355
    2019-10-010.0000001.535056
    2019-11-010.0000002.511152
    2019-12-010.0000000.000000
    \n", + "
    " + ], + "text/plain": [ + " traffic_site urban_site\n", + "Date-hour in \n", + "2019-01-01 4.988988 2.553235\n", + "2019-02-01 3.422535 1.556226\n", + "2019-03-01 2.675065 1.686355\n", + "2019-04-01 3.425522 1.975486\n", + "2019-05-01 5.314809 1.119245\n", + "2019-06-01 3.139495 1.626567\n", + "2019-07-01 0.000000 2.226856\n", + "2019-08-01 0.000000 2.469638\n", + "2019-09-01 0.000000 3.727355\n", + "2019-10-01 0.000000 1.535056\n", + "2019-11-01 0.000000 2.511152\n", + "2019-12-01 0.000000 0.000000" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/NH3_barcelona_2019_csic.csv'\n", + "df_data = pd.read_csv(file_path, index_col='Date-hour in', parse_dates=True)\n", + "df_data" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    stationLonLat
    0urban_site2.115141.3875
    1traffic_site2.153441.3987
    \n", + "
    " + ], + "text/plain": [ + " station Lon Lat\n", + "0 urban_site 2.1151 41.3875\n", + "1 traffic_site 2.1534 41.3987" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/NH3_stations_CSIC.csv'\n", + "df_stations = pd.read_csv(file_path)\n", + "df_stations" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create dataset with all timesteps" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([datetime.datetime(2019, 1, 1, 0, 0),\n", + " datetime.datetime(2019, 2, 1, 0, 0),\n", + " datetime.datetime(2019, 3, 1, 0, 0),\n", + " datetime.datetime(2019, 4, 1, 0, 0),\n", + " datetime.datetime(2019, 5, 1, 0, 0),\n", + " datetime.datetime(2019, 6, 1, 0, 0),\n", + " datetime.datetime(2019, 7, 1, 0, 0),\n", + " datetime.datetime(2019, 8, 1, 0, 0),\n", + " datetime.datetime(2019, 9, 1, 0, 0),\n", + " datetime.datetime(2019, 10, 1, 0, 0),\n", + " datetime.datetime(2019, 11, 1, 0, 0),\n", + " datetime.datetime(2019, 12, 1, 0, 0)], dtype=object)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "times = df_data.index.to_pydatetime()\n", + "times" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([41.3875, 41.3987])" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lat = df_stations['Lat'].to_numpy()\n", + "lat" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([2.1151, 2.1534])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lon = df_stations['Lon'].to_numpy()\n", + "lon" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[4.98898815, 2.55323513],\n", + " [3.42253493, 1.55622608],\n", + " [2.67506499, 1.68635512],\n", + " [3.42552223, 1.97548632],\n", + " [5.31480909, 1.11924537],\n", + " [3.139495 , 1.6265667 ],\n", + " [0. , 2.22685565],\n", + " [0. , 2.46963801],\n", + " [0. , 3.72735536],\n", + " [0. , 1.5350561 ],\n", + " [0. , 2.51115189],\n", + " [0. , 0. 
]])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_data.to_numpy()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "metadata = {'station_name': {'data': df_data.columns.to_numpy(),\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'sconcnh3': {'data': df_data.to_numpy(),\n", + " 'dimensions': ('time', 'station',),\n", + " 'dtype': float}}" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon, times=times)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_nh3.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_name var (1/2)\n", + "Rank 000: Var station_name created (1/2)\n", + "Rank 000: Var station_name data (1/2)\n", + "Rank 000: Var station_name completed (1/2)\n", + "Rank 000: Writing sconcnh3 var (2/2)\n", + "Rank 000: Var sconcnh3 created (2/2)\n", + "Rank 000: Var sconcnh3 data (2/2)\n", + "Rank 000: Var sconcnh3 completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:336: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_nh3.nc', info=True)\n", + "del points_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:       (time: 12, station: 2, strlen: 75)\n",
    +       "Coordinates:\n",
    +       "  * time          (time) datetime64[ns] 2019-01-01 2019-02-01 ... 2019-12-01\n",
    +       "  * station       (station) float64 0.0 1.0\n",
    +       "Dimensions without coordinates: strlen\n",
    +       "Data variables:\n",
    +       "    lat           (station) float64 41.39 41.4\n",
    +       "    lon           (station) float64 2.115 2.153\n",
    +       "    station_name  (station, strlen) object 't' 'r' 'a' 'f' 'f' ... '' '' '' ''\n",
    +       "    sconcnh3      (time, station) float64 4.989 2.553 3.423 ... 2.511 0.0 0.0\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 12, station: 2, strlen: 75)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2019-01-01 2019-02-01 ... 2019-12-01\n", + " * station (station) float64 0.0 1.0\n", + "Dimensions without coordinates: strlen\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " station_name (station, strlen) object ...\n", + " sconcnh3 (time, station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_nh3.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create one dataset per month (Ready for Providentia)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "df_data['month'] = df_data.index.month\n", + "df_data['year'] = df_data.index.year" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    traffic_siteurban_sitemonthyear
    Date-hour in
    2019-01-014.9889882.55323512019
    2019-02-013.4225351.55622622019
    2019-03-012.6750651.68635532019
    2019-04-013.4255221.97548642019
    2019-05-015.3148091.11924552019
    2019-06-013.1394951.62656762019
    2019-07-010.0000002.22685672019
    2019-08-010.0000002.46963882019
    2019-09-010.0000003.72735592019
    2019-10-010.0000001.535056102019
    2019-11-010.0000002.511152112019
    2019-12-010.0000000.000000122019
    \n", + "
    " + ], + "text/plain": [ + " traffic_site urban_site month year\n", + "Date-hour in \n", + "2019-01-01 4.988988 2.553235 1 2019\n", + "2019-02-01 3.422535 1.556226 2 2019\n", + "2019-03-01 2.675065 1.686355 3 2019\n", + "2019-04-01 3.425522 1.975486 4 2019\n", + "2019-05-01 5.314809 1.119245 5 2019\n", + "2019-06-01 3.139495 1.626567 6 2019\n", + "2019-07-01 0.000000 2.226856 7 2019\n", + "2019-08-01 0.000000 2.469638 8 2019\n", + "2019-09-01 0.000000 3.727355 9 2019\n", + "2019-10-01 0.000000 1.535056 10 2019\n", + "2019-11-01 0.000000 2.511152 11 2019\n", + "2019-12-01 0.000000 0.000000 12 2019" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_data" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Done sconcnh3_201901.nc\n", + "Done sconcnh3_201902.nc\n", + "Done sconcnh3_201903.nc\n", + "Done sconcnh3_201904.nc\n", + "Done sconcnh3_201905.nc\n", + "Done sconcnh3_201906.nc\n", + "Done sconcnh3_201907.nc\n", + "Done sconcnh3_201908.nc\n", + "Done sconcnh3_201909.nc\n", + "Done sconcnh3_201910.nc\n", + "Done sconcnh3_201911.nc\n", + "Done sconcnh3_201912.nc\n" + ] + } + ], + "source": [ + "for (year, month), current in df_data.groupby(['year', 'month']):\n", + " \n", + " # Read time\n", + " times = current.index.to_pydatetime()\n", + " \n", + " # Fill altitude with nans\n", + " altitude = np.full(len(current.columns[0:2]), np.nan)\n", + " \n", + " # Read metadata\n", + " metadata = {'station_name': {'data': current.columns[0:2].to_numpy(),\n", + " 'dimensions': ('station'),\n", + " 'standard_name': ''},\n", + " 'altitude': {'data': altitude,\n", + " 'dimensions': ('station',),\n", + " 'units': 'meters',\n", + " 'standard_name': 'altitude'},\n", + " 'sconcnh3': {'data': current.iloc[:, 0:2].to_numpy(),\n", + " 'units': 'µg m-3',\n", + " 'dimensions': ('time', 'station',),\n", + " 'long_name': ''}\n", + " }\n", + " \n", + " # Create object\n", + " points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon, times=times)\n", + " \n", + " # Assign metadata\n", + " points_grid.variables = metadata\n", + " \n", + " # Making directory\n", + " netcdf_path = '/esarchive/obs/csic/csic/monthly/sconcnh3/'\n", + " if not os.path.exists(os.path.dirname(netcdf_path)):\n", + " os.makedirs(os.path.dirname(netcdf_path))\n", + " \n", + " # Save files\n", + " points_grid.to_netcdf(netcdf_path + '/sconcnh3_{0}{1}.nc'.format(year, str(month).zfill(2)))\n", + " \n", + " del points_grid\n", + " print('Done sconcnh3_{0}{1}.nc'.format(year, str(month).zfill(2)))" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:       (time: 1, station: 2)\n",
    +       "Coordinates:\n",
    +       "  * time          (time) datetime64[ns] 2019-05-01\n",
    +       "  * station       (station) float64 0.0 1.0\n",
    +       "Data variables:\n",
    +       "    lat           (station) float64 41.39 41.4\n",
    +       "    lon           (station) float64 2.115 2.153\n",
    +       "    station_name  (station) object 'traffic_site' 'urban_site'\n",
    +       "    altitude      (station) float64 nan nan\n",
    +       "    sconcnh3      (time, station) float64 5.315 1.119\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, station: 2)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2019-05-01\n", + " * station (station) float64 0.0 1.0\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " station_name (station) object ...\n", + " altitude (station) float64 ...\n", + " sconcnh3 (time, station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('/esarchive/obs/csic/csic/monthly/sconcnh3/sconcnh3_201905.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:       (time: 1, station: 2)\n",
    +       "Coordinates:\n",
    +       "  * time          (time) datetime64[ns] 2019-01-01\n",
    +       "  * station       (station) float64 0.0 1.0\n",
    +       "Data variables:\n",
    +       "    lat           (station) float64 41.39 41.4\n",
    +       "    lon           (station) float64 2.115 2.153\n",
    +       "    station_name  (station) object 'traffic_site' 'urban_site'\n",
    +       "    altitude      (station) float64 nan nan\n",
    +       "    sconcnh3      (time, station) float64 4.989 2.553\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, station: 2)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2019-01-01\n", + " * station (station) float64 0.0 1.0\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " station_name (station) object ...\n", + " altitude (station) float64 ...\n", + " sconcnh3 (time, station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('/esarchive/obs/csic/csic/monthly/sconcnh3/sconcnh3_201901.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + }, + "vscode": { + "interpreter": { + "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/2.Creation/2.6.Create-LCC.ipynb b/tutorials/2.Creation/2.6.Create-LCC.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e64d5200e52dfe5aca0aa830b50fbdfddd211399 --- /dev/null +++ b/tutorials/2.Creation/2.6.Create-LCC.ipynb @@ -0,0 +1,482 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create LCC grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lat_1 = 37\n", + "lat_2 = 43\n", + "lon_0 = -3\n", + "lat_0 = 40\n", + "nx = 397\n", + "ny = 397\n", + "inc_x = 4000\n", + "inc_y = 4000\n", + "x_0 = -807847.688\n", + "y_0 = -797137.125\n", + "lcc_grid = create_nes(comm=None, info=False, projection='lcc',\n", + " lat_1=lat_1, lat_2=lat_2, lon_0=lon_0, lat_0=lat_0, \n", + " nx=nx, ny=ny, inc_x=inc_x, inc_y=inc_y, x_0=x_0, y_0=y_0)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating lcc_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "lcc_grid.to_netcdf('lcc_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:            (time: 1, lev: 1, y: 397, x: 397)\n",
    +       "Coordinates:\n",
    +       "  * time               (time) datetime64[ns] 1996-12-31\n",
    +       "  * lev                (lev) float64 0.0\n",
    +       "  * y                  (y) float64 -7.951e+05 -7.911e+05 ... 7.849e+05 7.889e+05\n",
    +       "  * x                  (x) float64 -8.058e+05 -8.018e+05 ... 7.742e+05 7.782e+05\n",
    +       "Data variables:\n",
    +       "    lat                (y, x) float64 ...\n",
    +       "    lon                (y, x) float64 ...\n",
    +       "    Lambert_conformal  |S1 b''\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, y: 397, x: 397)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * y (y) float64 -7.951e+05 -7.911e+05 ... 7.849e+05 7.889e+05\n", + " * x (x) float64 -8.058e+05 -8.018e+05 ... 7.742e+05 7.782e+05\n", + "Data variables:\n", + " lat (y, x) float64 ...\n", + " lon (y, x) float64 ...\n", + " Lambert_conformal |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('lcc_grid.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/2.Creation/2.7.Create_Mercator.ipynb b/tutorials/2.Creation/2.7.Create_Mercator.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e2c7399beb4618d86ff29389eb6e05ceeedef64d --- /dev/null +++ b/tutorials/2.Creation/2.7.Create_Mercator.ipynb @@ -0,0 +1,503 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create Mercator grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lat_ts = -1.5\n", + "lon_0 = -18.0\n", + "nx = 210\n", + "ny = 236\n", + "inc_x = 50000\n", + "inc_y = 50000\n", + "x_0 = -126017.5\n", + "y_0 = -5407460.0\n", + "mercator_grid = create_nes(comm=None, info=False, projection='mercator',\n", + " lat_ts=lat_ts, lon_0=lon_0, nx=nx, ny=ny, \n", + " inc_x=inc_x, inc_y=inc_y, x_0=x_0, y_0=y_0)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating mercator_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "mercator_grid.to_netcdf('mercator_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
    <xarray.Dataset>\n",
    +       "Dimensions:   (time: 1, lev: 1, y: 236, x: 210)\n",
    +       "Coordinates:\n",
    +       "  * time      (time) datetime64[ns] 1996-12-31\n",
    +       "  * lev       (lev) float64 0.0\n",
    +       "  * y         (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
    +       "  * x         (x) float64 -1.01e+05 -5.102e+04 -1.018e+03 ... 1.03e+07 1.035e+07\n",
    +       "Data variables:\n",
    +       "    lat       (y, x) float64 -43.67 -43.67 -43.67 -43.67 ... 49.75 49.75 49.75\n",
    +       "    lon       (y, x) float64 -18.91 -18.46 -18.01 -17.56 ... 74.1 74.55 75.0\n",
    +       "    mercator  |S1 b''\n",
    +       "Attributes:\n",
    +       "    Conventions:  CF-1.7
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, y: 236, x: 210)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", + " * x (x) float64 -1.01e+05 -5.102e+04 -1.018e+03 ... 1.03e+07 1.035e+07\n", + "Data variables:\n", + " lat (y, x) float64 ...\n", + " lon (y, x) float64 ...\n", + " mercator |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('mercator_grid.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1-introduction.ipynb b/tutorials/3.Statistics/3.1.Statistics.ipynb similarity index 96% rename from Jupyter_notebooks/1-introduction.ipynb rename to tutorials/3.Statistics/3.1.Statistics.ipynb index 615860d3bed82415dcc5a0269940034654a10415..e65883867faddd8ea4e1b7cd4b81a5d35ff5a417 100644 --- a/Jupyter_notebooks/1-introduction.ipynb +++ b/tutorials/3.Statistics/3.1.Statistics.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Introduction to NES - NetCDF for Earth Science" + "# Calculate simple statistics" ] }, { @@ -32,8 +32,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 175 ms, sys: 159 ms, total: 335 ms\n", - "Wall time: 15.7 s\n" + "CPU times: user 156 ms, sys: 52.8 ms, total: 209 ms\n", + "Wall time: 15.2 s\n" ] } ], @@ -50,7 +50,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 3, @@ -341,8 +341,8 @@ "text": [ "Rank 000: Loading O3 var (1/1)\n", "Rank 000: Loaded O3 var ((109, 24, 361, 467))\n", - "CPU times: user 1.21 s, sys: 7.1 s, total: 8.32 s\n", - "Wall time: 42 s\n" + "CPU times: user 1.15 s, sys: 6.44 s, total: 7.6 s\n", + "Wall time: 39.5 s\n" ] } ], @@ -379,8 +379,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 723 ms, sys: 719 ms, total: 1.44 s\n", - "Wall time: 14.3 s\n" + "CPU times: user 708 ms, sys: 497 ms, total: 1.2 s\n", + "Wall time: 14.5 s\n" ] } ], @@ -393,7 +393,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Statistics" + "## Calculate daily statistics" ] }, { @@ -405,8 +405,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 403 ms, sys: 206 ms, total: 609 ms\n", - "Wall time: 611 ms\n" + "CPU times: user 340 ms, sys: 482 ms, total: 822 ms\n", + "Wall time: 823 ms\n" ] } ], @@ -442,8 +442,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 49.2 ms, sys: 30.3 ms, total: 79.4 ms\n", - "Wall time: 837 ms\n" + "CPU times: user 39.4 ms, sys: 29.8 ms, total: 69.2 ms\n", + "Wall time: 715 ms\n" ] } ], diff --git a/tutorials/4.Interpolation/4.1.Vertical_Interpolation.ipynb b/tutorials/4.Interpolation/4.1.Vertical_Interpolation.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..435f3463ff05277fbf3f4b7b146a622f225136c2 --- /dev/null +++ b/tutorials/4.Interpolation/4.1.Vertical_Interpolation.ipynb @@ -0,0 +1,479 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How 
to interpolate vertically" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Interpolation with vertical coordinate and variable to interpolate in one file" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.1. Read dataset to interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "source_path = '/gpfs/scratch/bsc32/bsc32538/original_files/CAMS_MONARCH_d01_2022070412.nc'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid = open_netcdf(path=source_path, info=True, avoid_first_hours=12, avoid_last_hours=73)\n", + "source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,\n", + " 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],\n", + " mask=False,\n", + " fill_value=999999,\n", + " dtype=int32),\n", + " 'dimensions': ('lm',),\n", + " 'units': '',\n", + " 'long_name': 'layer id'}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "source_grid.keep_vars(['O3', 'mid_layer_height_agl'])" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading mid_layer_height_agl var (1/2)\n", + "Rank 000: Loaded mid_layer_height_agl var ((24, 24, 361, 467))\n", + "Rank 000: Loading O3 var (2/2)\n", + "Rank 000: Loaded O3 var ((24, 24, 361, 467))\n" + ] + } + ], + "source": [ + "source_grid.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.2. 
Interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "level_list = [0.,50.,100.,250.,500.,750.,1000.,2000.,3000.,5000.]" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "source_grid.vertical_var_name = 'mid_layer_height_agl'" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\tO3 vertical interpolation\n", + "\t\tO3 time step 2022-07-05 00:00:00 (1/24))\n", + "\t\tO3 time step 2022-07-05 01:00:00 (2/24))\n", + "\t\tO3 time step 2022-07-05 02:00:00 (3/24))\n", + "\t\tO3 time step 2022-07-05 03:00:00 (4/24))\n", + "\t\tO3 time step 2022-07-05 04:00:00 (5/24))\n", + "\t\tO3 time step 2022-07-05 05:00:00 (6/24))\n", + "\t\tO3 time step 2022-07-05 06:00:00 (7/24))\n", + "\t\tO3 time step 2022-07-05 07:00:00 (8/24))\n", + "\t\tO3 time step 2022-07-05 08:00:00 (9/24))\n", + "\t\tO3 time step 2022-07-05 09:00:00 (10/24))\n", + "\t\tO3 time step 2022-07-05 10:00:00 (11/24))\n", + "\t\tO3 time step 2022-07-05 11:00:00 (12/24))\n", + "\t\tO3 time step 2022-07-05 12:00:00 (13/24))\n", + "\t\tO3 time step 2022-07-05 13:00:00 (14/24))\n", + "\t\tO3 time step 2022-07-05 14:00:00 (15/24))\n", + "\t\tO3 time step 2022-07-05 15:00:00 (16/24))\n", + "\t\tO3 time step 2022-07-05 16:00:00 (17/24))\n", + "\t\tO3 time step 2022-07-05 17:00:00 (18/24))\n", + "\t\tO3 time step 2022-07-05 18:00:00 (19/24))\n", + "\t\tO3 time step 2022-07-05 19:00:00 (20/24))\n", + "\t\tO3 time step 2022-07-05 20:00:00 (21/24))\n", + "\t\tO3 time step 2022-07-05 21:00:00 (22/24))\n", + "\t\tO3 time step 2022-07-05 22:00:00 (23/24))\n", + "\t\tO3 time step 2022-07-05 23:00:00 (24/24))\n" + ] + } + ], + "source": [ + "interpolated_source_grid = source_grid.interpolate_vertical(level_list, info=True, kind='linear', extrapolate=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': [0.0,\n", + " 50.0,\n", + " 100.0,\n", + " 250.0,\n", + " 500.0,\n", + " 750.0,\n", + " 1000.0,\n", + " 2000.0,\n", + " 3000.0,\n", + " 5000.0],\n", + " 'long_name': 'Mid-layer height above ground level',\n", + " 'standard_name': 'height_agl',\n", + " 'units': 'm',\n", + " 'coordinates': 'lon lat'}" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lev" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Interpolation with vertical coordinate in a separate file" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1. 
Read data to interpolate (source)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_data = open_netcdf(path=source_path, info=True, avoid_first_hours=12, avoid_last_hours=73)\n", + "source_data" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "source_data.keep_vars(['O3'])" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3 var (1/1)\n", + "Rank 000: Loaded O3 var ((24, 24, 361, 467))\n" + ] + } + ], + "source": [ + "source_data.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2. Read vertical levels (source)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_levels = open_netcdf(path=source_path, info=True, avoid_first_hours=12, avoid_last_hours=73)\n", + "source_levels" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "source_levels.keep_vars(['mid_layer_height_agl'])" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading mid_layer_height_agl var (1/1)\n", + "Rank 000: Loaded mid_layer_height_agl var ((24, 24, 361, 467))\n" + ] + } + ], + "source": [ + "source_levels.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.3. 
Interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\tO3 vertical interpolation\n", + "\t\tO3 time step 2022-07-05 00:00:00 (1/24))\n", + "\t\tO3 time step 2022-07-05 01:00:00 (2/24))\n", + "\t\tO3 time step 2022-07-05 02:00:00 (3/24))\n", + "\t\tO3 time step 2022-07-05 03:00:00 (4/24))\n", + "\t\tO3 time step 2022-07-05 04:00:00 (5/24))\n", + "\t\tO3 time step 2022-07-05 05:00:00 (6/24))\n", + "\t\tO3 time step 2022-07-05 06:00:00 (7/24))\n", + "\t\tO3 time step 2022-07-05 07:00:00 (8/24))\n", + "\t\tO3 time step 2022-07-05 08:00:00 (9/24))\n", + "\t\tO3 time step 2022-07-05 09:00:00 (10/24))\n", + "\t\tO3 time step 2022-07-05 10:00:00 (11/24))\n", + "\t\tO3 time step 2022-07-05 11:00:00 (12/24))\n", + "\t\tO3 time step 2022-07-05 12:00:00 (13/24))\n", + "\t\tO3 time step 2022-07-05 13:00:00 (14/24))\n", + "\t\tO3 time step 2022-07-05 14:00:00 (15/24))\n", + "\t\tO3 time step 2022-07-05 15:00:00 (16/24))\n", + "\t\tO3 time step 2022-07-05 16:00:00 (17/24))\n", + "\t\tO3 time step 2022-07-05 17:00:00 (18/24))\n", + "\t\tO3 time step 2022-07-05 18:00:00 (19/24))\n", + "\t\tO3 time step 2022-07-05 19:00:00 (20/24))\n", + "\t\tO3 time step 2022-07-05 20:00:00 (21/24))\n", + "\t\tO3 time step 2022-07-05 21:00:00 (22/24))\n", + "\t\tO3 time step 2022-07-05 22:00:00 (23/24))\n", + "\t\tO3 time step 2022-07-05 23:00:00 (24/24))\n" + ] + } + ], + "source": [ + "interpolated_source_grid = source_data.interpolate_vertical(level_list, new_src_vertical=source_levels,\n", + " info=True, kind='linear', extrapolate=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': [0.0,\n", + " 50.0,\n", + " 100.0,\n", + " 250.0,\n", + " 500.0,\n", + " 750.0,\n", + " 1000.0,\n", + " 2000.0,\n", + " 3000.0,\n", + " 5000.0],\n", + " 'long_name': 'Mid-layer height above ground level',\n", + " 'standard_name': 'height_agl',\n", + " 'units': 'm',\n", + " 'coordinates': 'lon lat'}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lev" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/4.Interpolation/4.2.Horizontal_Interpolation.ipynb b/tutorials/4.Interpolation/4.2.Horizontal_Interpolation.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..f1f44986341b3109b57225eaaf72c6405737c46d --- /dev/null +++ b/tutorials/4.Interpolation/4.2.Horizontal_Interpolation.ipynb @@ -0,0 +1,702 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to interpolate horizontally" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *" + ] + }, + { + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read data to interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "source_path = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid = open_netcdf(path=source_path, info=True)\n", + "source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[16.35033798, 16.43292999, 16.51514626, ..., 16.51514626,\n", + " 16.43292999, 16.35033798],\n", + " [16.52742577, 16.61023903, 16.69267654, ..., 16.69267654,\n", + " 16.61024284, 16.52742577],\n", + " [16.70447159, 16.78750801, 16.87016678, ..., 16.87016678,\n", + " 16.78750992, 16.70447159],\n", + " ...,\n", + " [58.32094955, 58.47268295, 58.62430954, ..., 58.62430954,\n", + " 58.47268295, 58.32094955],\n", + " [58.42628479, 58.57820129, 58.73002625, ..., 58.73002625,\n", + " 58.57820129, 58.42628479],\n", + " [58.53079224, 58.68289948, 58.83491898, ..., 58.83491898,\n", + " 58.68290329, 58.53079224]],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", + " 'standard_name': 'latitude'}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-22.18126488, -22.01667213, -21.85179901, ..., 41.8517952 ,\n", + " 42.01666641, 42.18125916],\n", + " [-22.27817917, -22.11318588, -21.94790459, ..., 41.94789886,\n", + " 42.11317444, 42.27817154],\n", + " [-22.37526703, -22.2098732 , -22.04418945, ..., 42.04418564,\n", + " 42.2098732 , 42.37526321],\n", + " ...,\n", + " [-67.57766724, -67.39706421, -67.21534729, ..., 87.21533966,\n", + " 87.39705658, 87.57765961],\n", + " [-67.90187836, -67.72247314, -67.54193878, ..., 87.54193878,\n", + " 87.72245789, 87.90187073],\n", + " [-68.22803497, -68.04981995, -67.87051392, ..., 87.87050629,\n", + " 88.04981995, 88.22803497]],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3_all var (1/1)\n", + "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" + ] + } + ], + "source": [ + "source_grid.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Interpolation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1. 
With destination grid from file" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Read destination grid" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid_path = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc'\n", + "dst_grid = open_netcdf(path=dst_grid_path, info=True)\n", + "dst_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-90., -89., -88., -87., -86., -85., -84., -83., -82.,\n", + " -81., -80., -79., -78., -77., -76., -75., -74., -73.,\n", + " -72., -71., -70., -69., -68., -67., -66., -65., -64.,\n", + " -63., -62., -61., -60., -59., -58., -57., -56., -55.,\n", + " -54., -53., -52., -51., -50., -49., -48., -47., -46.,\n", + " -45., -44., -43., -42., -41., -40., -39., -38., -37.,\n", + " -36., -35., -34., -33., -32., -31., -30., -29., -28.,\n", + " -27., -26., -25., -24., -23., -22., -21., -20., -19.,\n", + " -18., -17., -16., -15., -14., -13., -12., -11., -10.,\n", + " -9., -8., -7., -6., -5., -4., -3., -2., -1.,\n", + " 0., 1., 2., 3., 4., 5., 6., 7., 8.,\n", + " 9., 10., 11., 12., 13., 14., 15., 16., 17.,\n", + " 18., 19., 20., 21., 22., 23., 24., 25., 26.,\n", + " 27., 28., 29., 30., 31., 32., 33., 34., 35.,\n", + " 36., 37., 38., 39., 40., 41., 42., 43., 44.,\n", + " 45., 46., 47., 48., 49., 50., 51., 52., 53.,\n", + " 54., 55., 56., 57., 58., 59., 60., 61., 62.,\n", + " 63., 64., 65., 66., 67., 68., 69., 70., 71.,\n", + " 72., 73., 74., 75., 76., 77., 78., 79., 80.,\n", + " 81., 82., 83., 84., 85., 86., 87., 88., 89.,\n", + " 90.],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lat',),\n", + " 'long_name': 'latitude',\n", + " 'units': 'degrees_north',\n", + " 'standard_name': 'grid_latitude'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-180. , -178.59375, -177.1875 , -175.78125,\n", + " -174.375 , -172.96875, -171.5625 , -170.15625,\n", + " -168.75 , -167.34375, -165.9375 , -164.53125,\n", + " -163.125 , -161.71875, -160.3125 , -158.90625,\n", + " -157.5 , -156.09375, -154.6875 , -153.28125,\n", + " -151.875 , -150.46875, -149.0625 , -147.65625,\n", + " -146.25 , -144.84375, -143.4375 , -142.03125,\n", + " -140.625 , -139.21875, -137.8125 , -136.40625,\n", + " -135. , -133.59375, -132.1875 , -130.78125,\n", + " -129.375 , -127.96875, -126.5625 , -125.15625,\n", + " -123.75 , -122.34375, -120.9375 , -119.53125,\n", + " -118.125 , -116.71875, -115.3125 , -113.90625,\n", + " -112.5 , -111.09375, -109.6875 , -108.28125,\n", + " -106.875 , -105.46875, -104.0625 , -102.65625,\n", + " -101.25 , -99.84375, -98.4375 , -97.03125,\n", + " -95.625 , -94.21875, -92.8125 , -91.40625,\n", + " -90. 
, -88.59375, -87.1875 , -85.78125,\n", + " -84.375 , -82.96875, -81.5625 , -80.15625,\n", + " -78.75 , -77.34375, -75.9375 , -74.53125,\n", + " -73.125 , -71.71875, -70.3125 , -68.90625,\n", + " -67.5 , -66.09375, -64.6875 , -63.28125,\n", + " -61.875 , -60.46875, -59.0625 , -57.65625,\n", + " -56.25 , -54.84375, -53.4375 , -52.03125,\n", + " -50.625 , -49.21875, -47.8125 , -46.40625,\n", + " -45. , -43.59375, -42.1875 , -40.78125,\n", + " -39.375 , -37.96875, -36.5625 , -35.15625,\n", + " -33.75 , -32.34375, -30.9375 , -29.53125,\n", + " -28.125 , -26.71875, -25.3125 , -23.90625,\n", + " -22.5 , -21.09375, -19.6875 , -18.28125,\n", + " -16.875 , -15.46875, -14.0625 , -12.65625,\n", + " -11.25 , -9.84375, -8.4375 , -7.03125,\n", + " -5.625 , -4.21875, -2.8125 , -1.40625,\n", + " 0. , 1.40625, 2.8125 , 4.21875,\n", + " 5.625 , 7.03125, 8.4375 , 9.84375,\n", + " 11.25 , 12.65625, 14.0625 , 15.46875,\n", + " 16.875 , 18.28125, 19.6875 , 21.09375,\n", + " 22.5 , 23.90625, 25.3125 , 26.71875,\n", + " 28.125 , 29.53125, 30.9375 , 32.34375,\n", + " 33.75 , 35.15625, 36.5625 , 37.96875,\n", + " 39.375 , 40.78125, 42.1875 , 43.59375,\n", + " 45. , 46.40625, 47.8125 , 49.21875,\n", + " 50.625 , 52.03125, 53.4375 , 54.84375,\n", + " 56.25 , 57.65625, 59.0625 , 60.46875,\n", + " 61.875 , 63.28125, 64.6875 , 66.09375,\n", + " 67.5 , 68.90625, 70.3125 , 71.71875,\n", + " 73.125 , 74.53125, 75.9375 , 77.34375,\n", + " 78.75 , 80.15625, 81.5625 , 82.96875,\n", + " 84.375 , 85.78125, 87.1875 , 88.59375,\n", + " 90. , 91.40625, 92.8125 , 94.21875,\n", + " 95.625 , 97.03125, 98.4375 , 99.84375,\n", + " 101.25 , 102.65625, 104.0625 , 105.46875,\n", + " 106.875 , 108.28125, 109.6875 , 111.09375,\n", + " 112.5 , 113.90625, 115.3125 , 116.71875,\n", + " 118.125 , 119.53125, 120.9375 , 122.34375,\n", + " 123.75 , 125.15625, 126.5625 , 127.96875,\n", + " 129.375 , 130.78125, 132.1875 , 133.59375,\n", + " 135. , 136.40625, 137.8125 , 139.21875,\n", + " 140.625 , 142.03125, 143.4375 , 144.84375,\n", + " 146.25 , 147.65625, 149.0625 , 150.46875,\n", + " 151.875 , 153.28125, 154.6875 , 156.09375,\n", + " 157.5 , 158.90625, 160.3125 , 161.71875,\n", + " 163.125 , 164.53125, 165.9375 , 167.34375,\n", + " 168.75 , 170.15625, 171.5625 , 172.96875,\n", + " 174.375 , 175.78125, 177.1875 , 178.59375,\n", + " 180. 
],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lon',),\n", + " 'long_name': 'longitude',\n", + " 'units': 'degrees_east',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid.lon" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\tO3_all horizontal interpolation\n" + ] + } + ], + "source": [ + "interpolated_source_grid = source_grid.interpolate_horizontal(dst_grid, weight_matrix_path=None, \n", + " kind='NearestNeighbour', n_neighbours=4,\n", + " info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-90., -89., -88., -87., -86., -85., -84., -83., -82.,\n", + " -81., -80., -79., -78., -77., -76., -75., -74., -73.,\n", + " -72., -71., -70., -69., -68., -67., -66., -65., -64.,\n", + " -63., -62., -61., -60., -59., -58., -57., -56., -55.,\n", + " -54., -53., -52., -51., -50., -49., -48., -47., -46.,\n", + " -45., -44., -43., -42., -41., -40., -39., -38., -37.,\n", + " -36., -35., -34., -33., -32., -31., -30., -29., -28.,\n", + " -27., -26., -25., -24., -23., -22., -21., -20., -19.,\n", + " -18., -17., -16., -15., -14., -13., -12., -11., -10.,\n", + " -9., -8., -7., -6., -5., -4., -3., -2., -1.,\n", + " 0., 1., 2., 3., 4., 5., 6., 7., 8.,\n", + " 9., 10., 11., 12., 13., 14., 15., 16., 17.,\n", + " 18., 19., 20., 21., 22., 23., 24., 25., 26.,\n", + " 27., 28., 29., 30., 31., 32., 33., 34., 35.,\n", + " 36., 37., 38., 39., 40., 41., 42., 43., 44.,\n", + " 45., 46., 47., 48., 49., 50., 51., 52., 53.,\n", + " 54., 55., 56., 57., 58., 59., 60., 61., 62.,\n", + " 63., 64., 65., 66., 67., 68., 69., 70., 71.,\n", + " 72., 73., 74., 75., 76., 77., 78., 79., 80.,\n", + " 81., 82., 83., 84., 85., 86., 87., 88., 89.,\n", + " 90.],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lat',),\n", + " 'long_name': 'latitude',\n", + " 'units': 'degrees_north',\n", + " 'standard_name': 'grid_latitude'}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-180. , -178.59375, -177.1875 , -175.78125,\n", + " -174.375 , -172.96875, -171.5625 , -170.15625,\n", + " -168.75 , -167.34375, -165.9375 , -164.53125,\n", + " -163.125 , -161.71875, -160.3125 , -158.90625,\n", + " -157.5 , -156.09375, -154.6875 , -153.28125,\n", + " -151.875 , -150.46875, -149.0625 , -147.65625,\n", + " -146.25 , -144.84375, -143.4375 , -142.03125,\n", + " -140.625 , -139.21875, -137.8125 , -136.40625,\n", + " -135. 
, -133.59375, -132.1875 , -130.78125,\n", + " -129.375 , -127.96875, -126.5625 , -125.15625,\n", + " -123.75 , -122.34375, -120.9375 , -119.53125,\n", + " -118.125 , -116.71875, -115.3125 , -113.90625,\n", + " -112.5 , -111.09375, -109.6875 , -108.28125,\n", + " -106.875 , -105.46875, -104.0625 , -102.65625,\n", + " -101.25 , -99.84375, -98.4375 , -97.03125,\n", + " -95.625 , -94.21875, -92.8125 , -91.40625,\n", + " -90. , -88.59375, -87.1875 , -85.78125,\n", + " -84.375 , -82.96875, -81.5625 , -80.15625,\n", + " -78.75 , -77.34375, -75.9375 , -74.53125,\n", + " -73.125 , -71.71875, -70.3125 , -68.90625,\n", + " -67.5 , -66.09375, -64.6875 , -63.28125,\n", + " -61.875 , -60.46875, -59.0625 , -57.65625,\n", + " -56.25 , -54.84375, -53.4375 , -52.03125,\n", + " -50.625 , -49.21875, -47.8125 , -46.40625,\n", + " -45. , -43.59375, -42.1875 , -40.78125,\n", + " -39.375 , -37.96875, -36.5625 , -35.15625,\n", + " -33.75 , -32.34375, -30.9375 , -29.53125,\n", + " -28.125 , -26.71875, -25.3125 , -23.90625,\n", + " -22.5 , -21.09375, -19.6875 , -18.28125,\n", + " -16.875 , -15.46875, -14.0625 , -12.65625,\n", + " -11.25 , -9.84375, -8.4375 , -7.03125,\n", + " -5.625 , -4.21875, -2.8125 , -1.40625,\n", + " 0. , 1.40625, 2.8125 , 4.21875,\n", + " 5.625 , 7.03125, 8.4375 , 9.84375,\n", + " 11.25 , 12.65625, 14.0625 , 15.46875,\n", + " 16.875 , 18.28125, 19.6875 , 21.09375,\n", + " 22.5 , 23.90625, 25.3125 , 26.71875,\n", + " 28.125 , 29.53125, 30.9375 , 32.34375,\n", + " 33.75 , 35.15625, 36.5625 , 37.96875,\n", + " 39.375 , 40.78125, 42.1875 , 43.59375,\n", + " 45. , 46.40625, 47.8125 , 49.21875,\n", + " 50.625 , 52.03125, 53.4375 , 54.84375,\n", + " 56.25 , 57.65625, 59.0625 , 60.46875,\n", + " 61.875 , 63.28125, 64.6875 , 66.09375,\n", + " 67.5 , 68.90625, 70.3125 , 71.71875,\n", + " 73.125 , 74.53125, 75.9375 , 77.34375,\n", + " 78.75 , 80.15625, 81.5625 , 82.96875,\n", + " 84.375 , 85.78125, 87.1875 , 88.59375,\n", + " 90. , 91.40625, 92.8125 , 94.21875,\n", + " 95.625 , 97.03125, 98.4375 , 99.84375,\n", + " 101.25 , 102.65625, 104.0625 , 105.46875,\n", + " 106.875 , 108.28125, 109.6875 , 111.09375,\n", + " 112.5 , 113.90625, 115.3125 , 116.71875,\n", + " 118.125 , 119.53125, 120.9375 , 122.34375,\n", + " 123.75 , 125.15625, 126.5625 , 127.96875,\n", + " 129.375 , 130.78125, 132.1875 , 133.59375,\n", + " 135. , 136.40625, 137.8125 , 139.21875,\n", + " 140.625 , 142.03125, 143.4375 , 144.84375,\n", + " 146.25 , 147.65625, 149.0625 , 150.46875,\n", + " 151.875 , 153.28125, 154.6875 , 156.09375,\n", + " 157.5 , 158.90625, 160.3125 , 161.71875,\n", + " 163.125 , 164.53125, 165.9375 , 167.34375,\n", + " 168.75 , 170.15625, 171.5625 , 172.96875,\n", + " 174.375 , 175.78125, 177.1875 , 178.59375,\n", + " 180. ],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lon',),\n", + " 'long_name': 'longitude',\n", + " 'units': 'degrees_east',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lon" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2. 
Without destination grid from file" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Create destination grid" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "lat_orig = 40\n", + "lon_orig = 0\n", + "inc_lat = 0.2\n", + "inc_lon = 0.2\n", + "n_lat = 20\n", + "n_lon = 20\n", + "dst_grid = create_nes(comm=None, info=False, projection='regular',\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([40.1, 40.3, 40.5, 40.7, 40.9, 41.1, 41.3, 41.5, 41.7, 41.9, 42.1,\n", + " 42.3, 42.5, 42.7, 42.9, 43.1, 43.3, 43.5, 43.7, 43.9])}" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0.1, 0.3, 0.5, 0.7, 0.9, 1.1, 1.3, 1.5, 1.7, 1.9, 2.1, 2.3, 2.5,\n", + " 2.7, 2.9, 3.1, 3.3, 3.5, 3.7, 3.9])}" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid.lon" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\tO3_all horizontal interpolation\n" + ] + } + ], + "source": [ + "interpolated_source_grid = source_grid.interpolate_horizontal(dst_grid, weight_matrix_path=None, \n", + " kind='NearestNeighbour', n_neighbours=4,\n", + " info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([40.1, 40.3, 40.5, 40.7, 40.9, 41.1, 41.3, 41.5, 41.7, 41.9, 42.1,\n", + " 42.3, 42.5, 42.7, 42.9, 43.1, 43.3, 43.5, 43.7, 43.9])}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0.1, 0.3, 0.5, 0.7, 0.9, 1.1, 1.3, 1.5, 1.7, 1.9, 2.1, 2.3, 2.5,\n", + " 2.7, 2.9, 3.1, 3.3, 3.5, 3.7, 3.9])}" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lon" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/4.Interpolation/4.3.Providentia_Interpolation.ipynb b/tutorials/4.Interpolation/4.3.Providentia_Interpolation.ipynb new file mode 100644 index 
0000000000000000000000000000000000000000..6618c680438a8d8e215476926fc1e2505ec51e87 --- /dev/null +++ b/tutorials/4.Interpolation/4.3.Providentia_Interpolation.ipynb @@ -0,0 +1,1656 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to interpolate horizontally using Providentia format" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read data to interpolate" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "source_path = '/esarchive/recon/ecmwf/cams61/cams61_chimere_ph2/eu/hourly/sconco3/sconco3_201804.nc'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid = open_netcdf(path=source_path, info=True)\n", + "source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'sconco3': {'data': None,\n", + " 'dimensions': ('time', 'lat', 'lon'),\n", + " 'coordinates': 'lat lon',\n", + " 'grid_mapping': 'crs',\n", + " 'units': 'ppb'}}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.variables" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[30. , 30.2, 30.4, 30.6, 30.8, 31. , 31.2, 31.4, 31.6,\n", + " 31.8, 32. , 32.2, 32.4, 32.6, 32.8, 33. , 33.2, 33.4,\n", + " 33.6, 33.8, 34. , 34.2, 34.4, 34.6, 34.8, 35. , 35.2,\n", + " 35.4, 35.6, 35.8, 36. , 36.2, 36.4, 36.6, 36.8, 37. ,\n", + " 37.2, 37.4, 37.6, 37.8, 38. , 38.2, 38.4, 38.6, 38.8,\n", + " 39. , 39.2, 39.4, 39.6, 39.8, 40. , 40.2, 40.4, 40.6,\n", + " 40.8, 41. , 41.2, 41.4, 41.6, 41.8, 42. , 42.2, 42.4,\n", + " 42.6, 42.8, 43. , 43.2, 43.4, 43.6, 43.8, 44. , 44.2,\n", + " 44.4, 44.6, 44.8, 45. , 45.2, 45.4, 45.6, 45.8, 46. ,\n", + " 46.2, 46.4, 46.6, 46.8, 47. , 47.2, 47.4, 47.6, 47.8,\n", + " 48. , 48.2, 48.4, 48.6, 48.8, 49. , 49.2, 49.4, 49.6,\n", + " 49.8, 50. , 50.2, 50.4, 50.6, 50.8, 51. , 51.2, 51.4,\n", + " 51.6, 51.8, 52. , 52.2, 52.4, 52.6, 52.8, 53. , 53.2,\n", + " 53.4, 53.6, 53.8, 54. , 54.2, 54.4, 54.6, 54.8, 55. ,\n", + " 55.2, 55.4, 55.6, 55.8, 56. , 56.2, 56.4, 56.6, 56.8,\n", + " 57. , 57.2, 57.4, 57.6, 57.8, 58. , 58.2, 58.4, 58.6,\n", + " 58.8, 59. , 59.2, 59.4, 59.6, 59.8, 60. , 60.2, 60.4,\n", + " 60.6, 60.8, 61. , 61.2, 61.4, 61.6, 61.8, 62. , 62.2,\n", + " 62.4, 62.6, 62.8, 63. , 63.2, 63.4, 63.6, 63.8, 64. ,\n", + " 64.2, 64.4, 64.6, 64.8, 65. , 65.2, 65.4, 65.6, 65.8,\n", + " 66. , 66.2, 66.4, 66.6, 66.8, 67. , 67.2, 67.4, 67.6,\n", + " 67.8, 68. , 68.2, 68.4, 68.6, 68.8, 69. , 69.2, 69.4,\n", + " 69.6, 69.8, 70. , 70.2, 70.4, 70.6, 70.8, 71. , 71.2,\n", + " 71.4, 71.6, 71.8, 72. 
],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('lat',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'Latitude',\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-25. , -24.8, -24.6, -24.4, -24.2, -24. , -23.8, -23.6,\n", + " -23.4, -23.2, -23. , -22.8, -22.6, -22.4, -22.2, -22. ,\n", + " -21.8, -21.6, -21.4, -21.2, -21. , -20.8, -20.6, -20.4,\n", + " -20.2, -20. , -19.8, -19.6, -19.4, -19.2, -19. , -18.8,\n", + " -18.6, -18.4, -18.2, -18. , -17.8, -17.6, -17.4, -17.2,\n", + " -17. , -16.8, -16.6, -16.4, -16.2, -16. , -15.8, -15.6,\n", + " -15.4, -15.2, -15. , -14.8, -14.6, -14.4, -14.2, -14. ,\n", + " -13.8, -13.6, -13.4, -13.2, -13. , -12.8, -12.6, -12.4,\n", + " -12.2, -12. , -11.8, -11.6, -11.4, -11.2, -11. , -10.8,\n", + " -10.6, -10.4, -10.2, -10. , -9.8, -9.6, -9.4, -9.2,\n", + " -9. , -8.8, -8.6, -8.4, -8.2, -8. , -7.8, -7.6,\n", + " -7.4, -7.2, -7. , -6.8, -6.6, -6.4, -6.2, -6. ,\n", + " -5.8, -5.6, -5.4, -5.2, -5. , -4.8, -4.6, -4.4,\n", + " -4.2, -4. , -3.8, -3.6, -3.4, -3.2, -3. , -2.8,\n", + " -2.6, -2.4, -2.2, -2. , -1.8, -1.6, -1.4, -1.2,\n", + " -1. , -0.8, -0.6, -0.4, -0.2, 0. , 0.2, 0.4,\n", + " 0.6, 0.8, 1. , 1.2, 1.4, 1.6, 1.8, 2. ,\n", + " 2.2, 2.4, 2.6, 2.8, 3. , 3.2, 3.4, 3.6,\n", + " 3.8, 4. , 4.2, 4.4, 4.6, 4.8, 5. , 5.2,\n", + " 5.4, 5.6, 5.8, 6. , 6.2, 6.4, 6.6, 6.8,\n", + " 7. , 7.2, 7.4, 7.6, 7.8, 8. , 8.2, 8.4,\n", + " 8.6, 8.8, 9. , 9.2, 9.4, 9.6, 9.8, 10. ,\n", + " 10.2, 10.4, 10.6, 10.8, 11. , 11.2, 11.4, 11.6,\n", + " 11.8, 12. , 12.2, 12.4, 12.6, 12.8, 13. , 13.2,\n", + " 13.4, 13.6, 13.8, 14. , 14.2, 14.4, 14.6, 14.8,\n", + " 15. , 15.2, 15.4, 15.6, 15.8, 16. , 16.2, 16.4,\n", + " 16.6, 16.8, 17. , 17.2, 17.4, 17.6, 17.8, 18. ,\n", + " 18.2, 18.4, 18.6, 18.8, 19. , 19.2, 19.4, 19.6,\n", + " 19.8, 20. , 20.2, 20.4, 20.6, 20.8, 21. , 21.2,\n", + " 21.4, 21.6, 21.8, 22. , 22.2, 22.4, 22.6, 22.8,\n", + " 23. , 23.2, 23.4, 23.6, 23.8, 24. , 24.2, 24.4,\n", + " 24.6, 24.8, 25. , 25.2, 25.4, 25.6, 25.8, 26. ,\n", + " 26.2, 26.4, 26.6, 26.8, 27. , 27.2, 27.4, 27.6,\n", + " 27.8, 28. , 28.2, 28.4, 28.6, 28.8, 29. , 29.2,\n", + " 29.4, 29.6, 29.8, 30. , 30.2, 30.4, 30.6, 30.8,\n", + " 31. , 31.2, 31.4, 31.6, 31.8, 32. , 32.2, 32.4,\n", + " 32.6, 32.8, 33. , 33.2, 33.4, 33.6, 33.8, 34. ,\n", + " 34.2, 34.4, 34.6, 34.8, 35. , 35.2, 35.4, 35.6,\n", + " 35.8, 36. , 36.2, 36.4, 36.6, 36.8, 37. , 37.2,\n", + " 37.4, 37.6, 37.8, 38. , 38.2, 38.4, 38.6, 38.8,\n", + " 39. , 39.2, 39.4, 39.6, 39.8, 40. , 40.2, 40.4,\n", + " 40.6, 40.8, 41. , 41.2, 41.4, 41.6, 41.8, 42. ,\n", + " 42.2, 42.4, 42.6, 42.8, 43. , 43.2, 43.4, 43.6,\n", + " 43.8, 44. , 44.2, 44.4, 44.6, 44.8, 45. 
],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('lon',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'Longitude',\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading sconco3 var (1/1)\n", + "Rank 000: Loaded sconco3 var ((720, 1, 211, 351))\n" + ] + } + ], + "source": [ + "source_grid.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    geometry
    FID
    0POLYGON ((-25.10000 29.90000, -24.90000 29.900...
    1POLYGON ((-24.90000 29.90000, -24.70000 29.900...
    2POLYGON ((-24.70000 29.90000, -24.50000 29.900...
    3POLYGON ((-24.50000 29.90000, -24.30000 29.900...
    4POLYGON ((-24.30000 29.90000, -24.10000 29.900...
    ......
    74056POLYGON ((44.10000 71.90000, 44.30000 71.90000...
    74057POLYGON ((44.30000 71.90000, 44.50000 71.90000...
    74058POLYGON ((44.50000 71.90000, 44.70000 71.90000...
    74059POLYGON ((44.70000 71.90000, 44.90000 71.90000...
    74060POLYGON ((44.90000 71.90000, 45.10000 71.90000...
    \n", + "

    74061 rows × 1 columns

    \n", + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((-25.10000 29.90000, -24.90000 29.900...\n", + "1 POLYGON ((-24.90000 29.90000, -24.70000 29.900...\n", + "2 POLYGON ((-24.70000 29.90000, -24.50000 29.900...\n", + "3 POLYGON ((-24.50000 29.90000, -24.30000 29.900...\n", + "4 POLYGON ((-24.30000 29.90000, -24.10000 29.900...\n", + "... ...\n", + "74056 POLYGON ((44.10000 71.90000, 44.30000 71.90000...\n", + "74057 POLYGON ((44.30000 71.90000, 44.50000 71.90000...\n", + "74058 POLYGON ((44.50000 71.90000, 44.70000 71.90000...\n", + "74059 POLYGON ((44.70000 71.90000, 44.90000 71.90000...\n", + "74060 POLYGON ((44.90000 71.90000, 45.10000 71.90000...\n", + "\n", + "[74061 rows x 1 columns]" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "source_grid.create_shapefile()\n", + "source_grid.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "source_grid.shapefile['sconco3'] = source_grid.variables['sconco3']['data'][0, 0, :, :].ravel()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "source_grid.write_shapefile('model')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Interpolation" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/load_nes.py:69: UserWarning: Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\n", + " warnings.warn(\"Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\")\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid_path = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.4/hourly/sconco3/sconco3_201804.nc'\n", + "dst_grid = open_netcdf(path=dst_grid_path, info=True)\n", + "dst_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, 47.76666641, 46.677778 ,\n", + " 48.721111 , 47.529167 , 47.05407 , 47.348056 ,\n", + " 47.973056 , 48.878611 , 48.106111 , 48.371111 ,\n", + " 48.334722 , 48.050833 , 47.838611 , 47.040277 ,\n", + " 47.06694444, 49.877778 , 50.629421 , 50.503333 ,\n", + " 41.695833 , 32.27000046, 80.05000305, 46.5475 ,\n", + " 46.813056 , 47.479722 , 47.049722 , 47.0675 ,\n", + " 47.18961391, -30.17254 , 16.86403 , 35.0381 ,\n", + " 49.73508444, 49.573394 , 49.066667 , 54.925556 ,\n", + " 52.802222 , 47.914722 , 53.166667 , 50.65 ,\n", + " 54.4368 , 47.80149841, 47.4165 , -70.666 ,\n", + " 54.746495 , 81.6 , 55.693588 , 72.58000183,\n", + " 56.290424 , 59.5 , 58.383333 , 39.54694 ,\n", + " 42.72056 , 39.87528 , 37.23722 , 43.43917 ,\n", + " 41.27417 , 42.31917 , 38.47278 , 39.08278 ,\n", + " 41.23889 , 41.39389 , 42.63472 , 37.05194 ,\n", + " 28.309 , 59.779167 , 60.53002 , 66.320278 ,\n", + " 67.97333333, 48.5 , 49.9 , 47.266667 ,\n", + " 43.616667 , 47.3 , 46.65 , 45. 
,\n", + " 45.8 , 48.633333 , 42.936667 , 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , 51.149617 , 38.366667 ,\n", + " 35.316667 , 46.966667 , 46.91 , -0.20194 ,\n", + " 51.939722 , 53.32583 , 45.8 , 44.183333 ,\n", + " 37.571111 , 42.805462 , -69.005 , 39.0319 ,\n", + " 24.2883 , 24.466941 , 36.53833389, 33.293917 ,\n", + " 55.37611111, 56.161944 , 57.135278 , 36.0722 ,\n", + " 52.083333 , 53.333889 , 51.541111 , 52.3 ,\n", + " 51.974444 , 58.38853 , 65.833333 , 62.783333 ,\n", + " 78.90715 , 59. , 69.45 , 59.2 ,\n", + " 60.372386 , -72.0117 , 59.2 , -41.40819168,\n", + " -77.83200073, -45.0379982 , 51.814408 , 50.736444 ,\n", + " 54.753894 , 54.15 , 43.4 , 71.58616638,\n", + " 63.85 , 67.883333 , 57.394 , 57.1645 ,\n", + " 57.9525 , 56.0429 , 60.0858 , 57.816667 ,\n", + " 64.25 , 59.728 , 45.566667 , 46.428611 ,\n", + " 46.299444 , 48.933333 , 49.15 , 49.05 ,\n", + " 47.96 , 71.32301331, 40.12498 , 19.53623009,\n", + " -89.99694824, 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, 1.67666664e+01,\n", + " 1.29722220e+01, 1.59422220e+01, 9.92666700e+00,\n", + " 1.29579400e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 5.27897222e+00, 2.61000833e+00,\n", + " 2.96488600e+00, -3.20416700e+00, -7.87000000e+00,\n", + " -3.71305600e+00, -8.07500000e-01, -4.77444400e+00,\n", + " -3.03305600e+00, -3.20500000e+00, -1.75333300e+00,\n", + " 
1.79444000e-01, 1.46305600e+00, -4.69146200e+00,\n", + " 2.92778000e-01, -3.24290000e+00, 1.12194400e+00,\n", + " 1.08223000e+00, -1.18531900e+00, -1.43822800e+00,\n", + " 2.30833330e+01, 2.56666670e+01, 1.95833330e+01,\n", + " 1.63200000e+01, 1.00318100e+02, -1.02444440e+01,\n", + " -9.89944000e+00, 8.63333300e+00, 1.07000000e+01,\n", + " 1.26597220e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 1.42184000e+01,\n", + " 6.56666700e+00, 6.27722200e+00, 5.85361100e+00,\n", + " 4.50000000e+00, 4.92361100e+00, 8.25200000e+00,\n", + " 1.39166670e+01, 8.88333300e+00, 1.18866800e+01,\n", + " 1.15333330e+01, 3.00333330e+01, 5.20000000e+00,\n", + " 1.10781420e+01, 2.53510000e+00, 9.51666700e+00,\n", + " 1.74870804e+02, 1.66660004e+02, 1.69684006e+02,\n", + " 2.19724190e+01, 1.57395000e+01, 1.75342640e+01,\n", + " 2.20666670e+01, 2.19500000e+01, 1.28918823e+02,\n", + " 1.53333330e+01, 2.10666670e+01, 1.19140000e+01,\n", + " 1.47825000e+01, 1.24030000e+01, 1.31480000e+01,\n", + " 1.75052800e+01, 1.55666670e+01, 1.97666670e+01,\n", + " 1.54720000e+01, 1.48666670e+01, 1.50033330e+01,\n", + " 1.45386110e+01, 1.95833330e+01, 2.02833330e+01,\n", + " 2.22666670e+01, 1.78605560e+01, -1.56611465e+02,\n", + " -1.05236800e+02, -1.55576157e+02, -2.47999992e+01,\n", + " -1.24151001e+02, 1.03515700e+02, 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dst_grid.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\tsconco3 horizontal interpolation\n", + "sconco3\n" + ] + } + ], + "source": [ + "interpolated_source_grid = source_grid.interpolate_horizontal(dst_grid, weight_matrix_path=None, \n", + " kind='NearestNeighbour', n_neighbours=4,\n", + " info=True, to_providentia=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, 47.76666641, 46.677778 ,\n", + " 48.721111 , 47.529167 , 47.05407 , 47.348056 ,\n", + " 47.973056 , 48.878611 , 48.106111 , 48.371111 ,\n", + " 48.334722 , 48.050833 , 47.838611 , 47.040277 ,\n", + " 47.06694444, 49.877778 , 50.629421 , 50.503333 ,\n", + " 41.695833 , 32.27000046, 80.05000305, 46.5475 ,\n", + " 46.813056 , 47.479722 , 47.049722 , 47.0675 ,\n", + " 47.18961391, -30.17254 , 16.86403 , 35.0381 ,\n", + " 49.73508444, 49.573394 , 49.066667 , 54.925556 ,\n", + " 52.802222 , 47.914722 , 53.166667 , 50.65 ,\n", + " 54.4368 , 47.80149841, 47.4165 , -70.666 ,\n", + " 54.746495 , 81.6 , 55.693588 , 72.58000183,\n", + " 56.290424 , 59.5 , 58.383333 , 39.54694 ,\n", + " 42.72056 , 39.87528 , 37.23722 , 43.43917 ,\n", + " 41.27417 , 42.31917 , 38.47278 , 39.08278 ,\n", + " 41.23889 , 41.39389 , 42.63472 , 37.05194 ,\n", + " 28.309 , 59.779167 , 60.53002 , 66.320278 ,\n", + " 67.97333333, 48.5 , 49.9 , 47.266667 ,\n", + " 43.616667 , 47.3 , 46.65 , 45. 
,\n", + " 45.8 , 48.633333 , 42.936667 , 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , 51.149617 , 38.366667 ,\n", + " 35.316667 , 46.966667 , 46.91 , -0.20194 ,\n", + " 51.939722 , 53.32583 , 45.8 , 44.183333 ,\n", + " 37.571111 , 42.805462 , -69.005 , 39.0319 ,\n", + " 24.2883 , 24.466941 , 36.53833389, 33.293917 ,\n", + " 55.37611111, 56.161944 , 57.135278 , 36.0722 ,\n", + " 52.083333 , 53.333889 , 51.541111 , 52.3 ,\n", + " 51.974444 , 58.38853 , 65.833333 , 62.783333 ,\n", + " 78.90715 , 59. , 69.45 , 59.2 ,\n", + " 60.372386 , -72.0117 , 59.2 , -41.40819168,\n", + " -77.83200073, -45.0379982 , 51.814408 , 50.736444 ,\n", + " 54.753894 , 54.15 , 43.4 , 71.58616638,\n", + " 63.85 , 67.883333 , 57.394 , 57.1645 ,\n", + " 57.9525 , 56.0429 , 60.0858 , 57.816667 ,\n", + " 64.25 , 59.728 , 45.566667 , 46.428611 ,\n", + " 46.299444 , 48.933333 , 49.15 , 49.05 ,\n", + " 47.96 , 71.32301331, 40.12498 , 19.53623009,\n", + " -89.99694824, 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, 1.67666664e+01,\n", + " 1.29722220e+01, 1.59422220e+01, 9.92666700e+00,\n", + " 1.29579400e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 5.27897222e+00, 2.61000833e+00,\n", + " 2.96488600e+00, -3.20416700e+00, -7.87000000e+00,\n", + " -3.71305600e+00, -8.07500000e-01, -4.77444400e+00,\n", + " -3.03305600e+00, -3.20500000e+00, -1.75333300e+00,\n", + " 1.79444000e-01, 1.46305600e+00, -4.69146200e+00,\n", + " 2.92778000e-01, -3.24290000e+00, 1.12194400e+00,\n", + " 1.08223000e+00, -1.18531900e+00, -1.43822800e+00,\n", + " 2.30833330e+01, 2.56666670e+01, 1.95833330e+01,\n", + " 1.63200000e+01, 1.00318100e+02, -1.02444440e+01,\n", + " 
-9.89944000e+00, 8.63333300e+00, 1.07000000e+01,\n", + " 1.26597220e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 1.42184000e+01,\n", + " 6.56666700e+00, 6.27722200e+00, 5.85361100e+00,\n", + " 4.50000000e+00, 4.92361100e+00, 8.25200000e+00,\n", + " 1.39166670e+01, 8.88333300e+00, 1.18866800e+01,\n", + " 1.15333330e+01, 3.00333330e+01, 5.20000000e+00,\n", + " 1.10781420e+01, 2.53510000e+00, 9.51666700e+00,\n", + " 1.74870804e+02, 1.66660004e+02, 1.69684006e+02,\n", + " 2.19724190e+01, 1.57395000e+01, 1.75342640e+01,\n", + " 2.20666670e+01, 2.19500000e+01, 1.28918823e+02,\n", + " 1.53333330e+01, 2.10666670e+01, 1.19140000e+01,\n", + " 1.47825000e+01, 1.24030000e+01, 1.31480000e+01,\n", + " 1.75052800e+01, 1.55666670e+01, 1.97666670e+01,\n", + " 1.54720000e+01, 1.48666670e+01, 1.50033330e+01,\n", + " 1.45386110e+01, 1.95833330e+01, 2.02833330e+01,\n", + " 2.22666670e+01, 1.78605560e+01, -1.56611465e+02,\n", + " -1.05236800e+02, -1.55576157e+02, -2.47999992e+01,\n", + " -1.24151001e+02, 1.03515700e+02, 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20)}" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POINT (-56.62478 -64.24006)\n", + "1 POINT (-68.31069 -54.84846)\n", + "2 POINT (16.76667 47.76667)\n", + "3 POINT (12.97222 46.67778)\n", + "4 POINT (15.94222 48.72111)\n", + ".. ...\n", + "163 POINT (-155.57616 19.53623)\n", + "164 POINT (-24.80000 -89.99695)\n", + "165 POINT (-124.15100 41.05410)\n", + "166 POINT (103.51570 21.57310)\n", + "167 POINT (18.48968 -34.35348)\n", + "\n", + "[168 rows x 1 columns]" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.create_shapefile()\n", + "interpolated_source_grid.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "interpolated_source_grid.shapefile['sconco3'] = interpolated_source_grid.variables['sconco3']['data'][:, 0].ravel()" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "interpolated_source_grid.write_shapefile('interpolated_points')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Compare outputs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### From NES" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[53.66018016, 53.83203477, 53.63669104, ..., 53.2461802 ,\n", + " 53.21874343, 53.10930358],\n", + " [53.72655496, 53.84765007, 53.58983273, ..., 53.39846363,\n", + " 53.19921385, 52.96872593],\n", + " [39.71929631, 40.01980784, 39.58707525, ..., 54.41383702,\n", + " 53.30565688, 51.58627296],\n", + " ...,\n", + " [50.31659288, 50.36346616, 50.3400049 , ..., 50.5349958 ,\n", + " 50.66003373, 50.60927108],\n", + " [38.61718492, 39.64453277, 40.28125328, ..., 47.66795862,\n", + " 46.42186248, 45.68748656],\n", + " [44.8632812 , 44.78906232, 44.64843684, ..., 45.16406056,\n", + " 45.16796676, 45.30077914]])" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "interpolated_source_grid.variables['sconco3']['data']" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes_providentia.py:585: UserWarning: WARNING!!! Providentia datasets cannot be written in parallel yet. Changing to serial mode.\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "interpolated_source_grid.to_netcdf('interpolated_source_grid.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + "\n", + "Dimensions: (time: 720, station: 168, model_latitude: 211, model_longitude: 351, grid_edge: 1125)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + " * station (station) float64 0.0 1.0 2.0 ... 165.0 166.0 167.0\n", + "Dimensions without coordinates: model_latitude, model_longitude, grid_edge\n", + "Data variables:\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " sconco3 (station, time) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('interpolated_source_grid.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### From Providentia IT" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "tool_interpolated_grid_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/load_nes.py:69: UserWarning: Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\n", + " warnings.warn(\"Parallel method cannot be 'Y' to create points NES. Setting it to 'X'\")\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tool_interpolated_grid = open_netcdf(path=tool_interpolated_grid_path, info=True)\n", + "tool_interpolated_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading sconco3 var (1/2)\n", + "Rank 000: Loaded sconco3 var ((175, 720))\n", + "Rank 000: Loading station_reference var (2/2)\n", + "Rank 000: Loaded station_reference var ((175,))\n" + ] + } + ], + "source": [ + "tool_interpolated_grid.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "masked_array(\n", + " data=[[nan, nan, nan, ..., nan, nan, nan],\n", + " [nan, nan, nan, ..., nan, nan, nan],\n", + " [nan, nan, nan, ..., nan, nan, nan],\n", + " ...,\n", + " [nan, nan, nan, ..., nan, nan, nan],\n", + " [nan, nan, nan, ..., nan, nan, nan],\n", + " [nan, nan, nan, ..., nan, nan, nan]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32)" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tool_interpolated_grid.variables['sconco3']['data']" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POINT (-56.62478 -64.24006)\n", + "1 POINT (-68.31069 -54.84846)\n", + "2 POINT (-65.60083 -22.10333)\n", + "3 POINT (-63.88194 -31.66861)\n", + "4 POINT (16.76667 47.76667)\n", + ".. ...\n", + "170 POINT (-155.57616 19.53623)\n", + "171 POINT (-24.80000 -89.99695)\n", + "172 POINT (-124.15100 41.05410)\n", + "173 POINT (103.51570 21.57310)\n", + "174 POINT (18.48968 -34.35348)\n", + "\n", + "[175 rows x 1 columns]" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tool_interpolated_grid.create_shapefile()\n", + "tool_interpolated_grid.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "tool_interpolated_grid.shapefile['sconco3'] = tool_interpolated_grid.variables['sconco3']['data'][:, 0].ravel()" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [], + "source": [ + "tool_interpolated_grid.write_shapefile('providentia_it_points')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/3-add_time_bnds.ipynb b/tutorials/5.Others/5.1.Add_Time_Bounds.ipynb similarity index 82% rename from Jupyter_notebooks/3-add_time_bnds.ipynb rename to tutorials/5.Others/5.1.Add_Time_Bounds.ipynb index 9c08016b91387c8db5d3efaeef8d56d2edbd04be..1ed4e2f29a02d9a7b08cc645a9cb39a2d8606e5a 100644 --- a/Jupyter_notebooks/3-add_time_bnds.ipynb +++ b/tutorials/5.Others/5.1.Add_Time_Bounds.ipynb @@ -40,24 +40,11 @@ "cell_type": "code", "execution_count": 3, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", - " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "array = np.array([[datetime.datetime(year=2020, month=2, day=20), \n", " datetime.datetime(year=2020, month=2, day=15)]])\n", - "nessy.set_time_bnds(array)\n", - "nessy.time_bnds" + "nessy.set_time_bnds(array)" ] }, { @@ -78,15 +65,6 @@ "nessy.load()" ] }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "nessy.to_netcdf('nc_serial_test.nc')" - ] - }, { "cell_type": "markdown", "metadata": {}, diff --git a/tutorials/5.Others/5.2.Add_Coordinates_Bounds.ipynb b/tutorials/5.Others/5.2.Add_Coordinates_Bounds.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..537349001ae305b0d7ac343a269a5bb13b18ad22 --- /dev/null +++ b/tutorials/5.Others/5.2.Add_Coordinates_Bounds.ipynb @@ -0,0 +1,236 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to add coordinates bounds" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import datetime\n", + "import numpy as np\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. 
Set coordinates bounds" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "test_path = \"/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc\"\n", + "nessy = open_netcdf(path=test_path, info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "nessy.create_spatial_bounds()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3_all var (1/1)\n", + "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" + ] + } + ], + "source": [ + "nessy.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Explore variables" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[[16.2203979 , 16.30306824, 16.48028979, 16.39739715],\n", + " [16.30306855, 16.3853609 , 16.56280424, 16.48029011],\n", + " [16.38536121, 16.46727425, 16.64493885, 16.56280455],\n", + " ...,\n", + " [16.46727269, 16.38535964, 16.56280298, 16.64493728],\n", + " [16.3853609 , 16.30306855, 16.48029011, 16.56280424],\n", + " [16.30306824, 16.2203979 , 16.39739715, 16.48028979]],\n", + "\n", + " [[16.39739783, 16.48029047, 16.65746762, 16.57435251],\n", + " [16.48029079, 16.56280491, 16.74020402, 16.65746794],\n", + " [16.56280523, 16.64493952, 16.82256006, 16.74020434],\n", + " ...,\n", + " [16.64493796, 16.56280366, 16.74020276, 16.82255849],\n", + " [16.56280491, 16.48029079, 16.65746794, 16.74020402],\n", + " [16.48029047, 16.39739783, 16.57435251, 16.65746762]],\n", + "\n", + " [[16.57435149, 16.65746661, 16.83459876, 16.751261 ],\n", + " [16.65746692, 16.74020301, 16.91755729, 16.83459908],\n", + " [16.74020332, 16.82255904, 17.00013494, 16.91755761],\n", + " ...,\n", + " [16.82255748, 16.74020175, 16.91755603, 17.00013337],\n", + " [16.74020301, 16.65746692, 16.83459908, 16.91755729],\n", + " [16.65746661, 16.57435149, 16.751261 , 16.83459876]],\n", + "\n", + " ...,\n", + "\n", + " [[58.19210948, 58.34380497, 58.44964444, 58.29776032],\n", + " [58.34380555, 58.49539321, 58.6014247 , 58.44964502],\n", + " [58.49539378, 58.64687141, 58.75309835, 58.60142528],\n", + " ...,\n", + " [58.64686852, 58.49539089, 58.60142239, 58.75309546],\n", + " [58.49539321, 58.34380555, 58.44964502, 58.6014247 ],\n", + " [58.34380497, 58.19210948, 58.29776032, 58.44964444]],\n", + "\n", + " [[58.29776072, 58.44964485, 58.55466327, 58.40259426],\n", + " [58.44964543, 58.6014251 , 58.7066318 , 58.55466385],\n", + " [58.60142568, 58.75309876, 58.85849715, 58.70663238],\n", + " ...,\n", + " [58.75309587, 58.60142279, 58.70662948, 58.85849425],\n", + " [58.6014251 , 58.44964543, 58.55466385, 58.7066318 ],\n", + " [58.44964485, 58.29776072, 58.40259426, 58.55466327]],\n", + "\n", + " [[58.40259366, 58.55466267, 58.65885172, 58.50660166],\n", + " [58.55466325, 58.7066312 , 58.81100467, 58.6588523 ],\n", + " [58.70663178, 58.85849655, 58.96305787, 58.81100525],\n", + " ...,\n", + " [58.85849365, 58.70662888, 58.81100235, 58.96305497],\n", + " [58.7066312 , 58.55466325, 58.6588523 , 58.81100467],\n", + " [58.55466267, 58.40259366, 58.50660166, 58.65885172]]])" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.lat_bnds" + ] + }, + { + "cell_type": "code", + 
"execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[[-22.21497021, -22.05071303, -22.14733617, -22.31199395],\n", + " [-22.0507124 , -21.88618013, -21.9824008 , -22.14733554],\n", + " [-21.8861795 , -21.72137239, -21.81718872, -21.98240017],\n", + " ...,\n", + " [ 41.72137553, 41.88618264, 41.98240332, 41.81719187],\n", + " [ 41.88618013, 42.0507124 , 42.14733554, 41.9824008 ],\n", + " [ 42.05071303, 42.21497021, 42.31199395, 42.14733617]],\n", + "\n", + " [[-22.31199432, -22.14733654, -22.24413665, -22.4091946 ],\n", + " [-22.14733591, -21.98240117, -22.07879923, -22.24413602],\n", + " [-21.98240054, -21.81718908, -21.91318321, -22.0787986 ],\n", + " ...,\n", + " [ 41.81719223, 41.98240369, 42.07880176, 41.91318637],\n", + " [ 41.98240117, 42.14733591, 42.24413602, 42.07879923],\n", + " [ 42.14733654, 42.31199432, 42.4091946 , 42.24413665]],\n", + "\n", + " [[-22.40919405, -22.2441361 , -22.34111548, -22.50657316],\n", + " [-22.24413547, -22.07879868, -22.17537644, -22.34111485],\n", + " [-22.07879805, -21.91318266, -22.00935688, -22.1753758 ],\n", + " ...,\n", + " [ 41.91318582, 42.07880121, 42.17537897, 42.00936005],\n", + " [ 42.07879868, 42.24413547, 42.34111485, 42.17537644],\n", + " [ 42.2441361 , 42.40919405, 42.50657316, 42.34111548]],\n", + "\n", + " ...,\n", + "\n", + " [[-67.50645709, -67.32583243, -67.64966627, -67.82912696],\n", + " [-67.32583174, -67.14410165, -67.46910621, -67.64966558],\n", + " [-67.14410095, -66.96124932, -67.28743133, -67.46910552],\n", + " ...,\n", + " [ 86.96125282, 87.14410443, 87.46910897, 87.28743481],\n", + " [ 87.14410165, 87.32583174, 87.64966558, 87.46910621],\n", + " [ 87.32583243, 87.50645709, 87.82912696, 87.64966627]],\n", + "\n", + " [[-67.82912819, -67.64966751, -67.97544812, -68.1537229 ],\n", + " [-67.64966682, -67.46910745, -67.79608108, -67.97544744],\n", + " [-67.46910676, -67.28743258, -67.6156063 , -67.79608039],\n", + " ...,\n", + " [ 87.28743606, 87.46911022, 87.79608382, 87.61560976],\n", + " [ 87.46910745, 87.64966682, 87.97544744, 87.79608108],\n", + " [ 87.64966751, 87.82912819, 88.1537229 , 87.97544812]],\n", + "\n", + " [[-68.15372103, -67.97544625, -68.30317799, -68.48024479],\n", + " [-67.97544557, -67.7960792 , -68.12502637, -68.30317732],\n", + " [-67.79607851, -67.61560442, -67.94577447, -68.12502569],\n", + " ...,\n", + " [ 87.61560787, 87.79608195, 88.1250291 , 87.9457779 ],\n", + " [ 87.7960792 , 87.97544557, 88.30317732, 88.12502637],\n", + " [ 87.97544625, 88.15372103, 88.48024479, 88.30317799]]])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.lon_bnds" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.6.8 64-bit", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + }, + "vscode": { + "interpreter": { + "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/5.Others/5.3.Create_Shapefiles.ipynb b/tutorials/5.Others/5.3.Create_Shapefiles.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..6fe590da742d02b786b853f037cf007edfe3e2fb --- /dev/null +++ b/tutorials/5.Others/5.3.Create_Shapefiles.ipynb @@ -0,0 +1,504 @@ 
+{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create and save shapefiles" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. From grids that have already been created" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "grid_path = '/gpfs/scratch/bsc32/bsc32538/original_files/CAMS_MONARCH_d01_2022070412.nc'\n", + "grid = open_netcdf(path=grid_path, info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((-22.16553 16.27358, -22.04224 16.335...\n", + "1 POLYGON ((-22.04224 16.33554, -21.91879 16.397...\n", + "2 POLYGON ((-21.91879 16.39727, -21.79523 16.458...\n", + "3 POLYGON ((-21.79523 16.45880, -21.67145 16.520...\n", + "4 POLYGON ((-21.67145 16.52011, -21.54755 16.581...\n", + "... ...\n", + "168582 POLYGON ((87.45258 59.04235, 87.58887 58.92854...\n", + "168583 POLYGON ((87.58887 58.92854, 87.72452 58.81466...\n", + "168584 POLYGON ((87.72452 58.81466, 87.85956 58.70073...\n", + "168585 POLYGON ((87.85956 58.70073, 87.99396 58.58673...\n", + "168586 POLYGON ((87.99396 58.58673, 88.12778 58.47268...\n", + "\n", + "[168587 rows x 1 columns]" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grid.create_shapefile()\n", + "grid.shapefile" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Add data to shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3 var (1/1)\n", + "Rank 000: Loaded O3 var ((109, 24, 361, 467))\n" + ] + } + ], + "source": [ + "grid.keep_vars(['O3'])\n", + "grid.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "# Add data of last timestep\n", + "grid.shapefile['O3'] = grid.variables['O3']['data'][-1, -1, :].ravel()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry O3\n", + "FID \n", + "0 POLYGON ((-22.16553 16.27358, -22.04224 16.335... 0.037919\n", + "1 POLYGON ((-22.04224 16.33554, -21.91879 16.397... 0.038064\n", + "2 POLYGON ((-21.91879 16.39727, -21.79523 16.458... 0.038086\n", + "3 POLYGON ((-21.79523 16.45880, -21.67145 16.520... 0.038092\n", + "4 POLYGON ((-21.67145 16.52011, -21.54755 16.581... 0.038098\n", + "... ... ...\n", + "168582 POLYGON ((87.45258 59.04235, 87.58887 58.92854... 0.026542\n", + "168583 POLYGON ((87.58887 58.92854, 87.72452 58.81466... 0.026310\n", + "168584 POLYGON ((87.72452 58.81466, 87.85956 58.70073... 0.027037\n", + "168585 POLYGON ((87.85956 58.70073, 87.99396 58.58673... 0.027271\n", + "168586 POLYGON ((87.99396 58.58673, 88.12778 58.47268... 0.026327\n", + "\n", + "[168587 rows x 2 columns]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grid.shapefile" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Save shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "grid.write_shapefile('rotated_1')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. From grids created from scratch" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "centre_lat = 51\n", + "centre_lon = 10\n", + "west_boundary = -35\n", + "south_boundary = -27\n", + "inc_rlat = 0.2\n", + "inc_rlon = 0.2\n", + "grid = create_nes(comm=None, info=False, projection='rotated',\n", + " centre_lat=centre_lat, centre_lon=centre_lon,\n", + " west_boundary=west_boundary, south_boundary=south_boundary,\n", + " inc_rlat=inc_rlat, inc_rlon=inc_rlon)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((-22.21497 16.22040, -22.05071 16.303...\n", + "1 POLYGON ((-22.05071 16.30307, -21.88618 16.385...\n", + "2 POLYGON ((-21.88618 16.38536, -21.72137 16.467...\n", + "3 POLYGON ((-21.72137 16.46727, -21.55629 16.548...\n", + "4 POLYGON ((-21.55629 16.54881, -21.39094 16.629...\n", + "... ...\n", + "95116 POLYGON ((87.25127 59.16191, 87.43401 59.01025...\n", + "95117 POLYGON ((87.43401 59.01025, 87.61561 58.85850...\n", + "95118 POLYGON ((87.61561 58.85850, 87.79608 58.70663...\n", + "95119 POLYGON ((87.79608 58.70663, 87.97545 58.55466...\n", + "95120 POLYGON ((87.97545 58.55466, 88.15372 58.40259...\n", + "\n", + "[95121 rows x 1 columns]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grid.create_shapefile()\n", + "grid.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "grid.write_shapefile('rotated_2')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/5.Others/5.4.Spatial_Join.ipynb b/tutorials/5.Others/5.4.Spatial_Join.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b00e62288dc5f117c373579760892b9c5d86452a --- /dev/null +++ b/tutorials/5.Others/5.4.Spatial_Join.ipynb @@ -0,0 +1,892 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to make spatial joins" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr\n", + "import geopandas as gpd\n", + "import pandas as pd\n", + "\n", + "pd.options.mode.chained_assignment = None" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Method 1: Centroids" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((1.80000 41.10000, 2.00000 41.10000, ...\n", + "1 POLYGON ((2.00000 41.10000, 2.20000 41.10000, ...\n", + "2 POLYGON ((2.20000 41.10000, 2.40000 41.10000, ...\n", + "3 POLYGON ((2.40000 41.10000, 2.60000 41.10000, ...\n", + "4 POLYGON ((2.60000 41.10000, 2.80000 41.10000, ...\n", + "... ...\n", + "9995 POLYGON ((20.80000 60.90000, 21.00000 60.90000...\n", + "9996 POLYGON ((21.00000 60.90000, 21.20000 60.90000...\n", + "9997 POLYGON ((21.20000 60.90000, 21.40000 60.90000...\n", + "9998 POLYGON ((21.40000 60.90000, 21.60000 60.90000...\n", + "9999 POLYGON ((21.60000 60.90000, 21.80000 60.90000...\n", + "\n", + "[10000 rows x 1 columns]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lat_orig = 41.1\n", + "lon_orig = 1.8\n", + "inc_lat = 0.2\n", + "inc_lon = 0.2\n", + "n_lat = 100\n", + "n_lon = 100\n", + "coordinates = create_nes(comm=None, info=False, projection='regular',\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)\n", + "coordinates.create_shapefile()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "mask_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/timezones_2021c/timezones_2021c.shp'\n", + "mask = gpd.read_file(mask_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py:2190: UserWarning: Geometry is in a geographic CRS. Results from 'centroid' are likely incorrect. Use 'GeoSeries.to_crs()' to re-project geometries to a projected CRS before this operation.\n", + "\n", + " shapefile_aux.geometry = self.shapefile.centroid\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 15.2 s, sys: 680 ms, total: 15.9 s\n", + "Wall time: 15.9 s\n" + ] + } + ], + "source": [ + "%time coordinates.spatial_join(mask)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry tzid\n", + "FID \n", + "0 POLYGON ((1.80000 41.10000, 2.00000 41.10000, ... Europe/Madrid\n", + "1 POLYGON ((2.00000 41.10000, 2.20000 41.10000, ... Europe/Madrid\n", + "2 POLYGON ((2.20000 41.10000, 2.40000 41.10000, ... Europe/Madrid\n", + "3 POLYGON ((2.40000 41.10000, 2.60000 41.10000, ... NaN\n", + "4 POLYGON ((2.60000 41.10000, 2.80000 41.10000, ... NaN\n", + "... ... ...\n", + "9995 POLYGON ((20.80000 60.90000, 21.00000 60.90000... Europe/Helsinki\n", + "9996 POLYGON ((21.00000 60.90000, 21.20000 60.90000... Europe/Helsinki\n", + "9997 POLYGON ((21.20000 60.90000, 21.40000 60.90000... Europe/Helsinki\n", + "9998 POLYGON ((21.40000 60.90000, 21.60000 60.90000... Europe/Helsinki\n", + "9999 POLYGON ((21.60000 60.90000, 21.80000 60.90000... Europe/Helsinki\n", + "\n", + "[10000 rows x 2 columns]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "coordinates.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "coordinates.shapefile.to_file('spatial_join_method_1')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Method 2: Nearest centroids" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((1.80000 41.10000, 2.00000 41.10000, ...\n", + "1 POLYGON ((2.00000 41.10000, 2.20000 41.10000, ...\n", + "2 POLYGON ((2.20000 41.10000, 2.40000 41.10000, ...\n", + "3 POLYGON ((2.40000 41.10000, 2.60000 41.10000, ...\n", + "4 POLYGON ((2.60000 41.10000, 2.80000 41.10000, ...\n", + ".. ...\n", + "95 POLYGON ((2.80000 42.90000, 3.00000 42.90000, ...\n", + "96 POLYGON ((3.00000 42.90000, 3.20000 42.90000, ...\n", + "97 POLYGON ((3.20000 42.90000, 3.40000 42.90000, ...\n", + "98 POLYGON ((3.40000 42.90000, 3.60000 42.90000, ...\n", + "99 POLYGON ((3.60000 42.90000, 3.80000 42.90000, ...\n", + "\n", + "[100 rows x 1 columns]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lat_orig = 41.1\n", + "lon_orig = 1.8\n", + "inc_lat = 0.2\n", + "inc_lon = 0.2\n", + "n_lat = 10\n", + "n_lon = 10\n", + "coordinates = create_nes(comm=None, info=False, projection='regular',\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)\n", + "coordinates.create_shapefile()" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "mask_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/timezones_2021c/timezones_2021c.shp'\n", + "mask = gpd.read_file(mask_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "#%time coordinates.spatial_join(mask, method='nearest')" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((1.80000 41.10000, 2.00000 41.10000, ...\n", + "1 POLYGON ((2.00000 41.10000, 2.20000 41.10000, ...\n", + "2 POLYGON ((2.20000 41.10000, 2.40000 41.10000, ...\n", + "3 POLYGON ((2.40000 41.10000, 2.60000 41.10000, ...\n", + "4 POLYGON ((2.60000 41.10000, 2.80000 41.10000, ...\n", + ".. ...\n", + "95 POLYGON ((2.80000 42.90000, 3.00000 42.90000, ...\n", + "96 POLYGON ((3.00000 42.90000, 3.20000 42.90000, ...\n", + "97 POLYGON ((3.20000 42.90000, 3.40000 42.90000, ...\n", + "98 POLYGON ((3.40000 42.90000, 3.60000 42.90000, ...\n", + "99 POLYGON ((3.60000 42.90000, 3.80000 42.90000, ...\n", + "\n", + "[100 rows x 1 columns]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "coordinates.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "coordinates.shapefile.to_file('spatial_join_method_2')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Method 3: Areas intersection" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry\n", + "FID \n", + "0 POLYGON ((1.80000 41.10000, 2.00000 41.10000, ...\n", + "1 POLYGON ((2.00000 41.10000, 2.20000 41.10000, ...\n", + "2 POLYGON ((2.20000 41.10000, 2.40000 41.10000, ...\n", + "3 POLYGON ((2.40000 41.10000, 2.60000 41.10000, ...\n", + "4 POLYGON ((2.60000 41.10000, 2.80000 41.10000, ...\n", + "... ...\n", + "9995 POLYGON ((20.80000 60.90000, 21.00000 60.90000...\n", + "9996 POLYGON ((21.00000 60.90000, 21.20000 60.90000...\n", + "9997 POLYGON ((21.20000 60.90000, 21.40000 60.90000...\n", + "9998 POLYGON ((21.40000 60.90000, 21.60000 60.90000...\n", + "9999 POLYGON ((21.60000 60.90000, 21.80000 60.90000...\n", + "\n", + "[10000 rows x 1 columns]" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lat_orig = 41.1\n", + "lon_orig = 1.8\n", + "inc_lat = 0.2\n", + "inc_lon = 0.2\n", + "n_lat = 100\n", + "n_lon = 100\n", + "coordinates = create_nes(comm=None, info=False, projection='regular',\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)\n", + "coordinates.create_shapefile()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "mask_path = '/esarchive/scratch/avilanova/software/NES/tutorials/data/timezones_2021c/timezones_2021c.shp'\n", + "mask = gpd.read_file(mask_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: 9270 intersected areas found\n", + "CPU times: user 8min 19s, sys: 9.29 s, total: 8min 29s\n", + "Wall time: 8min 30s\n" + ] + } + ], + "source": [ + "%time coordinates.spatial_join(mask, method='intersection')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    " + ], + "text/plain": [ + " geometry tzid\n", + "FID \n", + "0 POLYGON ((1.80000 41.10000, 2.00000 41.10000, ... Europe/Madrid\n", + "1 POLYGON ((2.00000 41.10000, 2.20000 41.10000, ... Europe/Madrid\n", + "2 POLYGON ((2.20000 41.10000, 2.40000 41.10000, ... Europe/Madrid\n", + "3 POLYGON ((2.40000 41.10000, 2.60000 41.10000, ... Europe/Madrid\n", + "4 POLYGON ((2.60000 41.10000, 2.80000 41.10000, ... NaN\n", + "... ... ...\n", + "9995 POLYGON ((20.80000 60.90000, 21.00000 60.90000... Europe/Helsinki\n", + "9996 POLYGON ((21.00000 60.90000, 21.20000 60.90000... Europe/Helsinki\n", + "9997 POLYGON ((21.20000 60.90000, 21.40000 60.90000... Europe/Helsinki\n", + "9998 POLYGON ((21.40000 60.90000, 21.60000 60.90000... Europe/Helsinki\n", + "9999 POLYGON ((21.60000 60.90000, 21.80000 60.90000... Europe/Helsinki\n", + "\n", + "[10000 rows x 2 columns]" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "coordinates.shapefile" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "coordinates.shapefile.to_file('spatial_join_method_3')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/tutorials/5.Others/5.5.Selecting.ipynb b/tutorials/5.Others/5.5.Selecting.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..fd7024dfeea42ff99de9fc9a225d144e5e89fab8 --- /dev/null +++ b/tutorials/5.Others/5.5.Selecting.ipynb @@ -0,0 +1,271 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Select function" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "file_path = \"/gpfs/scratch/bsc32/bsc32538/original_files/CAMS_MONARCH_d01_2022070412.nc\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Time" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2022-07-04 12:00:00 109 2022-07-09 00:00:00\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\nCPU times: user 1.17 s, sys: 6.79 s, total: 7.96 s\\nWall time: 31.4 s\\n'" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy = open_netcdf(file_path)\n", + "nessy.keep_vars('O3')\n", + "\n", + "print(nessy.time[0], len(nessy.time), nessy.time[-1])\n", + "# %time nessy.load()\n", + "\"\"\"\n", + "CPU times: user 1.17 s, sys: 6.79 s, total: 7.96 s\n", + "Wall time: 31.4 s\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Hours filter" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2022-07-05 00:00:00 24 2022-07-05 23:00:00\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\nCPU times: user 295 ms, sys: 1.47 s, total: 1.76 s\\nWall time: 6.77 s\\n'" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy = 
open_netcdf(file_path)\n", + "nessy.keep_vars('O3')\n", + "\n", + "nessy.sel(hours_start=12, hours_end=73)\n", + "\n", + "print(nessy.time[0], len(nessy.time), nessy.time[-1])\n", + "# %time nessy.load()\n", + "\"\"\"\n", + "CPU times: user 295 ms, sys: 1.47 s, total: 1.76 s\n", + "Wall time: 6.77 s\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Time filter" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2022-07-05 00:00:00 24 2022-07-05 23:00:00\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\nCPU times: user 274 ms, sys: 1.44 s, total: 1.71 s\\nWall time: 7.53 s\\n'" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from datetime import datetime\n", + "\n", + "nessy = open_netcdf(file_path)\n", + "nessy.keep_vars('O3')\n", + "\n", + "nessy.sel(time_min=datetime(year=2022, month=7, day=5, hour=0), \n", + " time_max=datetime(year=2022, month=7, day=5, hour=23))\n", + "\n", + "print(nessy.time[0], len(nessy.time), nessy.time[-1])\n", + "# %time nessy.load()\n", + "\"\"\"\n", + "CPU times: user 274 ms, sys: 1.44 s, total: 1.71 s\n", + "Wall time: 7.53 s\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Level" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\nCPU times: user 77.3 ms, sys: 248 ms, total: 325 ms\\nWall time: 17.1 s\\n'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy = open_netcdf(file_path)\n", + "nessy.keep_vars('O3')\n", + "\n", + "nessy.sel(lev_min=23)\n", + "\n", + "print(len(nessy.lev['data']))\n", + "# %time nessy.load()\n", + "\"\"\"\n", + "CPU times: user 77.3 ms, sys: 248 ms, total: 325 ms\n", + "Wall time: 17.1 s\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Coordinates" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 16.4 ms, sys: 79.8 ms, total: 96.2 ms\n", + "Wall time: 18.6 s\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\nCPU times: user 13.9 ms, sys: 74.9 ms, total: 88.8 ms\\nWall time: 16.3 s\\n'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy = open_netcdf(file_path)\n", + "nessy.keep_vars('O3')\n", + "\n", + "nessy.sel(lat_min=30, lat_max=31, lon_min=-1, lon_max=1)\n", + "%time nessy.load()\n", + "\"\"\"\n", + "CPU times: user 13.9 ms, sys: 74.9 ms, total: 88.8 ms\n", + "Wall time: 16.3 s\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "nessy.to_netcdf(\"test_sel.nc\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd 
b/tutorials/Jupyter_bash_nord3v2.cmd similarity index 80% rename from Jupyter_notebooks/Jupyter_bash_nord3v2.cmd rename to tutorials/Jupyter_bash_nord3v2.cmd index 20819e939a150aab82341bed9e2f6ca797fa45d6..eefd0f7219bcc337065fb5eef61df954e423919e 100644 --- a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd +++ b/tutorials/Jupyter_bash_nord3v2.cmd @@ -1,10 +1,11 @@ #!/bin/bash #SBATCH --ntasks 1 -#SBATCH --time 03:00:00 +#SBATCH --time 02:00:00 #SBATCH --job-name NES #SBATCH --output log_jupyter-notebook-%J.out #SBATCH --error log_jupyter-notebook-%J.err #SBATCH --exclusive +#SBATCH --qos debug # get tunneling info XDG_RUNTIME_DIR="" @@ -32,8 +33,11 @@ module load cfunits/1.8-foss-2019b-Python-3.7.4 module load filelock/3.7.1-foss-2019b-Python-3.7.4 module load pyproj/2.5.0-foss-2019b-Python-3.7.4 module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 +module load geopandas/0.10.2-foss-2019b-Python-3.7.4 +module load Shapely/1.7.1-foss-2019b-Python-3.7.4 +#module load NES/0.9.0-foss-2019b-Python-3.7.4 -export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} +export PYTHONPATH=/esarchive/scratch/avilanova/software/NES:${PYTHONPATH} # DON'T USE ADDRESS BELOW. # DO USE TOKEN BELOW diff --git a/Jupyter_notebooks/input/Dades_2017.xlsx b/tutorials/data/Dades_2017.xlsx similarity index 100% rename from Jupyter_notebooks/input/Dades_2017.xlsx rename to tutorials/data/Dades_2017.xlsx diff --git a/Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx b/tutorials/data/Dades_Port_Barcelona_2017-2021_corr.xlsx similarity index 100% rename from Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx rename to tutorials/data/Dades_Port_Barcelona_2017-2021_corr.xlsx diff --git a/tutorials/data/NH3_barcelona_2019_csic.csv b/tutorials/data/NH3_barcelona_2019_csic.csv new file mode 100644 index 0000000000000000000000000000000000000000..0b1fdd941b1d3dc57aea5ca3431abd63be90191c --- /dev/null +++ b/tutorials/data/NH3_barcelona_2019_csic.csv @@ -0,0 +1,13 @@ +Date-hour in,traffic_site,urban_site +01/01/2019 00:00:00,4.98898814531849,2.55323513371909 +02/01/2019 00:00:00,3.42253492604592,1.55622607730934 +03/01/2019 00:00:00,2.67506499076435,1.68635511757375 +04/01/2019 00:00:00,3.42552223044561,1.97548631953393 +05/01/2019 00:00:00,5.31480909108289,1.11924536797087 +06/01/2019 00:00:00,3.13949500437943,1.62656669999897 +07/01/2019 00:00:00,0,2.22685564636695 +08/01/2019 00:00:00,0,2.4696380137646 +09/01/2019 00:00:00,0,3.72735535650666 +10/01/2019 00:00:00,0,1.53505610127377 +11/01/2019 00:00:00,0,2.51115189477121 +12/01/2019 00:00:00,0,0 diff --git a/tutorials/data/NH3_stations_CSIC.csv b/tutorials/data/NH3_stations_CSIC.csv new file mode 100644 index 0000000000000000000000000000000000000000..cf9a54d4dd99109a2aee009c61f72ea663f03574 --- /dev/null +++ b/tutorials/data/NH3_stations_CSIC.csv @@ -0,0 +1,3 @@ +station,Lon,Lat +urban_site,2.1151,41.3875 +traffic_site,2.1534,41.3987 diff --git a/Jupyter_notebooks/input/XVPCA_info.csv b/tutorials/data/XVPCA_info.csv similarity index 100% rename from Jupyter_notebooks/input/XVPCA_info.csv rename to tutorials/data/XVPCA_info.csv diff --git a/Jupyter_notebooks/input/estaciones.csv b/tutorials/data/estaciones.csv similarity index 100% rename from Jupyter_notebooks/input/estaciones.csv rename to tutorials/data/estaciones.csv diff --git a/tutorials/data/timezones_2021c/timezones_2021c.dbf b/tutorials/data/timezones_2021c/timezones_2021c.dbf new file mode 100644 index 0000000000000000000000000000000000000000..aed52eac47a3f420d8ecd782863d6145e1a7f300 
Binary files /dev/null and b/tutorials/data/timezones_2021c/timezones_2021c.dbf differ diff --git a/tutorials/data/timezones_2021c/timezones_2021c.prj b/tutorials/data/timezones_2021c/timezones_2021c.prj new file mode 100644 index 0000000000000000000000000000000000000000..f45cbadf0074d8b7b2669559a93bc50bb95f82d4 --- /dev/null +++ b/tutorials/data/timezones_2021c/timezones_2021c.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/tutorials/data/timezones_2021c/timezones_2021c.shp b/tutorials/data/timezones_2021c/timezones_2021c.shp new file mode 100644 index 0000000000000000000000000000000000000000..5dbd64b964295a7689fb70646caf4db8f0745ddb Binary files /dev/null and b/tutorials/data/timezones_2021c/timezones_2021c.shp differ diff --git a/tutorials/data/timezones_2021c/timezones_2021c.shx b/tutorials/data/timezones_2021c/timezones_2021c.shx new file mode 100644 index 0000000000000000000000000000000000000000..dda8f5adb453c5460d2c39d7741a2c62d71c2153 Binary files /dev/null and b/tutorials/data/timezones_2021c/timezones_2021c.shx differ
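The new `tutorials/5.Others/5.5.Selecting.ipynb` notebook added above exercises every subsetting mode of `sel()`: by hour offsets, by explicit datetimes, by vertical level, and by lat/lon bounds. A minimal end-to-end sketch of those same calls follows, assuming the NES API exactly as used in the notebook; the file path here is a placeholder rather than a real archive file, and the `hours_start`/`hours_end` semantics (trimming hours off either end of the time axis) are inferred from the notebook's printed output (109 hourly steps from 2022-07-04 12:00, reduced to 24 steps spanning 2022-07-05 00:00 to 23:00).

```python
from datetime import datetime
from nes import open_netcdf

nessy = open_netcdf("path/to/forecast.nc")  # placeholder path
nessy.keep_vars('O3')                       # drop every variable except O3

# Option 1: trim the hourly time axis by offsets -- 12 h off the start,
# 73 h off the end (2022-07-04 12:00 + 12 h = 2022-07-05 00:00, etc.).
nessy.sel(hours_start=12, hours_end=73)

# Option 2 (equivalent here): explicit datetime bounds.
# nessy.sel(time_min=datetime(2022, 7, 5, 0), time_max=datetime(2022, 7, 5, 23))

# Vertical and horizontal subsetting use the same call:
# nessy.sel(lev_min=23)
# nessy.sel(lat_min=30, lat_max=31, lon_min=-1, lon_max=1)

nessy.load()                  # only the selected hyperslab is read from disk
nessy.to_netcdf("subset.nc")  # write the subset out
```

Calling `sel()` before `load()` is what produces the speed-ups recorded in the notebook's timing strings: the read is restricted to the selected hyperslab instead of the full 109-step, all-level domain.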