diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d55066268e29f54f152e3812e772083687590e2..6b0bc74f780903b0a4e1bcdb6b54c3ffada0835c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ * Open NetCDF: * Regular Latitude-Longitude * Rotated Lat-Lon + * Points + * Points in GHOST format * Statistics: * Daily_mean * Daily_max @@ -16,4 +18,7 @@ * By Y axis * By X axis * Create NetCDF: - * Regular Latitude-Longitude \ No newline at end of file + * Regular Latitude-Longitude + * Rotated Lat-Lon + * Points + \ No newline at end of file diff --git a/Jupyter_notebooks/NES_simple_test.ipynb b/Jupyter_notebooks/1-introduction.ipynb similarity index 96% rename from Jupyter_notebooks/NES_simple_test.ipynb rename to Jupyter_notebooks/1-introduction.ipynb index badbd866ce4c13e650b9ae31734f9f605b449a84..b98283a20ad5134c2bed0a04aa84c7bec21b4964 100644 --- a/Jupyter_notebooks/NES_simple_test.ipynb +++ b/Jupyter_notebooks/1-introduction.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# NES - NetCDF for Earth Science" + "# Introduction to NES - NetCDF for Earth Science" ] }, { @@ -32,8 +32,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 181 ms, sys: 125 ms, total: 306 ms\n", - "Wall time: 16 s\n" + "CPU times: user 189 ms, sys: 136 ms, total: 326 ms\n", + "Wall time: 16.1 s\n" ] } ], @@ -50,7 +50,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 3, @@ -341,7 +341,7 @@ "text": [ "Rank 000: Loading O3 var (1/1)\n", "Rank 000: Loaded O3 var ((109, 24, 361, 467))\n", - "CPU times: user 1.19 s, sys: 6.98 s, total: 8.17 s\n", + "CPU times: user 1.16 s, sys: 6.54 s, total: 7.69 s\n", "Wall time: 40.8 s\n" ] } @@ -379,8 +379,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1.09 s, sys: 917 ms, total: 2.01 s\n", - "Wall time: 14.3 s\n" + "CPU times: user 917 ms, sys: 497 ms, total: 1.41 s\n", + "Wall time: 14.2 s\n" ] } ], @@ -405,8 +405,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 409 ms, sys: 81.9 ms, total: 491 ms\n", - "Wall time: 719 ms\n" + "CPU times: user 402 ms, sys: 77.5 ms, total: 479 ms\n", + "Wall time: 701 ms\n" ] } ], @@ -442,8 +442,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 45.4 ms, sys: 32.1 ms, total: 77.6 ms\n", - "Wall time: 723 ms\n" + "CPU times: user 38.3 ms, sys: 28 ms, total: 66.3 ms\n", + "Wall time: 847 ms\n" ] } ], diff --git a/Jupyter_notebooks/1.1-regular_grids.ipynb b/Jupyter_notebooks/1.1-regular_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..3ada97f779e36115e8bf379e5d5bf734198a0e41 --- /dev/null +++ b/Jupyter_notebooks/1.1-regular_grids.ipynb @@ -0,0 +1,2364 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write regular grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from netCDF4 import Dataset\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "nc_path_1 = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# ERROR when files have lat_bnds, lon_bnds \n", + "#nc_path_1 = 
'/esarchive/exp/ecearth/a2vx/original_files/cmorfiles-fixed/CMIP/EC-Earth-Consortium/EC-Earth3-AerChem/historical/r4i1p1f1/Amon/ch4/gn/v20200609/ch4_Amon_EC-Earth3-AerChem_historical_r4i1p1f1_gn_185001-185012.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[... a long run of near-identical xarray SerializationWarnings (xarray/conventions.py:520), one per variable from 'IM' through 'dry_pm2p5_mass': variable has multiple fill values {-999999, -32767} (or {-999999.0, -32767.0}), decoding all values to NaN ...]\n",
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QR' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QS' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QG' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_001' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_002' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_003' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_004' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_005' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_006' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_007' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_008' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_001' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_002' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_003' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_004' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_005' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_006' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_007' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_008' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_009' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_010' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_011' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_012' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_013' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_014' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_015' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_016' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_017' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_018' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_019' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_020' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_021' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_022' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_023' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_024' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_025' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_026' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_027' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_028' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_029' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_030' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_031' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_032' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_033' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_034' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_035' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_036' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_037' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_038' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_039' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_040' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_041' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_042' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_043' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_044' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_045' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_046' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_047' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_048' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_049' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_050' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_051' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_052' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_053' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_054' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_055' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_056' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_057' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_058' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_059' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_060' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_061' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_062' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_063' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_064' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_065' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_066' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_067' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_068' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_069' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_070' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_071' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_072' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_073' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_074' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_075' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_076' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_077' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_078' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_079' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_080' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'MPRATES_001' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_1' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_2' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_3' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_4' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_5' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_6' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " 
decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_7' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_8' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                    (time: 9, lm: 48, lmp: 49, lon: 257, lat: 181, idat: 3, soil_lm: 4, num_aero: 88, num_aero_r: 3, num_aero_1: 89, num_engy: 7, num_gas_total: 1, aerosol_optical_depth_dim: 8, satellite_AOD_dim: 2, aerosol_loading_dim: 8, clear_sky_AOD_dim: 8, aerosol_extinction_dim: 8)\n",
+       "Coordinates:\n",
+       "  * time                       (time) datetime64[ns] 2008-12-31 ... 2009-01-01\n",
+       "  * lm                         (lm) int32 0 1 2 3 4 5 6 ... 41 42 43 44 45 46 47\n",
+       "  * lmp                        (lmp) int32 0 1 2 3 4 5 6 ... 43 44 45 46 47 48\n",
+       "  * lon                        (lon) float32 -180.0 -178.6 ... 178.6 180.0\n",
+       "  * lat                        (lat) float32 -90.0 -89.0 -88.0 ... 89.0 90.0\n",
+       "  * aerosol_optical_depth_dim  (aerosol_optical_depth_dim) |S100 b'DUST_1    ...\n",
+       "  * satellite_AOD_dim          (satellite_AOD_dim) |S100 b'MODIS TERRA 550 nm...\n",
+       "  * aerosol_loading_dim        (aerosol_loading_dim) |S100 b'DUST_1          ...\n",
+       "  * clear_sky_AOD_dim          (clear_sky_AOD_dim) |S100 b'DUST_1            ...\n",
+       "  * aerosol_extinction_dim     (aerosol_extinction_dim) |S100 b'DUST_1       ...\n",
+       "Dimensions without coordinates: idat, soil_lm, num_aero, num_aero_r, num_aero_1, num_engy, num_gas_total\n",
+       "Data variables: (12/302)\n",
+       "    IM                         float64 257.0\n",
+       "    JM                         float64 181.0\n",
+       "    LM                         float64 48.0\n",
+       "    IHRST                      float64 0.0\n",
+       "    I_PAR_STA                  float64 1.0\n",
+       "    J_PAR_STA                  float64 1.0\n",
+       "    ...                         ...\n",
+       "    aerosol_extinction_DUST_3  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_4  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_5  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_6  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_7  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_8  (time, lm, lat, lon) float32 ...\n",
+       "Attributes:\n",
+       "    Domain:       Global\n",
+       "    Conventions:  None\n",
+       "    history:      MONARCHv1.0 netcdf file.\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 9, lm: 48, lmp: 49, lon: 257, lat: 181, idat: 3, soil_lm: 4, num_aero: 88, num_aero_r: 3, num_aero_1: 89, num_engy: 7, num_gas_total: 1, aerosol_optical_depth_dim: 8, satellite_AOD_dim: 2, aerosol_loading_dim: 8, clear_sky_AOD_dim: 8, aerosol_extinction_dim: 8)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2008-12-31 ... 2009-01-01\n", + " * lm (lm) int32 0 1 2 3 4 5 6 ... 41 42 43 44 45 46 47\n", + " * lmp (lmp) int32 0 1 2 3 4 5 6 ... 43 44 45 46 47 48\n", + " * lon (lon) float32 -180.0 -178.6 ... 178.6 180.0\n", + " * lat (lat) float32 -90.0 -89.0 -88.0 ... 89.0 90.0\n", + " * aerosol_optical_depth_dim (aerosol_optical_depth_dim) |S100 b'DUST_1 ...\n", + " * satellite_AOD_dim (satellite_AOD_dim) |S100 b'MODIS TERRA 550 nm...\n", + " * aerosol_loading_dim (aerosol_loading_dim) |S100 b'DUST_1 ...\n", + " * clear_sky_AOD_dim (clear_sky_AOD_dim) |S100 b'DUST_1 ...\n", + " * aerosol_extinction_dim (aerosol_extinction_dim) |S100 b'DUST_1 ...\n", + "Dimensions without coordinates: idat, soil_lm, num_aero, num_aero_r, num_aero_1, num_engy, num_gas_total\n", + "Data variables: (12/302)\n", + " IM float64 ...\n", + " JM float64 ...\n", + " LM float64 ...\n", + " IHRST float64 ...\n", + " I_PAR_STA float64 ...\n", + " J_PAR_STA float64 ...\n", + " ... ...\n", + " aerosol_extinction_DUST_3 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_4 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_5 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_6 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_7 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_8 (time, lm, lat, lon) float32 ...\n", + "Attributes:\n", + " Domain: Global\n", + " Conventions: None\n", + " history: MONARCHv1.0 netcdf file.\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True)\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2008, 12, 31, 0, 0),\n", + " datetime.datetime(2008, 12, 31, 3, 0),\n", + " datetime.datetime(2008, 12, 31, 6, 0),\n", + " datetime.datetime(2008, 12, 31, 9, 0),\n", + " datetime.datetime(2008, 12, 31, 12, 0),\n", + " datetime.datetime(2008, 12, 31, 15, 0),\n", + " datetime.datetime(2008, 12, 31, 18, 0),\n", + " datetime.datetime(2008, 12, 31, 21, 0),\n", + " datetime.datetime(2009, 1, 1, 0, 0)]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,\n", + " 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,\n", + " 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,\n", + " 42, 43, 44, 45, 46, 47],\n", + " mask=False,\n", + " fill_value=999999,\n", + " dtype=int32),\n", + " 
'dimensions': ('lm',),\n", + " 'units': '',\n", + " 'long_name': 'layer id'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-90., -89., -88., -87., -86., -85., -84., -83., -82.,\n", + " -81., -80., -79., -78., -77., -76., -75., -74., -73.,\n", + " -72., -71., -70., -69., -68., -67., -66., -65., -64.,\n", + " -63., -62., -61., -60., -59., -58., -57., -56., -55.,\n", + " -54., -53., -52., -51., -50., -49., -48., -47., -46.,\n", + " -45., -44., -43., -42., -41., -40., -39., -38., -37.,\n", + " -36., -35., -34., -33., -32., -31., -30., -29., -28.,\n", + " -27., -26., -25., -24., -23., -22., -21., -20., -19.,\n", + " -18., -17., -16., -15., -14., -13., -12., -11., -10.,\n", + " -9., -8., -7., -6., -5., -4., -3., -2., -1.,\n", + " 0., 1., 2., 3., 4., 5., 6., 7., 8.,\n", + " 9., 10., 11., 12., 13., 14., 15., 16., 17.,\n", + " 18., 19., 20., 21., 22., 23., 24., 25., 26.,\n", + " 27., 28., 29., 30., 31., 32., 33., 34., 35.,\n", + " 36., 37., 38., 39., 40., 41., 42., 43., 44.,\n", + " 45., 46., 47., 48., 49., 50., 51., 52., 53.,\n", + " 54., 55., 56., 57., 58., 59., 60., 61., 62.,\n", + " 63., 64., 65., 66., 67., 68., 69., 70., 71.,\n", + " 72., 73., 74., 75., 76., 77., 78., 79., 80.,\n", + " 81., 82., 83., 84., 85., 86., 87., 88., 89.,\n", + " 90.],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lat',),\n", + " 'long_name': 'latitude',\n", + " 'units': 'degrees_north',\n", + " 'standard_name': 'grid_latitude'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-180. , -178.59375, -177.1875 , -175.78125,\n", + " -174.375 , -172.96875, -171.5625 , -170.15625,\n", + " -168.75 , -167.34375, -165.9375 , -164.53125,\n", + " -163.125 , -161.71875, -160.3125 , -158.90625,\n", + " -157.5 , -156.09375, -154.6875 , -153.28125,\n", + " -151.875 , -150.46875, -149.0625 , -147.65625,\n", + " -146.25 , -144.84375, -143.4375 , -142.03125,\n", + " -140.625 , -139.21875, -137.8125 , -136.40625,\n", + " -135. , -133.59375, -132.1875 , -130.78125,\n", + " -129.375 , -127.96875, -126.5625 , -125.15625,\n", + " -123.75 , -122.34375, -120.9375 , -119.53125,\n", + " -118.125 , -116.71875, -115.3125 , -113.90625,\n", + " -112.5 , -111.09375, -109.6875 , -108.28125,\n", + " -106.875 , -105.46875, -104.0625 , -102.65625,\n", + " -101.25 , -99.84375, -98.4375 , -97.03125,\n", + " -95.625 , -94.21875, -92.8125 , -91.40625,\n", + " -90. , -88.59375, -87.1875 , -85.78125,\n", + " -84.375 , -82.96875, -81.5625 , -80.15625,\n", + " -78.75 , -77.34375, -75.9375 , -74.53125,\n", + " -73.125 , -71.71875, -70.3125 , -68.90625,\n", + " -67.5 , -66.09375, -64.6875 , -63.28125,\n", + " -61.875 , -60.46875, -59.0625 , -57.65625,\n", + " -56.25 , -54.84375, -53.4375 , -52.03125,\n", + " -50.625 , -49.21875, -47.8125 , -46.40625,\n", + " -45. 
, -43.59375, -42.1875 , -40.78125,\n", + " -39.375 , -37.96875, -36.5625 , -35.15625,\n", + " -33.75 , -32.34375, -30.9375 , -29.53125,\n", + " -28.125 , -26.71875, -25.3125 , -23.90625,\n", + " -22.5 , -21.09375, -19.6875 , -18.28125,\n", + " -16.875 , -15.46875, -14.0625 , -12.65625,\n", + " -11.25 , -9.84375, -8.4375 , -7.03125,\n", + " -5.625 , -4.21875, -2.8125 , -1.40625,\n", + " 0. , 1.40625, 2.8125 , 4.21875,\n", + " 5.625 , 7.03125, 8.4375 , 9.84375,\n", + " 11.25 , 12.65625, 14.0625 , 15.46875,\n", + " 16.875 , 18.28125, 19.6875 , 21.09375,\n", + " 22.5 , 23.90625, 25.3125 , 26.71875,\n", + " 28.125 , 29.53125, 30.9375 , 32.34375,\n", + " 33.75 , 35.15625, 36.5625 , 37.96875,\n", + " 39.375 , 40.78125, 42.1875 , 43.59375,\n", + " 45. , 46.40625, 47.8125 , 49.21875,\n", + " 50.625 , 52.03125, 53.4375 , 54.84375,\n", + " 56.25 , 57.65625, 59.0625 , 60.46875,\n", + " 61.875 , 63.28125, 64.6875 , 66.09375,\n", + " 67.5 , 68.90625, 70.3125 , 71.71875,\n", + " 73.125 , 74.53125, 75.9375 , 77.34375,\n", + " 78.75 , 80.15625, 81.5625 , 82.96875,\n", + " 84.375 , 85.78125, 87.1875 , 88.59375,\n", + " 90. , 91.40625, 92.8125 , 94.21875,\n", + " 95.625 , 97.03125, 98.4375 , 99.84375,\n", + " 101.25 , 102.65625, 104.0625 , 105.46875,\n", + " 106.875 , 108.28125, 109.6875 , 111.09375,\n", + " 112.5 , 113.90625, 115.3125 , 116.71875,\n", + " 118.125 , 119.53125, 120.9375 , 122.34375,\n", + " 123.75 , 125.15625, 126.5625 , 127.96875,\n", + " 129.375 , 130.78125, 132.1875 , 133.59375,\n", + " 135. , 136.40625, 137.8125 , 139.21875,\n", + " 140.625 , 142.03125, 143.4375 , 144.84375,\n", + " 146.25 , 147.65625, 149.0625 , 150.46875,\n", + " 151.875 , 153.28125, 154.6875 , 156.09375,\n", + " 157.5 , 158.90625, 160.3125 , 161.71875,\n", + " 163.125 , 164.53125, 165.9375 , 167.34375,\n", + " 168.75 , 170.15625, 171.5625 , 172.96875,\n", + " 174.375 , 175.78125, 177.1875 , 178.59375,\n", + " 180. 
],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lon',),\n", + " 'long_name': 'longitude',\n", + " 'units': 'degrees_east',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "nessy_1.keep_vars('O3')" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3 var (1/1)\n", + "Rank 000: Loaded O3 var ((9, 48, 181, 257))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'O3': {'data': masked_array(\n", + " data=[[[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]],\n", + " \n", + " \n", + " [[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 
0.]],\n",
 + "         \n",
 + "         [... the remaining level/time blocks are likewise all zeros; the repeated rows are omitted here ...]\n",
 + "         \n",
 + "         [[0., 0., 0., ..., 0., 0., 0.],\n",
 + "          [0., 0., 0., ..., 0., 0., 0.],\n",
 + "          ...,\n",
 + "          [0., 0., 0., ..., 0., 0., 0.],\n",
 + "          [0., 0., 0., ..., 0., 0., 0.]]]],\n",
 + "   mask=False,\n",
 + "   fill_value=1e+20,\n",
 + "   dtype=float32),\n",
 + "  'dimensions': ('time', 'lm', 'lat', 'lon'),\n",
 + "  'long_name': 'O3',\n",
 + "  'units': 'unknown',\n",
 + "  'standard_name': 'O3'}}"
 + ]
 + },
"execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Write dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating regular_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing O3 var (1/1)\n", + "Rank 000: Var O3 created (1/1)\n", + "Rank 000: Var O3 data (1/1)\n", + "Rank 000: Var O3 completed (1/1)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('regular_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": true + }, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('regular_file_1.nc', info=True)\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:  (time: 9, lev: 48, lat: 181, lon: 257)\n",
+       "Coordinates:\n",
+       "  * time     (time) datetime64[ns] 2008-12-31 2008-12-31T03:00:00 ... 2009-01-01\n",
+       "  * lev      (lev) float64 0.0 1.0 2.0 3.0 4.0 5.0 ... 43.0 44.0 45.0 46.0 47.0\n",
+       "  * lat      (lat) float64 -90.0 -89.0 -88.0 -87.0 -86.0 ... 87.0 88.0 89.0 90.0\n",
+       "  * lon      (lon) float64 -180.0 -178.6 -177.2 -175.8 ... 177.2 178.6 180.0\n",
+       "Data variables:\n",
+       "    O3       (time, lev, lat, lon) float32 ...\n",
+       "    crs      |S1 b''\n",
+       "Attributes:\n",
+       "    Domain:       Global\n",
+       "    Conventions:  CF-1.7\n",
+       "    history:      MONARCHv1.0 netcdf file.\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 9, lev: 48, lat: 181, lon: 257)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2008-12-31 2008-12-31T03:00:00 ... 2009-01-01\n", + " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 5.0 ... 43.0 44.0 45.0 46.0 47.0\n", + " * lat (lat) float64 -90.0 -89.0 -88.0 -87.0 -86.0 ... 87.0 88.0 89.0 90.0\n", + " * lon (lon) float64 -180.0 -178.6 -177.2 -175.8 ... 177.2 178.6 180.0\n", + "Data variables:\n", + " O3 (time, lev, lat, lon) float32 ...\n", + " crs |S1 ...\n", + "Attributes:\n", + " Domain: Global\n", + " Conventions: CF-1.7\n", + " history: MONARCHv1.0 netcdf file.\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('regular_file_1.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.2-rotated_grids.ipynb b/Jupyter_notebooks/1.2-rotated_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..41ef31309f56d2040926dc637c26e97f335f5bf6 --- /dev/null +++ b/Jupyter_notebooks/1.2-rotated_grids.ipynb @@ -0,0 +1,1273 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write rotated grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "nc_path_1 = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 1, nv: 2, lev: 24, rlat: 271, rlon: 351)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2021-08-03\n",
+       "  * lev           (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n",
+       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
+       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
+       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
+       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
+       "Dimensions without coordinates: nv\n",
+       "Data variables:\n",
+       "    time_bnds     (time, nv) datetime64[ns] 2021-08-03 2021-08-07\n",
+       "    O3_all        (time, lev, rlat, rlon) float32 ...\n",
+       "    rotated_pole  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, nv: 2, lev: 24, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-08-03\n", + " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Dimensions without coordinates: nv\n", + "Data variables:\n", + " time_bnds (time, nv) datetime64[ns] ...\n", + " O3_all (time, lev, rlat, rlon) float32 ...\n", + " rotated_pole |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True)\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2021, 8, 3, 0, 0)]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.,\n", + " 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21.,\n", + " 22., 23.],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('lev',),\n", + " 'units': '',\n", + " 'positive': 'up'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[16.35033798, 16.43292999, 16.51514626, ..., 16.51514626,\n", + " 16.43292999, 16.35033798],\n", + " [16.52742577, 16.61023903, 16.69267654, ..., 16.69267654,\n", + " 16.61024284, 16.52742577],\n", + " [16.70447159, 16.78750801, 16.87016678, ..., 16.87016678,\n", + " 16.78750992, 16.70447159],\n", + " ...,\n", + " [58.32094955, 58.47268295, 58.62430954, ..., 58.62430954,\n", + " 58.47268295, 58.32094955],\n", + " [58.42628479, 58.57820129, 58.73002625, ..., 58.73002625,\n", + " 58.57820129, 58.42628479],\n", + " [58.53079224, 58.68289948, 58.83491898, ..., 58.83491898,\n", + " 58.68290329, 58.53079224]],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", + " 'standard_name': 'latitude'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-22.18126488, -22.01667213, -21.85179901, ..., 41.8517952 ,\n", + " 42.01666641, 42.18125916],\n", + " [-22.27817917, -22.11318588, 
-21.94790459, ..., 41.94789886,\n", + " 42.11317444, 42.27817154],\n", + " [-22.37526703, -22.2098732 , -22.04418945, ..., 42.04418564,\n", + " 42.2098732 , 42.37526321],\n", + " ...,\n", + " [-67.57766724, -67.39706421, -67.21534729, ..., 87.21533966,\n", + " 87.39705658, 87.57765961],\n", + " [-67.90187836, -67.72247314, -67.54193878, ..., 87.54193878,\n", + " 87.72245789, 87.90187073],\n", + " [-68.22803497, -68.04981995, -67.87051392, ..., 87.87050629,\n", + " 88.04981995, 88.22803497]],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3_all var (1/1)\n", + "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'O3_all': {'data': masked_array(\n", + " data=[[[[2.82636070e-09, 2.83436141e-09, 2.82522827e-09, ...,\n", + " 2.96334601e-09, 2.94810221e-09, 2.91839553e-09],\n", + " [2.80930834e-09, 2.57182142e-09, 2.55521360e-09, ...,\n", + " 2.56395216e-09, 2.55890820e-09, 2.89073032e-09],\n", + " [2.79031842e-09, 2.53415999e-09, 2.50317100e-09, ...,\n", + " 2.56737764e-09, 2.58685584e-09, 2.85498758e-09],\n", + " ...,\n", + " [4.54469973e-09, 2.31674457e-09, 2.22753971e-09, ...,\n", + " 3.90127353e-09, 3.89643118e-09, 3.95452204e-09],\n", + " [4.54129534e-09, 3.39469808e-09, 2.30205255e-09, ...,\n", + " 3.88824706e-09, 3.88372090e-09, 3.95252631e-09],\n", + " [4.55012028e-09, 4.54941684e-09, 4.55885596e-09, ...,\n", + " 3.93945099e-09, 3.94256938e-09, 3.94736510e-09]],\n", + " \n", + " [[1.61966751e-09, 1.62850033e-09, 1.62801062e-09, ...,\n", + " 1.74583636e-09, 1.74684045e-09, 1.74125825e-09],\n", + " [1.60704539e-09, 1.41438683e-09, 1.39824063e-09, ...,\n", + " 1.43241041e-09, 1.45136980e-09, 1.73744363e-09],\n", + " [1.59303792e-09, 1.41264567e-09, 1.43958856e-09, ...,\n", + " 1.43522705e-09, 1.45869528e-09, 1.72746673e-09],\n", + " ...,\n", + " [3.39471939e-09, 2.65527422e-09, 2.22850582e-09, ...,\n", + " 3.00350167e-09, 3.02176750e-09, 3.04009262e-09],\n", + " [3.42592332e-09, 2.81851942e-09, 2.28753505e-09, ...,\n", + " 2.99818836e-09, 2.99247205e-09, 3.04403525e-09],\n", + " [3.43113582e-09, 3.43824125e-09, 3.44929552e-09, ...,\n", + " 3.05421777e-09, 3.04752024e-09, 3.04445491e-09]],\n", + " \n", + " [[6.52169652e-10, 6.62677024e-10, 6.71934786e-10, ...,\n", + " 6.84429291e-10, 6.85826118e-10, 6.81504464e-10],\n", + " [6.54959087e-10, 6.65219158e-10, 6.72430500e-10, ...,\n", + " 7.02121916e-10, 6.88325397e-10, 6.78990253e-10],\n", + " [6.57915333e-10, 6.72102929e-10, 6.82566170e-10, ...,\n", + " 7.10820458e-10, 7.07094217e-10, 6.77522760e-10],\n", + " ...,\n", + " [2.26027863e-09, 2.27629537e-09, 2.22616392e-09, ...,\n", + " 1.80253423e-09, 1.80225357e-09, 1.75757697e-09],\n", + " [2.25028196e-09, 2.24872521e-09, 2.25445618e-09, ...,\n", + " 1.78916737e-09, 1.75583581e-09, 1.73717007e-09],\n", + " [2.25827335e-09, 2.26974151e-09, 2.28325270e-09, ...,\n", + " 1.80090465e-09, 1.77703174e-09, 1.75434933e-09]],\n", + " \n", + " ...,\n", + " 
\n", + " [[6.20177729e-11, 6.26959387e-11, 6.28658792e-11, ...,\n", + " 7.74274672e-11, 7.81546980e-11, 7.60479180e-11],\n", + " [6.20486787e-11, 4.91600684e-11, 4.88878833e-11, ...,\n", + " 8.30884250e-11, 8.02152303e-11, 7.64004970e-11],\n", + " [6.20976950e-11, 4.84989236e-11, 4.85273696e-11, ...,\n", + " 8.46209977e-11, 8.60716498e-11, 9.29777644e-11],\n", + " ...,\n", + " [6.15721710e-11, 5.85051035e-11, 5.68927752e-11, ...,\n", + " 7.66955388e-11, 7.87262894e-11, 8.41871295e-11],\n", + " [6.17081941e-11, 5.77536560e-11, 5.71826440e-11, ...,\n", + " 8.49015233e-11, 8.82505458e-11, 9.20043208e-11],\n", + " [6.09760506e-11, 6.03529102e-11, 6.24047411e-11, ...,\n", + " 9.69636524e-11, 9.73700426e-11, 9.67554162e-11]],\n", + " \n", + " [[6.17567178e-11, 6.23894963e-11, 6.25706292e-11, ...,\n", + " 9.04916420e-11, 8.90077803e-11, 8.43536768e-11],\n", + " [6.17901147e-11, 4.59270816e-11, 4.57923699e-11, ...,\n", + " 1.06383589e-10, 1.05693093e-10, 9.44862175e-11],\n", + " [6.18271337e-11, 4.17853495e-11, 3.94594427e-11, ...,\n", + " 1.34135009e-10, 1.37096737e-10, 1.13853482e-10],\n", + " ...,\n", + " [5.87425456e-11, 5.60845814e-11, 5.33429169e-11, ...,\n", + " 6.52061183e-11, 6.64711411e-11, 7.06842501e-11],\n", + " [5.92315016e-11, 5.72428251e-11, 5.51245403e-11, ...,\n", + " 7.10893150e-11, 7.38196310e-11, 7.53354532e-11],\n", + " [5.72967125e-11, 5.87497967e-11, 6.08200851e-11, ...,\n", + " 7.97847274e-11, 8.28124236e-11, 7.89215707e-11]],\n", + " \n", + " [[6.15217946e-11, 6.21571961e-11, 6.23377391e-11, ...,\n", + " 1.08401239e-10, 1.07494236e-10, 1.08711720e-10],\n", + " [6.15563989e-11, 4.56989759e-11, 4.46428450e-11, ...,\n", + " 1.30999808e-10, 1.26581134e-10, 1.39005307e-10],\n", + " [6.15933693e-11, 3.98656906e-11, 3.75483949e-11, ...,\n", + " 1.37105632e-10, 1.48587462e-10, 1.83946344e-10],\n", + " ...,\n", + " [4.68582569e-11, 4.44464673e-11, 4.43960736e-11, ...,\n", + " 5.86025117e-11, 5.84869791e-11, 6.32652056e-11],\n", + " [4.99817097e-11, 4.49490271e-11, 4.43218864e-11, ...,\n", + " 6.19639479e-11, 6.07859180e-11, 6.55651922e-11],\n", + " [4.98553143e-11, 4.61104453e-11, 4.96835975e-11, ...,\n", + " 6.42673414e-11, 6.38328765e-11, 6.38894007e-11]]]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'lev', 'rlat', 'rlon'),\n", + " 'units': 'kg/m3',\n", + " 'long_name': 'TRACERS_044',\n", + " 'coordinates': 'lat lon',\n", + " 'cell_methods': 'time: maximum (interval: 1hr)',\n", + " 'grid_mapping': 'rotated_pole'}}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
Write dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating rotated_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing O3_all var (1/1)\n", + "Rank 000: Var O3_all created (1/1)\n", + "Rank 000: Var O3_all data (1/1)\n", + "Rank 000: Var O3_all completed (1/1)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('rotated_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": true + }, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('rotated_file_1.nc', info=True)\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:    (time: 1, time_nv: 2, lev: 24, rlat: 271, rlon: 351)\n",
+       "Coordinates:\n",
+       "  * time       (time) datetime64[ns] 2021-08-03\n",
+       "  * lev        (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n",
+       "    lat        (rlat, rlon) float64 16.35 16.43 16.52 16.6 ... 58.83 58.68 58.53\n",
+       "    lon        (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 87.87 88.05 88.23\n",
+       "  * rlat       (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
+       "  * rlon       (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
+       "Dimensions without coordinates: time_nv\n",
+       "Data variables:\n",
+       "    time_bnds  (time, time_nv) datetime64[ns] 2021-08-03 2021-08-07\n",
+       "    O3_all     (time, lev, rlat, rlon) float32 ...\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, time_nv: 2, lev: 24, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-08-03\n", + " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Dimensions without coordinates: time_nv\n", + "Data variables:\n", + " time_bnds (time, time_nv) datetime64[ns] ...\n", + " O3_all (time, lev, rlat, rlon) float32 ...\n", + "Attributes:\n", + " Conventions: CF-1.7\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('rotated_file_1.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.3-points_grids.ipynb b/Jupyter_notebooks/1.3-points_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..d3979ac310a02bb834f7b003cfb747e023e3aa7e --- /dev/null +++ b/Jupyter_notebooks/1.3-points_grids.ipynb @@ -0,0 +1,5706 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write observational datasets" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# nc_path_1 = '/esarchive/obs/eea/eionet/hourly/pm10/pm10_202107.nc' # EIONET\n", + "nc_path_1 = '/esarchive/obs/nilu/ebas/daily/pm10/pm10_201507.nc' # EBAS" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read and write - Non-GHOST type" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                       (station: 84, time: 31)\n",
+       "Coordinates:\n",
+       "  * time                          (time) datetime64[ns] 2015-07-01 ... 2015-0...\n",
+       "Dimensions without coordinates: station\n",
+       "Data variables: (12/19)\n",
+       "    station_start_date            (station) |S75 b'1980-01-01' ... b'nan'\n",
+       "    station_zone                  (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    street_type                   (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    country_code                  (station) |S75 b'CH' b'CH' ... b'NL' b'IT'\n",
+       "    ccaa                          (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_name                  (station) |S75 b'payerne' ... b'lamezia terme'\n",
+       "    ...                            ...\n",
+       "    station_code                  (station) |S75 b'CH0002R' ... b'IT0016R'\n",
+       "    longitude                     (station) float32 6.944 8.905 ... 6.277 16.23\n",
+       "    station_end_date              (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_rural_back            (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    latitude                      (station) float32 46.81 47.48 ... 53.33 38.88\n",
+       "    station_ozone_classification  (station) |S75 b'rural' b'rural' ... b'nan'
" + ], + "text/plain": [ + "\n", + "Dimensions: (station: 84, time: 31)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2015-07-01 ... 2015-0...\n", + "Dimensions without coordinates: station\n", + "Data variables: (12/19)\n", + " station_start_date (station) |S75 ...\n", + " station_zone (station) |S75 ...\n", + " street_type (station) |S75 ...\n", + " country_code (station) |S75 ...\n", + " ccaa (station) |S75 ...\n", + " station_name (station) |S75 ...\n", + " ... ...\n", + " station_code (station) |S75 ...\n", + " longitude (station) float32 ...\n", + " station_end_date (station) |S75 ...\n", + " station_rural_back (station) |S75 ...\n", + " latitude (station) float32 ...\n", + " station_ozone_classification (station) |S75 ..." + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True, parallel_method='X')\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2015, 7, 1, 0, 0),\n", + " datetime.datetime(2015, 7, 1, 1, 0),\n", + " datetime.datetime(2015, 7, 1, 2, 0),\n", + " datetime.datetime(2015, 7, 1, 3, 0),\n", + " datetime.datetime(2015, 7, 1, 4, 0),\n", + " datetime.datetime(2015, 7, 1, 5, 0),\n", + " datetime.datetime(2015, 7, 1, 6, 0),\n", + " datetime.datetime(2015, 7, 1, 7, 0),\n", + " datetime.datetime(2015, 7, 1, 8, 0),\n", + " datetime.datetime(2015, 7, 1, 9, 0),\n", + " datetime.datetime(2015, 7, 1, 10, 0),\n", + " datetime.datetime(2015, 7, 1, 11, 0),\n", + " datetime.datetime(2015, 7, 1, 12, 0),\n", + " datetime.datetime(2015, 7, 1, 13, 0),\n", + " datetime.datetime(2015, 7, 1, 14, 0),\n", + " datetime.datetime(2015, 7, 1, 15, 0),\n", + " datetime.datetime(2015, 7, 1, 16, 0),\n", + " datetime.datetime(2015, 7, 1, 17, 0),\n", + " datetime.datetime(2015, 7, 1, 18, 0),\n", + " datetime.datetime(2015, 7, 1, 19, 0),\n", + " datetime.datetime(2015, 7, 1, 20, 0),\n", + " datetime.datetime(2015, 7, 1, 21, 0),\n", + " datetime.datetime(2015, 7, 1, 22, 0),\n", + " datetime.datetime(2015, 7, 1, 23, 0),\n", + " datetime.datetime(2015, 7, 2, 0, 0),\n", + " datetime.datetime(2015, 7, 2, 1, 0),\n", + " datetime.datetime(2015, 7, 2, 2, 0),\n", + " datetime.datetime(2015, 7, 2, 3, 0),\n", + " datetime.datetime(2015, 7, 2, 4, 0),\n", + " datetime.datetime(2015, 7, 2, 5, 0),\n", + " datetime.datetime(2015, 7, 2, 6, 0)]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", + " 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,\n", + " 34, 
35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,\n", + " 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,\n", + " 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83]),\n", + " 'units': ''}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.station" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[46.813057, 47.479767, 47.04947 , 47.06741 , 35.039165,\n", + " 56.354443, 56.711666, 55.694107, 59.494446, 58.37611 ,\n", + " 39.546944, 42.720554, 39.87528 , 37.23722 , 43.439167,\n", + " 41.274166, 42.319168, 38.47278 , 39.08278 , 41.238888,\n", + " 41.39389 , 42.634724, 37.051945, 51.57108 , 55.79216 ,\n", + " 46.966667, 51.933334, 64.1 , 45.814445, 58.433 ,\n", + " 65.833336, 62.783333, 60.366665, 51.814445, 50.73639 ,\n", + " 54.754166, 54.125275, 43.404 , 63.84889 , 56.028057,\n", + " 58.80111 , 57.393612, 45.56139 , 48.942223, 49.04222 ,\n", + " 78.90667 , 52.802223, 52.083332, 53.662117, 27.28999 ,\n", + " 51.54111 , 47.83861 , 40.384445, 47.766666, 51.974445,\n", + " 49.573395, 54.433334, 56.161945, 59.779167, 40.3358 ,\n", + " 49.733334, 54.9 , 52.11667 , 43.15 , 35.316666,\n", + " 55.37611 , 47.914722, 53.166668, 42.1 , 60.53002 ,\n", + " 51.53014 , 52.86861 , 55.375 , 54.925556, 50.65 ,\n", + " 49.066666, 31.34274 , 68. , 46.5475 , 52.3 ,\n", + " 46.677776, 52.178074, 53.33389 , 38.8763 ],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station',),\n", + " 'units': 'degrees_north',\n", + " 'long_name': 'latitude',\n", + " 'standard_name': 'latitude',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 6.944469, 8.904696, 6.979203, 8.463339,\n", + " 33.05806 , 9.603059, 11.533614, 12.088669,\n", + " 25.930569, 21.845013, -4.350556, -8.923611,\n", + " 4.316389, -3.534167, -4.85 , -3.1425 ,\n", + " 3.315833, -6.923611, -1.101111, -5.8975 ,\n", + " 0.734722, -7.704722, -6.555278, -1.325286,\n", + " -3.2429 , 19.55 , -10.233334, -21.016945,\n", + " 8.637225, 8.269 , 13.916667, 8.883333,\n", + " 11.066667, 21.97139 , 15.739722, 17.534445,\n", + " 22.038055, 21.947 , 15.335577, 13.149458,\n", + " 17.381958, 11.91418 , 14.862789, 19.592232,\n", + " 22.259731, 11.888333, 10.759444, 6.566667,\n", + " 17.934017, 33.749886, 5.853611, 14.441389,\n", + " 44.260582, 16.766666, 4.923611, 15.080278,\n", + " 12.733333, 21.173056, 21.377222, 18.1245 ,\n", + " 16.05 , 37.8 , 5.2 , 19.133333,\n", + " 25.666668, 21.030556, 7.908611, 13.033333,\n", + " 12.633333, 27.66754 , 12.93386 , -6.924722,\n", + " -7.342778, 8.309722, 10.766667, 13.6 ,\n", + " 27.217775, 24.237223, 7.985 , 4.5 ,\n", + " 12.972222, -6.364524, 6.277222, 16.2322 ],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station',),\n", + " 'units': 'degrees_east',\n", + " 'long_name': 'longitude',\n", + " 'standard_name': 'longitude',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading station_start_date var (1/17)\n", + "Rank 000: Loaded station_start_date var ((84, 75))\n", + "Rank 000: Loading station_zone var (2/17)\n", + "Rank 000: Loaded station_zone var ((84, 75))\n", + "Rank 000: Loading street_type var (3/17)\n", + "Rank 000: Loaded street_type var ((84, 75))\n", + "Rank 000: Loading country_code var (4/17)\n", + "Rank 000: Loaded country_code var ((84, 75))\n", + "Rank 000: Loading ccaa var (5/17)\n", + "Rank 000: Loaded ccaa var ((84, 75))\n", + "Rank 000: Loading station_name var (6/17)\n", + "Rank 000: Loaded station_name var ((84, 75))\n", + "Rank 000: Loading station_area var (7/17)\n", + "Rank 000: Loaded station_area var ((84, 75))\n", + "Rank 000: Loading city var (8/17)\n", + "Rank 000: Loaded city var ((84, 75))\n", + "Rank 000: Loading pm10 var (9/17)\n", + "Rank 000: Loaded pm10 var ((31, 84))\n", + "Rank 000: Loading station_emep var (10/17)\n", + "Rank 000: Loaded station_emep var ((84, 75))\n", + "Rank 000: Loading station_type var (11/17)\n", + "Rank 000: Loaded station_type var ((84, 75))\n", + "Rank 000: Loading country var (12/17)\n", + "Rank 000: Loaded country var ((84, 75))\n", + "Rank 000: Loading altitude var (13/17)\n", + "Rank 000: Loaded altitude var ((84,))\n", + "Rank 000: Loading station_code var (14/17)\n", + "Rank 000: Loaded station_code var ((84, 75))\n", + "Rank 000: Loading station_end_date var (15/17)\n", + "Rank 000: Loaded station_end_date var ((84, 75))\n", + "Rank 000: Loading station_rural_back var (16/17)\n", + "Rank 000: Loaded station_rural_back var ((84, 75))\n", + "Rank 000: Loading station_ozone_classification var (17/17)\n", + "Rank 000: Loaded station_ozone_classification var ((84, 75))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'station_start_date': {'data': masked_array(\n", + " data=[[b'1', b'9', b'8', ..., b'n', b'n', b'n'],\n", + " [b'1', b'9', b'8', ..., b'n', b'n', b'n'],\n", + " [b'1', b'9', b'9', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_zone': {'data': masked_array(\n", + " data=[[b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " 
dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'street_type': {'data': masked_array(\n", + " data=[[b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'country_code': {'data': masked_array(\n", + " data=[[b'C', b'H', b'n', ..., b'n', b'n', b'n'],\n", + " [b'C', b'H', b'n', ..., b'n', b'n', b'n'],\n", + " [b'C', b'H', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'I', b'E', b'n', ..., b'n', b'n', b'n'],\n", + " [b'N', b'L', b'n', ..., b'n', b'n', b'n'],\n", + " [b'I', b'T', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'ccaa': {'data': masked_array(\n", + " data=[[b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_name': {'data': masked_array(\n", + " data=[[b'p', b'a', b'y', ..., b'n', b'n', b'n'],\n", + " [b't', b'n', b'i', ..., b'n', b'n', b'n'],\n", + " [b'c', b'h', b'a', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'c', b'a', b'r', ..., b'n', b'n', b'n'],\n", + " [b'k', b'o', b'l', ..., b'n', b'n', b'n'],\n", + " [b'l', b'a', b'm', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_area': {'data': masked_array(\n", + " data=[[b'r', b'u', b'r', ..., b'n', b'n', b'n'],\n", + " [b'r', b'u', b'r', 
..., b'n', b'n', b'n'],\n", + " [b'r', b'u', b'r', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'city': {'data': masked_array(\n", + " data=[[b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'pm10': {'data': masked_array(\n", + " data=[[17.899999618530273, 15.399999618530273, 16.200000762939453, ...,\n", + " nan, 14.829999923706055, nan],\n", + " [22.899999618530273, 16.799999237060547, 20.5, ..., nan,\n", + " 24.799999237060547, nan],\n", + " [24.100000381469727, 22.799999237060547, 20.200000762939453, ...,\n", + " nan, 13.920000076293945, nan],\n", + " ...,\n", + " [6.300000190734863, 7.199999809265137, 4.5, ..., nan,\n", + " 8.319999694824219, 9.300000190734863],\n", + " [7.599999904632568, 8.899999618530273, 8.199999809265137, ...,\n", + " nan, 13.020000457763672, 9.300000190734863],\n", + " [11.0, 9.800000190734863, 8.800000190734863, ..., nan,\n", + " 14.140000343322754, 12.699999809265137]],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=-9999.0,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'station'),\n", + " 'units': 'µg/m3',\n", + " 'long_name': 'pm10_mass pm10'},\n", + " 'station_emep': {'data': masked_array(\n", + " data=[[b'y', b'e', b's', ..., b'n', b'n', b'n'],\n", + " [b'y', b'e', b's', ..., b'n', b'n', b'n'],\n", + " [b'y', b'e', b's', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'y', b'e', b's', ..., b'n', b'n', b'n'],\n", + " [b'y', b'e', b's', ..., b'n', b'n', b'n'],\n", + " [b'n', b'o', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " 
fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_type': {'data': masked_array(\n", + " data=[[b'b', b'a', b'c', ..., b'n', b'n', b'n'],\n", + " [b'b', b'a', b'c', ..., b'n', b'n', b'n'],\n", + " [b'b', b'a', b'c', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'country': {'data': masked_array(\n", + " data=[[b's', b'w', b'i', ..., b'n', b'n', b'n'],\n", + " [b's', b'w', b'i', ..., b'n', b'n', b'n'],\n", + " [b's', b'w', b'i', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'I', b'r', b'e', ..., b'n', b'n', b'n'],\n", + " [b'N', b'e', b't', ..., b'n', b'n', b'n'],\n", + " [b'I', b't', b'a', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'altitude': {'data': masked_array(data=[4.890e+02, 5.380e+02, 1.136e+03, 1.031e+03, 5.320e+02,\n", + " 8.000e+00, 1.000e+01, 3.000e+00, 3.200e+01, 6.000e+00,\n", + " 9.170e+02, 6.850e+02, 7.800e+01, 1.230e+03, 1.340e+02,\n", + " 1.360e+03, 2.300e+01, 3.930e+02, 8.850e+02, 9.850e+02,\n", + " 4.700e+02, 5.060e+02, 5.000e+00, 1.260e+02, 2.600e+02,\n", + " 1.250e+02, 1.000e+01, 6.600e+01, 2.090e+02, 2.190e+02,\n", + " 4.390e+02, 2.100e+02, 3.000e+02, 1.800e+02, 1.603e+03,\n", + " 4.000e+00, 1.570e+02, 8.060e+02, 3.800e+02, 1.630e+02,\n", + " 2.500e+01, 1.000e+01, 5.400e+02, 2.008e+03, 3.450e+02,\n", + " 4.740e+02, 7.400e+01, 2.000e+01, 1.210e+02, 7.000e+00,\n", + " 2.800e+01, 8.990e+02, 2.080e+03, 1.170e+02, 1.000e+00,\n", + " 5.350e+02, 1.000e+00, 1.800e+01, 7.000e+00, 3.600e+01,\n", + " 7.370e+02, 1.500e+02, 5.000e+00, 1.450e+03, 2.500e+02,\n", + " 5.000e+00, 1.205e+03, 6.200e+01, 4.800e+01, 4.000e+00,\n", + " 8.600e+01, 5.900e+01, 2.000e+01, 1.200e+01, 9.370e+02,\n", + " 1.118e+03, 3.500e+01, 3.400e+02, 3.578e+03, 4.000e+00,\n", + " 1.020e+03, 9.000e+00, 1.000e+00, 6.000e+00],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station',),\n", + " 'units': 'meters',\n", + " 'standard_name': 'altitude'},\n", + " 'station_code': {'data': masked_array(\n", + " data=[[b'C', b'H', b'0', ..., b'n', b'n', b'n'],\n", + " [b'C', b'H', b'0', ..., b'n', b'n', b'n'],\n", + " [b'C', b'H', b'0', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'I', b'E', b'0', ..., b'n', b'n', b'n'],\n", + " [b'N', b'L', b'0', ..., b'n', b'n', b'n'],\n", + " [b'I', b'T', b'0', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, 
False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_end_date': {'data': masked_array(\n", + " data=[[b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_rural_back': {'data': masked_array(\n", + " data=[[b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''},\n", + " 'station_ozone_classification': {'data': masked_array(\n", + " data=[[b'r', b'u', b'r', ..., b'n', b'n', b'n'],\n", + " [b'r', b'u', b'r', ..., b'n', b'n', b'n'],\n", + " [b'r', b'u', b'r', ..., b'n', b'n', b'n'],\n", + " ...,\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n'],\n", + " [b'n', b'a', b'n', ..., b'n', b'n', b'n']],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=b'',\n", + " dtype='|S1'), 'dimensions': ('station', 'strlen'), 'standard_name': ''}}" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[17.899999618530273, 15.399999618530273, 16.200000762939453, ...,\n", + " nan, 14.829999923706055, nan],\n", + " [22.899999618530273, 16.799999237060547, 20.5, ..., nan,\n", + " 24.799999237060547, nan],\n", + " [24.100000381469727, 22.799999237060547, 20.200000762939453, ...,\n", + " nan, 13.920000076293945, nan],\n", + " ...,\n", + " [6.300000190734863, 7.199999809265137, 
4.5, ..., nan,\n", + " 8.319999694824219, 9.300000190734863],\n", + " [7.599999904632568, 8.899999618530273, 8.199999809265137, ...,\n", + " nan, 13.020000457763672, 9.300000190734863],\n", + " [11.0, 9.800000190734863, 8.800000190734863, ..., nan,\n", + " 14.140000343322754, 12.699999809265137]],\n", + " mask=[[False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " ...,\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False],\n", + " [False, False, False, ..., False, False, False]],\n", + " fill_value=-9999.0,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'station'),\n", + " 'units': 'µg/m3',\n", + " 'long_name': 'pm10_mass pm10'}" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables['pm10']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_start_date var (1/17)\n", + "Rank 000: Var station_start_date created (1/17)\n", + "Rank 000: Var station_start_date data (1/17)\n", + "Rank 000: Var station_start_date completed (1/17)\n", + "Rank 000: Writing station_zone var (2/17)\n", + "Rank 000: Var station_zone created (2/17)\n", + "Rank 000: Var station_zone data (2/17)\n", + "Rank 000: Var station_zone completed (2/17)\n", + "Rank 000: Writing street_type var (3/17)\n", + "Rank 000: Var street_type created (3/17)\n", + "Rank 000: Var street_type data (3/17)\n", + "Rank 000: Var street_type completed (3/17)\n", + "Rank 000: Writing country_code var (4/17)\n", + "Rank 000: Var country_code created (4/17)\n", + "Rank 000: Var country_code data (4/17)\n", + "Rank 000: Var country_code completed (4/17)\n", + "Rank 000: Writing ccaa var (5/17)\n", + "Rank 000: Var ccaa created (5/17)\n", + "Rank 000: Var ccaa data (5/17)\n", + "Rank 000: Var ccaa completed (5/17)\n", + "Rank 000: Writing station_name var (6/17)\n", + "Rank 000: Var station_name created (6/17)\n", + "Rank 000: Var station_name data (6/17)\n", + "Rank 000: Var station_name completed (6/17)\n", + "Rank 000: Writing station_area var (7/17)\n", + "Rank 000: Var station_area created (7/17)\n", + "Rank 000: Var station_area data (7/17)\n", + "Rank 000: Var station_area completed (7/17)\n", + "Rank 000: Writing city var (8/17)\n", + "Rank 000: Var city created (8/17)\n", + "Rank 000: Var city data (8/17)\n", + "Rank 000: Var city completed (8/17)\n", + "Rank 000: Writing pm10 var (9/17)\n", + "Rank 000: Var pm10 created (9/17)\n", + "Rank 000: Var pm10 data (9/17)\n", + "Rank 000: Var pm10 completed (9/17)\n", + "Rank 000: Writing station_emep var (10/17)\n", + "Rank 000: Var station_emep created (10/17)\n", + "Rank 000: Var station_emep data (10/17)\n", + "Rank 000: Var station_emep completed (10/17)\n", + "Rank 000: Writing station_type var (11/17)\n", + "Rank 000: Var station_type created (11/17)\n", + "Rank 000: Var station_type data (11/17)\n", + "Rank 000: Var station_type completed (11/17)\n", + "Rank 000: Writing country var (12/17)\n", + "Rank 000: Var country created (12/17)\n", + "Rank 000: Var country data (12/17)\n", + "Rank 000: Var 
country completed (12/17)\n", + "Rank 000: Writing altitude var (13/17)\n", + "Rank 000: Var altitude created (13/17)\n", + "Rank 000: Var altitude data (13/17)\n", + "Rank 000: Var altitude completed (13/17)\n", + "Rank 000: Writing station_code var (14/17)\n", + "Rank 000: Var station_code created (14/17)\n", + "Rank 000: Var station_code data (14/17)\n", + "Rank 000: Var station_code completed (14/17)\n", + "Rank 000: Writing station_end_date var (15/17)\n", + "Rank 000: Var station_end_date created (15/17)\n", + "Rank 000: Var station_end_date data (15/17)\n", + "Rank 000: Var station_end_date completed (15/17)\n", + "Rank 000: Writing station_rural_back var (16/17)\n", + "Rank 000: Var station_rural_back created (16/17)\n", + "Rank 000: Var station_rural_back data (16/17)\n", + "Rank 000: Var station_rural_back completed (16/17)\n", + "Rank 000: Writing station_ozone_classification var (17/17)\n", + "Rank 000: Var station_ozone_classification created (17/17)\n", + "Rank 000: Var station_ozone_classification data (17/17)\n", + "Rank 000: Var station_ozone_classification completed (17/17)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('points_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": true + }, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('points_file_1.nc', info=True, parallel_method='X')\n", + "nessy_2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                       (time: 31, station: 84)\n",
+       "Coordinates:\n",
+       "  * time                          (time) datetime64[ns] 2015-07-01 ... 2015-0...\n",
+       "  * station                       (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n",
+       "Data variables: (12/19)\n",
+       "    station_start_date            (station) |S75 b'1980-01-01nnnnnnnnnnnnnnnn...\n",
+       "    station_zone                  (station) |S75 b'nannnnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    street_type                   (station) |S75 b'nannnnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    country_code                  (station) |S75 b'CHnnnnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    ccaa                          (station) |S75 b'nannnnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    station_name                  (station) |S75 b'payernennnnnnnnnnnnnnnnnnn...\n",
+       "    ...                            ...\n",
+       "    station_code                  (station) |S75 b'CH0002Rnnnnnnnnnnnnnnnnnnn...\n",
+       "    station_end_date              (station) |S75 b'nannnnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    station_rural_back            (station) |S75 b'nannnnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    station_ozone_classification  (station) |S75 b'ruralnnnnnnnnnnnnnnnnnnnnn...\n",
+       "    lat                           (station) float64 46.81 47.48 ... 53.33 38.88\n",
+       "    lon                           (station) float64 6.944 8.905 ... 6.277 16.23\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 31, station: 84)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2015-07-01 ... 2015-0...\n", + " * station (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n", + "Data variables: (12/19)\n", + " station_start_date (station) |S75 ...\n", + " station_zone (station) |S75 ...\n", + " street_type (station) |S75 ...\n", + " country_code (station) |S75 ...\n", + " ccaa (station) |S75 ...\n", + " station_name (station) |S75 ...\n", + " ... ...\n", + " station_code (station) |S75 ...\n", + " station_end_date (station) |S75 ...\n", + " station_rural_back (station) |S75 ...\n", + " station_ozone_classification (station) |S75 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_file_1.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Read and write - GHOST type" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.4/hourly/sconcno2/sconcno2_202004.nc' #EBAS\n", + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/AERONET_v3/1.3.3/hourly/od1020aero/od1020aero_201907.nc' # AERONET\n", + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/CANADA_NAPS/1.4/daily/sconcno2/sconcno2_202011.nc' # CANADA NAPS\n", + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/CHILE_SINCA/1.4/monthly/pm10/pm10_201905.nc' # CHILE SINCA\n", + "nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EANET/1.4/daily/sconcso4/sconcso4_201911.nc' # EANET" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                                                           (station: 3, time: 30, N_flag_codes: 190, N_qa_codes: 77)\n",
+       "Coordinates:\n",
+       "  * time                                                              (time) datetime64[ns] ...\n",
+       "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n",
+       "Data variables: (12/176)\n",
+       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NMVOC_emissions                       (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NOx_emissions                         (station) float32 ...\n",
+       "    ...                                                                ...\n",
+       "    station_timezone                                                  (station) object ...\n",
+       "    street_type                                                       (station) object ...\n",
+       "    street_width                                                      (station) float32 ...\n",
+       "    terrain                                                           (station) object ...\n",
+       "    vertical_datum                                                    (station) object ...\n",
+       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
+       "Attributes:\n",
+       "    title:          Surface sulphate data in the EANET network in 2019-11.\n",
+       "    institution:    Barcelona Supercomputing Center\n",
+       "    source:         Surface observations\n",
+       "    creator_name:   Dene R. Bowdalo\n",
+       "    creator_email:  dene.bowdalo@bsc.es\n",
+       "    version:        1.4
" + ], + "text/plain": [ + "\n", + "Dimensions: (station: 3, time: 30, N_flag_codes: 190, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n", + "Data variables: (12/176)\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NMVOC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NOx_emissions (station) float32 ...\n", + " ... ...\n", + " station_timezone (station) object ...\n", + " street_type (station) object ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + "Attributes:\n", + " title: Surface sulphate data in the EANET network in 2019-11.\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " version: 1.4" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1 = open_netcdf(path=nc_path_2, info=True, parallel_method='X')\n", + "nessy_ghost_1" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2019, 11, 1, 0, 0),\n", + " datetime.datetime(2019, 11, 2, 0, 0),\n", + " datetime.datetime(2019, 11, 3, 0, 0),\n", + " datetime.datetime(2019, 11, 4, 0, 0),\n", + " datetime.datetime(2019, 11, 5, 0, 0),\n", + " datetime.datetime(2019, 11, 6, 0, 0),\n", + " datetime.datetime(2019, 11, 7, 0, 0),\n", + " datetime.datetime(2019, 11, 8, 0, 0),\n", + " datetime.datetime(2019, 11, 9, 0, 0),\n", + " datetime.datetime(2019, 11, 10, 0, 0),\n", + " datetime.datetime(2019, 11, 11, 0, 0),\n", + " datetime.datetime(2019, 11, 12, 0, 0),\n", + " datetime.datetime(2019, 11, 13, 0, 0),\n", + " datetime.datetime(2019, 11, 14, 0, 0),\n", + " datetime.datetime(2019, 11, 15, 0, 0),\n", + " datetime.datetime(2019, 11, 16, 0, 0),\n", + " datetime.datetime(2019, 11, 17, 0, 0),\n", + " datetime.datetime(2019, 11, 18, 0, 0),\n", + " datetime.datetime(2019, 11, 19, 0, 0),\n", + " datetime.datetime(2019, 11, 20, 0, 0),\n", + " datetime.datetime(2019, 11, 21, 0, 0),\n", + " datetime.datetime(2019, 11, 22, 0, 0),\n", + " datetime.datetime(2019, 11, 23, 0, 0),\n", + " datetime.datetime(2019, 11, 24, 0, 0),\n", + " datetime.datetime(2019, 11, 25, 0, 0),\n", + " datetime.datetime(2019, 11, 26, 0, 0),\n", + " datetime.datetime(2019, 11, 27, 0, 0),\n", + " datetime.datetime(2019, 11, 28, 0, 0),\n", + " datetime.datetime(2019, 11, 29, 0, 0),\n", + " datetime.datetime(2019, 11, 30, 0, 0)]" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": 
{}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0, 1, 2]), 'units': ''}" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.station" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[37.70888889, 33.29222222, 35.6025 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[126.27388889, 126.16194444, 127.18138889],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading ASTER_v3_altitude var (1/172)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (2/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (3/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (4/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (6/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (7/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (8/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (9/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/172)\n", + "Rank 000: Loaded 
EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/172)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (12/172)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_Meybeck_landform_classification var (13/172)\n", + "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (14/172)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (15/172)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (16/172)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (17/172)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ETOPO1_altitude var (18/172)\n", + "Rank 000: Loaded ETOPO1_altitude var ((3,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (19/172)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((3,))\n", + "Rank 000: Loading GHOST_version var (20/172)\n", + "Rank 000: Loaded GHOST_version var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (21/172)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (22/172)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (23/172)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (24/172)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (25/172)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (26/172)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (27/172)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (28/172)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (29/172)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (30/172)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (31/172)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((3,))\n", + "Rank 000: Loading GHSL_population_density var (32/172)\n", + "Rank 000: Loaded GHSL_population_density var ((3,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (33/172)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (34/172)\n", + 
"Rank 000: Loaded GPW_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (35/172)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (36/172)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_5km var (37/172)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_population_density var (38/172)\n", + "Rank 000: Loaded GPW_population_density var ((3,))\n", + "Rank 000: Loading GSFC_coastline_proximity var (39/172)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((3,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (40/172)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (41/172)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (42/172)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (43/172)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (44/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (45/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (46/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (49/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (50/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/172)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/172)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/172)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/172)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/172)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/172)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((3,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (58/172)\n", + "Rank 000: Loaded 
OMI_level3_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (59/172)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (60/172)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/172)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (62/172)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (63/172)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (64/172)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((3,))\n", + "Rank 000: Loading WMO_region var (65/172)\n", + "Rank 000: Loaded WMO_region var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (66/172)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biome var (67/172)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((3,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (68/172)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((3,))\n", + "Rank 000: Loading administrative_country_division_1 var (69/172)\n", + "Rank 000: Loaded administrative_country_division_1 var ((3,))\n", + "Rank 000: Loading administrative_country_division_2 var (70/172)\n", + "Rank 000: Loaded administrative_country_division_2 var ((3,))\n", + "Rank 000: Loading altitude var (71/172)\n", + "Rank 000: Loaded altitude var ((3,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (72/172)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading annual_native_representativity_percent var (73/172)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading area_classification var (74/172)\n", + "Rank 000: Loaded area_classification var ((3,))\n", + "Rank 000: Loading associated_networks var (75/172)\n", + "Rank 000: Loaded associated_networks var ((3,))\n", + "Rank 000: Loading city var (76/172)\n", + "Rank 000: Loaded city var ((3,))\n", + "Rank 000: Loading climatology var (77/172)\n", + "Rank 000: Loaded climatology var ((3,))\n", + "Rank 000: Loading contact_email_address var (78/172)\n", + "Rank 000: Loaded contact_email_address var ((3,))\n", + "Rank 000: Loading contact_institution var (79/172)\n", + "Rank 000: Loaded contact_institution var ((3,))\n", + "Rank 000: Loading contact_name var (80/172)\n", + "Rank 000: Loaded contact_name var ((3,))\n", + "Rank 000: Loading country var (81/172)\n", + "Rank 000: Loaded country var ((3,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (82/172)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading daily_native_representativity_percent var (83/172)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading daily_passing_vehicles var (84/172)\n", + "Rank 000: Loaded daily_passing_vehicles var ((3,))\n", + "Rank 000: Loading data_level var (85/172)\n", + "Rank 000: Loaded data_level var ((3,))\n", + "Rank 000: Loading data_licence var (86/172)\n", + "Rank 
000: Loaded data_licence var ((3,))\n", + "Rank 000: Loading day_night_code var (87/172)\n", + "Rank 000: Loaded day_night_code var ((3, 30))\n", + "Rank 000: Loading daytime_traffic_speed var (88/172)\n", + "Rank 000: Loaded daytime_traffic_speed var ((3,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (89/172)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading distance_to_building var (90/172)\n", + "Rank 000: Loaded distance_to_building var ((3,))\n", + "Rank 000: Loading distance_to_junction var (91/172)\n", + "Rank 000: Loaded distance_to_junction var ((3,))\n", + "Rank 000: Loading distance_to_kerb var (92/172)\n", + "Rank 000: Loaded distance_to_kerb var ((3,))\n", + "Rank 000: Loading distance_to_source var (93/172)\n", + "Rank 000: Loaded distance_to_source var ((3,))\n", + "Rank 000: Loading ellipsoid var (94/172)\n", + "Rank 000: Loaded ellipsoid var ((3,))\n", + "Rank 000: Loading horizontal_datum var (95/172)\n", + "Rank 000: Loaded horizontal_datum var ((3,))\n", + "Rank 000: Loading land_use var (96/172)\n", + "Rank 000: Loaded land_use var ((3,))\n", + "Rank 000: Loading main_emission_source var (97/172)\n", + "Rank 000: Loaded main_emission_source var ((3,))\n", + "Rank 000: Loading measurement_altitude var (98/172)\n", + "Rank 000: Loaded measurement_altitude var ((3,))\n", + "Rank 000: Loading measurement_methodology var (99/172)\n", + "Rank 000: Loaded measurement_methodology var ((3,))\n", + "Rank 000: Loading measurement_scale var (100/172)\n", + "Rank 000: Loaded measurement_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (101/172)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (102/172)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (103/172)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (104/172)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (105/172)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (106/172)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (107/172)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_span_drift var (108/172)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (109/172)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (110/172)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (111/172)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zonal_drift var (112/172)\n", + 
"Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_further_details var (113/172)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (114/172)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((3,))\n", + "Rank 000: Loading measuring_instrument_manual_name var (115/172)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_name var (116/172)\n", + "Rank 000: Loaded measuring_instrument_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_process_details var (117/172)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (118/172)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (119/172)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (120/172)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (121/172)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (122/172)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (123/172)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (124/172)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (125/172)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (126/172)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (127/172)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (128/172)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (129/172)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_sampling_type var (130/172)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((3,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (131/172)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading monthly_native_representativity_percent var (132/172)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading network var (133/172)\n", + "Rank 000: Loaded network var ((3,))\n", + "Rank 000: Loading network_maintenance_details var (134/172)\n", + "Rank 000: Loaded network_maintenance_details var ((3,))\n", + "Rank 000: Loading network_miscellaneous_details var (135/172)\n", + "Rank 000: Loaded network_miscellaneous_details var ((3,))\n", + 
"Rank 000: Loading network_provided_volume_standard_pressure var (136/172)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((3,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (137/172)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((3,))\n", + "Rank 000: Loading network_qa_details var (138/172)\n", + "Rank 000: Loaded network_qa_details var ((3,))\n", + "Rank 000: Loading network_sampling_details var (139/172)\n", + "Rank 000: Loaded network_sampling_details var ((3,))\n", + "Rank 000: Loading network_uncertainty_details var (140/172)\n", + "Rank 000: Loaded network_uncertainty_details var ((3,))\n", + "Rank 000: Loading population var (141/172)\n", + "Rank 000: Loaded population var ((3,))\n", + "Rank 000: Loading primary_sampling_further_details var (142/172)\n", + "Rank 000: Loaded primary_sampling_further_details var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (143/172)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (144/172)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (145/172)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (146/172)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_process_details var (147/172)\n", + "Rank 000: Loaded primary_sampling_process_details var ((3,))\n", + "Rank 000: Loading primary_sampling_type var (148/172)\n", + "Rank 000: Loaded primary_sampling_type var ((3,))\n", + "Rank 000: Loading principal_investigator_email_address var (149/172)\n", + "Rank 000: Loaded principal_investigator_email_address var ((3,))\n", + "Rank 000: Loading principal_investigator_institution var (150/172)\n", + "Rank 000: Loaded principal_investigator_institution var ((3,))\n", + "Rank 000: Loading principal_investigator_name var (151/172)\n", + "Rank 000: Loaded principal_investigator_name var ((3,))\n", + "Rank 000: Loading process_warnings var (152/172)\n", + "Rank 000: Loaded process_warnings var ((3,))\n", + "Rank 000: Loading projection var (153/172)\n", + "Rank 000: Loaded projection var ((3,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (154/172)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading representative_radius var (155/172)\n", + "Rank 000: Loaded representative_radius var ((3,))\n", + "Rank 000: Loading retrieval_algorithm var (156/172)\n", + "Rank 000: Loaded retrieval_algorithm var ((3,))\n", + "Rank 000: Loading sample_preparation_further_details var (157/172)\n", + "Rank 000: Loaded sample_preparation_further_details var ((3,))\n", + "Rank 000: Loading sample_preparation_process_details var (158/172)\n", + "Rank 000: Loaded sample_preparation_process_details var ((3,))\n", + "Rank 000: Loading sample_preparation_techniques var (159/172)\n", + "Rank 000: Loaded sample_preparation_techniques var ((3,))\n", + "Rank 000: Loading sample_preparation_types var (160/172)\n", + "Rank 000: Loaded sample_preparation_types var ((3,))\n", + "Rank 000: Loading sampling_height var (161/172)\n", + "Rank 000: Loaded sampling_height var ((3,))\n", + "Rank 000: Loading sconcso4 var (162/172)\n", + "Rank 000: Loaded sconcso4 
var ((3, 30))\n", + "Rank 000: Loading season_code var (163/172)\n", + "Rank 000: Loaded season_code var ((3, 30))\n", + "Rank 000: Loading station_classification var (164/172)\n", + "Rank 000: Loaded station_classification var ((3,))\n", + "Rank 000: Loading station_name var (165/172)\n", + "Rank 000: Loaded station_name var ((3,))\n", + "Rank 000: Loading station_reference var (166/172)\n", + "Rank 000: Loaded station_reference var ((3,))\n", + "Rank 000: Loading station_timezone var (167/172)\n", + "Rank 000: Loaded station_timezone var ((3,))\n", + "Rank 000: Loading street_type var (168/172)\n", + "Rank 000: Loaded street_type var ((3,))\n", + "Rank 000: Loading street_width var (169/172)\n", + "Rank 000: Loaded street_width var ((3,))\n", + "Rank 000: Loading terrain var (170/172)\n", + "Rank 000: Loaded terrain var ((3,))\n", + "Rank 000: Loading vertical_datum var (171/172)\n", + "Rank 000: Loaded vertical_datum var ((3,))\n", + "Rank 000: Loading weekday_weekend_code var (172/172)\n", + "Rank 000: Loaded weekday_weekend_code var ((3, 30))\n" + ] + } + ], + "source": [ + "nessy_ghost_1.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_file_2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing ASTER_v3_altitude var (1/172)\n", + "Rank 000: Var ASTER_v3_altitude created (1/172)\n", + "Rank 000: Var ASTER_v3_altitude data (1/172)\n", + "Rank 000: Var ASTER_v3_altitude completed (1/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_BC_emissions var (2/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions created (2/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions data (2/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions completed (2/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_CO_emissions var (3/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions created (3/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions data (3/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions completed (3/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NH3_emissions var (4/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions created (4/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions data (4/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions completed (4/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions created (5/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions data (5/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions completed (5/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NOx_emissions var (6/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions created (6/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions data (6/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions completed (6/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_OC_emissions var (7/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions created (7/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions data (7/172)\n", + "Rank 000: Var 
EDGAR_v4.3.2_annual_average_OC_emissions completed (7/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_PM10_emissions var (8/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions created (8/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions data (8/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions completed (8/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_SO2_emissions var (9/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions created (9/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions data (9/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions completed (9/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions created (10/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions data (10/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions completed (10/172)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions created (11/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions data (11/172)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions completed (11/172)\n", + "Rank 000: Writing ESDAC_Iwahashi_landform_classification var (12/172)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification created (12/172)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification data (12/172)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification completed (12/172)\n", + "Rank 000: Writing ESDAC_Meybeck_landform_classification var (13/172)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification created (13/172)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification data (13/172)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification completed (13/172)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_25km var (14/172)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km created (14/172)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km data (14/172)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km completed (14/172)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_5km var (15/172)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km created (15/172)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km data (15/172)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km completed (15/172)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_25km var (16/172)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km created (16/172)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km data (16/172)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km completed (16/172)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_5km var (17/172)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km created (17/172)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km data (17/172)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km completed (17/172)\n", + "Rank 000: Writing ETOPO1_altitude var (18/172)\n", + "Rank 000: Var ETOPO1_altitude created (18/172)\n", + "Rank 000: Var 
ETOPO1_altitude data (18/172)\n", + "Rank 000: Var ETOPO1_altitude completed (18/172)\n", + "Rank 000: Writing ETOPO1_max_altitude_difference_5km var (19/172)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km created (19/172)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km data (19/172)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km completed (19/172)\n", + "Rank 000: Writing GHOST_version var (20/172)\n", + "Rank 000: Var GHOST_version created (20/172)\n", + "Rank 000: Var GHOST_version data (20/172)\n", + "Rank 000: Var GHOST_version completed (20/172)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_25km var (21/172)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km created (21/172)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km data (21/172)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km completed (21/172)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_5km var (22/172)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km created (22/172)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km data (22/172)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km completed (22/172)\n", + "Rank 000: Writing GHSL_average_population_density_25km var (23/172)\n", + "Rank 000: Var GHSL_average_population_density_25km created (23/172)\n", + "Rank 000: Var GHSL_average_population_density_25km data (23/172)\n", + "Rank 000: Var GHSL_average_population_density_25km completed (23/172)\n", + "Rank 000: Writing GHSL_average_population_density_5km var (24/172)\n", + "Rank 000: Var GHSL_average_population_density_5km created (24/172)\n", + "Rank 000: Var GHSL_average_population_density_5km data (24/172)\n", + "Rank 000: Var GHSL_average_population_density_5km completed (24/172)\n", + "Rank 000: Writing GHSL_built_up_area_density var (25/172)\n", + "Rank 000: Var GHSL_built_up_area_density created (25/172)\n", + "Rank 000: Var GHSL_built_up_area_density data (25/172)\n", + "Rank 000: Var GHSL_built_up_area_density completed (25/172)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_25km var (26/172)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km created (26/172)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km data (26/172)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km completed (26/172)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_5km var (27/172)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km created (27/172)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km data (27/172)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km completed (27/172)\n", + "Rank 000: Writing GHSL_max_population_density_25km var (28/172)\n", + "Rank 000: Var GHSL_max_population_density_25km created (28/172)\n", + "Rank 000: Var GHSL_max_population_density_25km data (28/172)\n", + "Rank 000: Var GHSL_max_population_density_25km completed (28/172)\n", + "Rank 000: Writing GHSL_max_population_density_5km var (29/172)\n", + "Rank 000: Var GHSL_max_population_density_5km created (29/172)\n", + "Rank 000: Var GHSL_max_population_density_5km data (29/172)\n", + "Rank 000: Var GHSL_max_population_density_5km completed (29/172)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_25km var (30/172)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km created (30/172)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km data (30/172)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km completed (30/172)\n", + 
"Rank 000: Writing GHSL_modal_settlement_model_classification_5km var (31/172)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km created (31/172)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km data (31/172)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km completed (31/172)\n", + "Rank 000: Writing GHSL_population_density var (32/172)\n", + "Rank 000: Var GHSL_population_density created (32/172)\n", + "Rank 000: Var GHSL_population_density data (32/172)\n", + "Rank 000: Var GHSL_population_density completed (32/172)\n", + "Rank 000: Writing GHSL_settlement_model_classification var (33/172)\n", + "Rank 000: Var GHSL_settlement_model_classification created (33/172)\n", + "Rank 000: Var GHSL_settlement_model_classification data (33/172)\n", + "Rank 000: Var GHSL_settlement_model_classification completed (33/172)\n", + "Rank 000: Writing GPW_average_population_density_25km var (34/172)\n", + "Rank 000: Var GPW_average_population_density_25km created (34/172)\n", + "Rank 000: Var GPW_average_population_density_25km data (34/172)\n", + "Rank 000: Var GPW_average_population_density_25km completed (34/172)\n", + "Rank 000: Writing GPW_average_population_density_5km var (35/172)\n", + "Rank 000: Var GPW_average_population_density_5km created (35/172)\n", + "Rank 000: Var GPW_average_population_density_5km data (35/172)\n", + "Rank 000: Var GPW_average_population_density_5km completed (35/172)\n", + "Rank 000: Writing GPW_max_population_density_25km var (36/172)\n", + "Rank 000: Var GPW_max_population_density_25km created (36/172)\n", + "Rank 000: Var GPW_max_population_density_25km data (36/172)\n", + "Rank 000: Var GPW_max_population_density_25km completed (36/172)\n", + "Rank 000: Writing GPW_max_population_density_5km var (37/172)\n", + "Rank 000: Var GPW_max_population_density_5km created (37/172)\n", + "Rank 000: Var GPW_max_population_density_5km data (37/172)\n", + "Rank 000: Var GPW_max_population_density_5km completed (37/172)\n", + "Rank 000: Writing GPW_population_density var (38/172)\n", + "Rank 000: Var GPW_population_density created (38/172)\n", + "Rank 000: Var GPW_population_density data (38/172)\n", + "Rank 000: Var GPW_population_density completed (38/172)\n", + "Rank 000: Writing GSFC_coastline_proximity var (39/172)\n", + "Rank 000: Var GSFC_coastline_proximity created (39/172)\n", + "Rank 000: Var GSFC_coastline_proximity data (39/172)\n", + "Rank 000: Var GSFC_coastline_proximity completed (39/172)\n", + "Rank 000: Writing Joly-Peuch_classification_code var (40/172)\n", + "Rank 000: Var Joly-Peuch_classification_code created (40/172)\n", + "Rank 000: Var Joly-Peuch_classification_code data (40/172)\n", + "Rank 000: Var Joly-Peuch_classification_code completed (40/172)\n", + "Rank 000: Writing Koppen-Geiger_classification var (41/172)\n", + "Rank 000: Var Koppen-Geiger_classification created (41/172)\n", + "Rank 000: Var Koppen-Geiger_classification data (41/172)\n", + "Rank 000: Var Koppen-Geiger_classification completed (41/172)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_25km var (42/172)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km created (42/172)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km data (42/172)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km completed (42/172)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_5km var (43/172)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km created (43/172)\n", + "Rank 000: Var 
Koppen-Geiger_modal_classification_5km data (43/172)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km completed (43/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_IGBP_land_use var (44/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use created (44/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use data (44/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use completed (44/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_LAI var (45/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI created (45/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI data (45/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI completed (45/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_UMD_land_use var (46/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use created (46/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use data (46/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use completed (46/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km created (47/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km data (47/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km completed (47/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km created (48/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km data (48/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km completed (48/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_25km var (49/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km created (49/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km data (49/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km completed (49/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_5km var (50/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km created (50/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km data (50/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km completed (50/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km data (51/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km completed (51/172)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km created (52/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km data (52/172)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km completed (52/172)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km created (53/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km data (53/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km completed (53/172)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km created (54/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km data (54/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km completed (54/172)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/172)\n", + 
"Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km created (55/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km data (55/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km completed (55/172)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km created (56/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km data (56/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km completed (56/172)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights created (57/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights data (57/172)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights completed (57/172)\n", + "Rank 000: Writing OMI_level3_column_annual_average_NO2 var (58/172)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 created (58/172)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 data (58/172)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 completed (58/172)\n", + "Rank 000: Writing OMI_level3_column_cloud_screened_annual_average_NO2 var (59/172)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 created (59/172)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 data (59/172)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 completed (59/172)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_annual_average_NO2 var (60/172)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 created (60/172)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 data (60/172)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 completed (60/172)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/172)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 created (61/172)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 data (61/172)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 completed (61/172)\n", + "Rank 000: Writing UMBC_anthrome_classification var (62/172)\n", + "Rank 000: Var UMBC_anthrome_classification created (62/172)\n", + "Rank 000: Var UMBC_anthrome_classification data (62/172)\n", + "Rank 000: Var UMBC_anthrome_classification completed (62/172)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_25km var (63/172)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km created (63/172)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km data (63/172)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km completed (63/172)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_5km var (64/172)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km created (64/172)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km data (64/172)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km completed (64/172)\n", + "Rank 000: Writing WMO_region var (65/172)\n", + "Rank 000: Var WMO_region created (65/172)\n", + "Rank 000: Var WMO_region data (65/172)\n", + "Rank 000: Var WMO_region completed (65/172)\n", + "Rank 000: Writing WWF_TEOW_biogeographical_realm var (66/172)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm created 
(66/172)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm data (66/172)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm completed (66/172)\n", + "Rank 000: Writing WWF_TEOW_biome var (67/172)\n", + "Rank 000: Var WWF_TEOW_biome created (67/172)\n", + "Rank 000: Var WWF_TEOW_biome data (67/172)\n", + "Rank 000: Var WWF_TEOW_biome completed (67/172)\n", + "Rank 000: Writing WWF_TEOW_terrestrial_ecoregion var (68/172)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion created (68/172)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion data (68/172)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion completed (68/172)\n", + "Rank 000: Writing administrative_country_division_1 var (69/172)\n", + "Rank 000: Var administrative_country_division_1 created (69/172)\n", + "Rank 000: Var administrative_country_division_1 data (69/172)\n", + "Rank 000: Var administrative_country_division_1 completed (69/172)\n", + "Rank 000: Writing administrative_country_division_2 var (70/172)\n", + "Rank 000: Var administrative_country_division_2 created (70/172)\n", + "Rank 000: Var administrative_country_division_2 data (70/172)\n", + "Rank 000: Var administrative_country_division_2 completed (70/172)\n", + "Rank 000: Writing altitude var (71/172)\n", + "Rank 000: Var altitude created (71/172)\n", + "Rank 000: Var altitude data (71/172)\n", + "Rank 000: Var altitude completed (71/172)\n", + "Rank 000: Writing annual_native_max_gap_percent var (72/172)\n", + "Rank 000: Var annual_native_max_gap_percent created (72/172)\n", + "Rank 000: Var annual_native_max_gap_percent data (72/172)\n", + "Rank 000: Var annual_native_max_gap_percent completed (72/172)\n", + "Rank 000: Writing annual_native_representativity_percent var (73/172)\n", + "Rank 000: Var annual_native_representativity_percent created (73/172)\n", + "Rank 000: Var annual_native_representativity_percent data (73/172)\n", + "Rank 000: Var annual_native_representativity_percent completed (73/172)\n", + "Rank 000: Writing area_classification var (74/172)\n", + "Rank 000: Var area_classification created (74/172)\n", + "Rank 000: Var area_classification data (74/172)\n", + "Rank 000: Var area_classification completed (74/172)\n", + "Rank 000: Writing associated_networks var (75/172)\n", + "Rank 000: Var associated_networks created (75/172)\n", + "Rank 000: Var associated_networks data (75/172)\n", + "Rank 000: Var associated_networks completed (75/172)\n", + "Rank 000: Writing city var (76/172)\n", + "Rank 000: Var city created (76/172)\n", + "Rank 000: Var city data (76/172)\n", + "Rank 000: Var city completed (76/172)\n", + "Rank 000: Writing climatology var (77/172)\n", + "Rank 000: Var climatology created (77/172)\n", + "Rank 000: Var climatology data (77/172)\n", + "Rank 000: Var climatology completed (77/172)\n", + "Rank 000: Writing contact_email_address var (78/172)\n", + "Rank 000: Var contact_email_address created (78/172)\n", + "Rank 000: Var contact_email_address data (78/172)\n", + "Rank 000: Var contact_email_address completed (78/172)\n", + "Rank 000: Writing contact_institution var (79/172)\n", + "Rank 000: Var contact_institution created (79/172)\n", + "Rank 000: Var contact_institution data (79/172)\n", + "Rank 000: Var contact_institution completed (79/172)\n", + "Rank 000: Writing contact_name var (80/172)\n", + "Rank 000: Var contact_name created (80/172)\n", + "Rank 000: Var contact_name data (80/172)\n", + "Rank 000: Var contact_name completed (80/172)\n", + "Rank 000: Writing country var (81/172)\n", + "Rank 000: Var country 
created (81/172)\n", + "Rank 000: Var country data (81/172)\n", + "Rank 000: Var country completed (81/172)\n", + "Rank 000: Writing daily_native_max_gap_percent var (82/172)\n", + "Rank 000: Var daily_native_max_gap_percent created (82/172)\n", + "Rank 000: Var daily_native_max_gap_percent data (82/172)\n", + "Rank 000: Var daily_native_max_gap_percent completed (82/172)\n", + "Rank 000: Writing daily_native_representativity_percent var (83/172)\n", + "Rank 000: Var daily_native_representativity_percent created (83/172)\n", + "Rank 000: Var daily_native_representativity_percent data (83/172)\n", + "Rank 000: Var daily_native_representativity_percent completed (83/172)\n", + "Rank 000: Writing daily_passing_vehicles var (84/172)\n", + "Rank 000: Var daily_passing_vehicles created (84/172)\n", + "Rank 000: Var daily_passing_vehicles data (84/172)\n", + "Rank 000: Var daily_passing_vehicles completed (84/172)\n", + "Rank 000: Writing data_level var (85/172)\n", + "Rank 000: Var data_level created (85/172)\n", + "Rank 000: Var data_level data (85/172)\n", + "Rank 000: Var data_level completed (85/172)\n", + "Rank 000: Writing data_licence var (86/172)\n", + "Rank 000: Var data_licence created (86/172)\n", + "Rank 000: Var data_licence data (86/172)\n", + "Rank 000: Var data_licence completed (86/172)\n", + "Rank 000: Writing day_night_code var (87/172)\n", + "Rank 000: Var day_night_code created (87/172)\n", + "Rank 000: Var day_night_code data (87/172)\n", + "Rank 000: Var day_night_code completed (87/172)\n", + "Rank 000: Writing daytime_traffic_speed var (88/172)\n", + "Rank 000: Var daytime_traffic_speed created (88/172)\n", + "Rank 000: Var daytime_traffic_speed data (88/172)\n", + "Rank 000: Var daytime_traffic_speed completed (88/172)\n", + "Rank 000: Writing derived_uncertainty_per_measurement var (89/172)\n", + "Rank 000: Var derived_uncertainty_per_measurement created (89/172)\n", + "Rank 000: Var derived_uncertainty_per_measurement data (89/172)\n", + "Rank 000: Var derived_uncertainty_per_measurement completed (89/172)\n", + "Rank 000: Writing distance_to_building var (90/172)\n", + "Rank 000: Var distance_to_building created (90/172)\n", + "Rank 000: Var distance_to_building data (90/172)\n", + "Rank 000: Var distance_to_building completed (90/172)\n", + "Rank 000: Writing distance_to_junction var (91/172)\n", + "Rank 000: Var distance_to_junction created (91/172)\n", + "Rank 000: Var distance_to_junction data (91/172)\n", + "Rank 000: Var distance_to_junction completed (91/172)\n", + "Rank 000: Writing distance_to_kerb var (92/172)\n", + "Rank 000: Var distance_to_kerb created (92/172)\n", + "Rank 000: Var distance_to_kerb data (92/172)\n", + "Rank 000: Var distance_to_kerb completed (92/172)\n", + "Rank 000: Writing distance_to_source var (93/172)\n", + "Rank 000: Var distance_to_source created (93/172)\n", + "Rank 000: Var distance_to_source data (93/172)\n", + "Rank 000: Var distance_to_source completed (93/172)\n", + "Rank 000: Writing ellipsoid var (94/172)\n", + "Rank 000: Var ellipsoid created (94/172)\n", + "Rank 000: Var ellipsoid data (94/172)\n", + "Rank 000: Var ellipsoid completed (94/172)\n", + "Rank 000: Writing horizontal_datum var (95/172)\n", + "Rank 000: Var horizontal_datum created (95/172)\n", + "Rank 000: Var horizontal_datum data (95/172)\n", + "Rank 000: Var horizontal_datum completed (95/172)\n", + "Rank 000: Writing land_use var (96/172)\n", + "Rank 000: Var land_use created (96/172)\n", + "Rank 000: Var land_use data (96/172)\n", + "Rank 000: Var 
land_use completed (96/172)\n", + "Rank 000: Writing main_emission_source var (97/172)\n", + "Rank 000: Var main_emission_source created (97/172)\n", + "Rank 000: Var main_emission_source data (97/172)\n", + "Rank 000: Var main_emission_source completed (97/172)\n", + "Rank 000: Writing measurement_altitude var (98/172)\n", + "Rank 000: Var measurement_altitude created (98/172)\n", + "Rank 000: Var measurement_altitude data (98/172)\n", + "Rank 000: Var measurement_altitude completed (98/172)\n", + "Rank 000: Writing measurement_methodology var (99/172)\n", + "Rank 000: Var measurement_methodology created (99/172)\n", + "Rank 000: Var measurement_methodology data (99/172)\n", + "Rank 000: Var measurement_methodology completed (99/172)\n", + "Rank 000: Writing measurement_scale var (100/172)\n", + "Rank 000: Var measurement_scale created (100/172)\n", + "Rank 000: Var measurement_scale data (100/172)\n", + "Rank 000: Var measurement_scale completed (100/172)\n", + "Rank 000: Writing measuring_instrument_calibration_scale var (101/172)\n", + "Rank 000: Var measuring_instrument_calibration_scale created (101/172)\n", + "Rank 000: Var measuring_instrument_calibration_scale data (101/172)\n", + "Rank 000: Var measuring_instrument_calibration_scale completed (101/172)\n", + "Rank 000: Writing measuring_instrument_documented_absorption_cross_section var (102/172)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section created (102/172)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section data (102/172)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section completed (102/172)\n", + "Rank 000: Writing measuring_instrument_documented_accuracy var (103/172)\n", + "Rank 000: Var measuring_instrument_documented_accuracy created (103/172)\n", + "Rank 000: Var measuring_instrument_documented_accuracy data (103/172)\n", + "Rank 000: Var measuring_instrument_documented_accuracy completed (103/172)\n", + "Rank 000: Writing measuring_instrument_documented_flow_rate var (104/172)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate created (104/172)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate data (104/172)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate completed (104/172)\n", + "Rank 000: Writing measuring_instrument_documented_lower_limit_of_detection var (105/172)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection created (105/172)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection data (105/172)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection completed (105/172)\n", + "Rank 000: Writing measuring_instrument_documented_measurement_resolution var (106/172)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution created (106/172)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution data (106/172)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution completed (106/172)\n", + "Rank 000: Writing measuring_instrument_documented_precision var (107/172)\n", + "Rank 000: Var measuring_instrument_documented_precision created (107/172)\n", + "Rank 000: Var measuring_instrument_documented_precision data (107/172)\n", + "Rank 000: Var measuring_instrument_documented_precision completed (107/172)\n", + "Rank 000: Writing measuring_instrument_documented_span_drift var (108/172)\n", + "Rank 000: Var measuring_instrument_documented_span_drift created 
(108/172)\n", + "Rank 000: Var measuring_instrument_documented_span_drift data (108/172)\n", + "Rank 000: Var measuring_instrument_documented_span_drift completed (108/172)\n", + "Rank 000: Writing measuring_instrument_documented_uncertainty var (109/172)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty created (109/172)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty data (109/172)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty completed (109/172)\n", + "Rank 000: Writing measuring_instrument_documented_upper_limit_of_detection var (110/172)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection created (110/172)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection data (110/172)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection completed (110/172)\n", + "Rank 000: Writing measuring_instrument_documented_zero_drift var (111/172)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift created (111/172)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift data (111/172)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift completed (111/172)\n", + "Rank 000: Writing measuring_instrument_documented_zonal_drift var (112/172)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift created (112/172)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift data (112/172)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift completed (112/172)\n", + "Rank 000: Writing measuring_instrument_further_details var (113/172)\n", + "Rank 000: Var measuring_instrument_further_details created (113/172)\n", + "Rank 000: Var measuring_instrument_further_details data (113/172)\n", + "Rank 000: Var measuring_instrument_further_details completed (113/172)\n", + "Rank 000: Writing measuring_instrument_inlet_information var (114/172)\n", + "Rank 000: Var measuring_instrument_inlet_information created (114/172)\n", + "Rank 000: Var measuring_instrument_inlet_information data (114/172)\n", + "Rank 000: Var measuring_instrument_inlet_information completed (114/172)\n", + "Rank 000: Writing measuring_instrument_manual_name var (115/172)\n", + "Rank 000: Var measuring_instrument_manual_name created (115/172)\n", + "Rank 000: Var measuring_instrument_manual_name data (115/172)\n", + "Rank 000: Var measuring_instrument_manual_name completed (115/172)\n", + "Rank 000: Writing measuring_instrument_name var (116/172)\n", + "Rank 000: Var measuring_instrument_name created (116/172)\n", + "Rank 000: Var measuring_instrument_name data (116/172)\n", + "Rank 000: Var measuring_instrument_name completed (116/172)\n", + "Rank 000: Writing measuring_instrument_process_details var (117/172)\n", + "Rank 000: Var measuring_instrument_process_details created (117/172)\n", + "Rank 000: Var measuring_instrument_process_details data (117/172)\n", + "Rank 000: Var measuring_instrument_process_details completed (117/172)\n", + "Rank 000: Writing measuring_instrument_reported_absorption_cross_section var (118/172)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section created (118/172)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section data (118/172)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section completed (118/172)\n", + "Rank 000: Writing measuring_instrument_reported_accuracy var (119/172)\n", + "Rank 000: Var measuring_instrument_reported_accuracy created (119/172)\n", + "Rank 
000: Var measuring_instrument_reported_accuracy data (119/172)\n", + "Rank 000: Var measuring_instrument_reported_accuracy completed (119/172)\n", + "Rank 000: Writing measuring_instrument_reported_flow_rate var (120/172)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate created (120/172)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate data (120/172)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate completed (120/172)\n", + "Rank 000: Writing measuring_instrument_reported_lower_limit_of_detection var (121/172)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection created (121/172)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection data (121/172)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection completed (121/172)\n", + "Rank 000: Writing measuring_instrument_reported_measurement_resolution var (122/172)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution created (122/172)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution data (122/172)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution completed (122/172)\n", + "Rank 000: Writing measuring_instrument_reported_precision var (123/172)\n", + "Rank 000: Var measuring_instrument_reported_precision created (123/172)\n", + "Rank 000: Var measuring_instrument_reported_precision data (123/172)\n", + "Rank 000: Var measuring_instrument_reported_precision completed (123/172)\n", + "Rank 000: Writing measuring_instrument_reported_span_drift var (124/172)\n", + "Rank 000: Var measuring_instrument_reported_span_drift created (124/172)\n", + "Rank 000: Var measuring_instrument_reported_span_drift data (124/172)\n", + "Rank 000: Var measuring_instrument_reported_span_drift completed (124/172)\n", + "Rank 000: Writing measuring_instrument_reported_uncertainty var (125/172)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty created (125/172)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty data (125/172)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty completed (125/172)\n", + "Rank 000: Writing measuring_instrument_reported_units var (126/172)\n", + "Rank 000: Var measuring_instrument_reported_units created (126/172)\n", + "Rank 000: Var measuring_instrument_reported_units data (126/172)\n", + "Rank 000: Var measuring_instrument_reported_units completed (126/172)\n", + "Rank 000: Writing measuring_instrument_reported_upper_limit_of_detection var (127/172)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection created (127/172)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection data (127/172)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection completed (127/172)\n", + "Rank 000: Writing measuring_instrument_reported_zero_drift var (128/172)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift created (128/172)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift data (128/172)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift completed (128/172)\n", + "Rank 000: Writing measuring_instrument_reported_zonal_drift var (129/172)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift created (129/172)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift data (129/172)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift completed (129/172)\n", + "Rank 000: Writing measuring_instrument_sampling_type var (130/172)\n", + 
"Rank 000: Var measuring_instrument_sampling_type created (130/172)\n", + "Rank 000: Var measuring_instrument_sampling_type data (130/172)\n", + "Rank 000: Var measuring_instrument_sampling_type completed (130/172)\n", + "Rank 000: Writing monthly_native_max_gap_percent var (131/172)\n", + "Rank 000: Var monthly_native_max_gap_percent created (131/172)\n", + "Rank 000: Var monthly_native_max_gap_percent data (131/172)\n", + "Rank 000: Var monthly_native_max_gap_percent completed (131/172)\n", + "Rank 000: Writing monthly_native_representativity_percent var (132/172)\n", + "Rank 000: Var monthly_native_representativity_percent created (132/172)\n", + "Rank 000: Var monthly_native_representativity_percent data (132/172)\n", + "Rank 000: Var monthly_native_representativity_percent completed (132/172)\n", + "Rank 000: Writing network var (133/172)\n", + "Rank 000: Var network created (133/172)\n", + "Rank 000: Var network data (133/172)\n", + "Rank 000: Var network completed (133/172)\n", + "Rank 000: Writing network_maintenance_details var (134/172)\n", + "Rank 000: Var network_maintenance_details created (134/172)\n", + "Rank 000: Var network_maintenance_details data (134/172)\n", + "Rank 000: Var network_maintenance_details completed (134/172)\n", + "Rank 000: Writing network_miscellaneous_details var (135/172)\n", + "Rank 000: Var network_miscellaneous_details created (135/172)\n", + "Rank 000: Var network_miscellaneous_details data (135/172)\n", + "Rank 000: Var network_miscellaneous_details completed (135/172)\n", + "Rank 000: Writing network_provided_volume_standard_pressure var (136/172)\n", + "Rank 000: Var network_provided_volume_standard_pressure created (136/172)\n", + "Rank 000: Var network_provided_volume_standard_pressure data (136/172)\n", + "Rank 000: Var network_provided_volume_standard_pressure completed (136/172)\n", + "Rank 000: Writing network_provided_volume_standard_temperature var (137/172)\n", + "Rank 000: Var network_provided_volume_standard_temperature created (137/172)\n", + "Rank 000: Var network_provided_volume_standard_temperature data (137/172)\n", + "Rank 000: Var network_provided_volume_standard_temperature completed (137/172)\n", + "Rank 000: Writing network_qa_details var (138/172)\n", + "Rank 000: Var network_qa_details created (138/172)\n", + "Rank 000: Var network_qa_details data (138/172)\n", + "Rank 000: Var network_qa_details completed (138/172)\n", + "Rank 000: Writing network_sampling_details var (139/172)\n", + "Rank 000: Var network_sampling_details created (139/172)\n", + "Rank 000: Var network_sampling_details data (139/172)\n", + "Rank 000: Var network_sampling_details completed (139/172)\n", + "Rank 000: Writing network_uncertainty_details var (140/172)\n", + "Rank 000: Var network_uncertainty_details created (140/172)\n", + "Rank 000: Var network_uncertainty_details data (140/172)\n", + "Rank 000: Var network_uncertainty_details completed (140/172)\n", + "Rank 000: Writing population var (141/172)\n", + "Rank 000: Var population created (141/172)\n", + "Rank 000: Var population data (141/172)\n", + "Rank 000: Var population completed (141/172)\n", + "Rank 000: Writing primary_sampling_further_details var (142/172)\n", + "Rank 000: Var primary_sampling_further_details created (142/172)\n", + "Rank 000: Var primary_sampling_further_details data (142/172)\n", + "Rank 000: Var primary_sampling_further_details completed (142/172)\n", + "Rank 000: Writing primary_sampling_instrument_documented_flow_rate var (143/172)\n", + "Rank 000: Var 
primary_sampling_instrument_documented_flow_rate created (143/172)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate data (143/172)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate completed (143/172)\n", + "Rank 000: Writing primary_sampling_instrument_manual_name var (144/172)\n", + "Rank 000: Var primary_sampling_instrument_manual_name created (144/172)\n", + "Rank 000: Var primary_sampling_instrument_manual_name data (144/172)\n", + "Rank 000: Var primary_sampling_instrument_manual_name completed (144/172)\n", + "Rank 000: Writing primary_sampling_instrument_name var (145/172)\n", + "Rank 000: Var primary_sampling_instrument_name created (145/172)\n", + "Rank 000: Var primary_sampling_instrument_name data (145/172)\n", + "Rank 000: Var primary_sampling_instrument_name completed (145/172)\n", + "Rank 000: Writing primary_sampling_instrument_reported_flow_rate var (146/172)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate created (146/172)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate data (146/172)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate completed (146/172)\n", + "Rank 000: Writing primary_sampling_process_details var (147/172)\n", + "Rank 000: Var primary_sampling_process_details created (147/172)\n", + "Rank 000: Var primary_sampling_process_details data (147/172)\n", + "Rank 000: Var primary_sampling_process_details completed (147/172)\n", + "Rank 000: Writing primary_sampling_type var (148/172)\n", + "Rank 000: Var primary_sampling_type created (148/172)\n", + "Rank 000: Var primary_sampling_type data (148/172)\n", + "Rank 000: Var primary_sampling_type completed (148/172)\n", + "Rank 000: Writing principal_investigator_email_address var (149/172)\n", + "Rank 000: Var principal_investigator_email_address created (149/172)\n", + "Rank 000: Var principal_investigator_email_address data (149/172)\n", + "Rank 000: Var principal_investigator_email_address completed (149/172)\n", + "Rank 000: Writing principal_investigator_institution var (150/172)\n", + "Rank 000: Var principal_investigator_institution created (150/172)\n", + "Rank 000: Var principal_investigator_institution data (150/172)\n", + "Rank 000: Var principal_investigator_institution completed (150/172)\n", + "Rank 000: Writing principal_investigator_name var (151/172)\n", + "Rank 000: Var principal_investigator_name created (151/172)\n", + "Rank 000: Var principal_investigator_name data (151/172)\n", + "Rank 000: Var principal_investigator_name completed (151/172)\n", + "Rank 000: Writing process_warnings var (152/172)\n", + "Rank 000: Var process_warnings created (152/172)\n", + "Rank 000: Var process_warnings data (152/172)\n", + "Rank 000: Var process_warnings completed (152/172)\n", + "Rank 000: Writing projection var (153/172)\n", + "Rank 000: Var projection created (153/172)\n", + "Rank 000: Var projection data (153/172)\n", + "Rank 000: Var projection completed (153/172)\n", + "Rank 000: Writing reported_uncertainty_per_measurement var (154/172)\n", + "Rank 000: Var reported_uncertainty_per_measurement created (154/172)\n", + "Rank 000: Var reported_uncertainty_per_measurement data (154/172)\n", + "Rank 000: Var reported_uncertainty_per_measurement completed (154/172)\n", + "Rank 000: Writing representative_radius var (155/172)\n", + "Rank 000: Var representative_radius created (155/172)\n", + "Rank 000: Var representative_radius data (155/172)\n", + "Rank 000: Var representative_radius completed (155/172)\n", 
+ "Rank 000: Writing retrieval_algorithm var (156/172)\n", + "Rank 000: Var retrieval_algorithm created (156/172)\n", + "Rank 000: Var retrieval_algorithm data (156/172)\n", + "Rank 000: Var retrieval_algorithm completed (156/172)\n", + "Rank 000: Writing sample_preparation_further_details var (157/172)\n", + "Rank 000: Var sample_preparation_further_details created (157/172)\n", + "Rank 000: Var sample_preparation_further_details data (157/172)\n", + "Rank 000: Var sample_preparation_further_details completed (157/172)\n", + "Rank 000: Writing sample_preparation_process_details var (158/172)\n", + "Rank 000: Var sample_preparation_process_details created (158/172)\n", + "Rank 000: Var sample_preparation_process_details data (158/172)\n", + "Rank 000: Var sample_preparation_process_details completed (158/172)\n", + "Rank 000: Writing sample_preparation_techniques var (159/172)\n", + "Rank 000: Var sample_preparation_techniques created (159/172)\n", + "Rank 000: Var sample_preparation_techniques data (159/172)\n", + "Rank 000: Var sample_preparation_techniques completed (159/172)\n", + "Rank 000: Writing sample_preparation_types var (160/172)\n", + "Rank 000: Var sample_preparation_types created (160/172)\n", + "Rank 000: Var sample_preparation_types data (160/172)\n", + "Rank 000: Var sample_preparation_types completed (160/172)\n", + "Rank 000: Writing sampling_height var (161/172)\n", + "Rank 000: Var sampling_height created (161/172)\n", + "Rank 000: Var sampling_height data (161/172)\n", + "Rank 000: Var sampling_height completed (161/172)\n", + "Rank 000: Writing sconcso4 var (162/172)\n", + "Rank 000: Var sconcso4 created (162/172)\n", + "Rank 000: Var sconcso4 data (162/172)\n", + "Rank 000: Var sconcso4 completed (162/172)\n", + "Rank 000: Writing season_code var (163/172)\n", + "Rank 000: Var season_code created (163/172)\n", + "Rank 000: Var season_code data (163/172)\n", + "Rank 000: Var season_code completed (163/172)\n", + "Rank 000: Writing station_classification var (164/172)\n", + "Rank 000: Var station_classification created (164/172)\n", + "Rank 000: Var station_classification data (164/172)\n", + "Rank 000: Var station_classification completed (164/172)\n", + "Rank 000: Writing station_name var (165/172)\n", + "Rank 000: Var station_name created (165/172)\n", + "Rank 000: Var station_name data (165/172)\n", + "Rank 000: Var station_name completed (165/172)\n", + "Rank 000: Writing station_reference var (166/172)\n", + "Rank 000: Var station_reference created (166/172)\n", + "Rank 000: Var station_reference data (166/172)\n", + "Rank 000: Var station_reference completed (166/172)\n", + "Rank 000: Writing station_timezone var (167/172)\n", + "Rank 000: Var station_timezone created (167/172)\n", + "Rank 000: Var station_timezone data (167/172)\n", + "Rank 000: Var station_timezone completed (167/172)\n", + "Rank 000: Writing street_type var (168/172)\n", + "Rank 000: Var street_type created (168/172)\n", + "Rank 000: Var street_type data (168/172)\n", + "Rank 000: Var street_type completed (168/172)\n", + "Rank 000: Writing street_width var (169/172)\n", + "Rank 000: Var street_width created (169/172)\n", + "Rank 000: Var street_width data (169/172)\n", + "Rank 000: Var street_width completed (169/172)\n", + "Rank 000: Writing terrain var (170/172)\n", + "Rank 000: Var terrain created (170/172)\n", + "Rank 000: Var terrain data (170/172)\n", + "Rank 000: Var terrain completed (170/172)\n", + "Rank 000: Writing vertical_datum var (171/172)\n", + "Rank 000: Var vertical_datum 
created (171/172)\n", + "Rank 000: Var vertical_datum data (171/172)\n", + "Rank 000: Var vertical_datum completed (171/172)\n", + "Rank 000: Writing weekday_weekend_code var (172/172)\n", + "Rank 000: Var weekday_weekend_code created (172/172)\n", + "Rank 000: Var weekday_weekend_code data (172/172)\n", + "Rank 000: Var weekday_weekend_code completed (172/172)\n" + ] + } + ], + "source": [ + "nessy_ghost_1.to_netcdf('points_file_2.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_2 = open_netcdf('points_file_2.nc', info=True, parallel_method='X')\n", + "nessy_ghost_2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                                                           (time: 30, station: 3, N_flag_codes: 190, N_qa_codes: 77)\n",
+       "Coordinates:\n",
+       "  * time                                                              (time) datetime64[ns] ...\n",
+       "  * station                                                           (station) float64 ...\n",
+       "Dimensions without coordinates: N_flag_codes, N_qa_codes\n",
+       "Data variables: (12/176)\n",
+       "    flag                                                              (station, time, N_flag_codes) int64 ...\n",
+       "    qa                                                                (station, time, N_qa_codes) int64 ...\n",
+       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
+       "    ...                                                                ...\n",
+       "    street_width                                                      (station) float32 ...\n",
+       "    terrain                                                           (station) object ...\n",
+       "    vertical_datum                                                    (station) object ...\n",
+       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
+       "    lat                                                               (station) float64 ...\n",
+       "    lon                                                               (station) float64 ...\n",
+       "Attributes:\n",
+       "    title:          Surface sulphate data in the EANET network in 2019-11.\n",
+       "    institution:    Barcelona Supercomputing Center\n",
+       "    source:         Surface observations\n",
+       "    creator_name:   Dene R. Bowdalo\n",
+       "    creator_email:  dene.bowdalo@bsc.es\n",
+       "    version:        1.4\n",
+       "    Conventions:    CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 30, station: 3, N_flag_codes: 190, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + " * station (station) float64 ...\n", + "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", + "Data variables: (12/176)\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " ... ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " title: Surface sulphate data in the EANET network in 2019-11.\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " version: 1.4\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_file_2.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/2-create_nes.ipynb b/Jupyter_notebooks/2-create_nes.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..01ccb10ed32fb8e58455a4a798b134508a714b67 --- /dev/null +++ b/Jupyter_notebooks/2-create_nes.ipynb @@ -0,0 +1,2738 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create regular, rotated and points grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import pandas as pd\n", + "import numpy as np\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. 
Create regular grid" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lat_orig = 41.1\n", + "lon_orig = 1.8\n", + "inc_lat = 0.1\n", + "inc_lon = 0.1\n", + "n_lat = 10\n", + "n_lon = 10\n", + "regular_grid = create_nes(comm=None, info=False, projection='regular', create_nes=True,\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating regular_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "regular_grid.to_netcdf('regular_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:  (time: 1, lev: 1, lat: 10, lon: 10)\n",
+       "Coordinates:\n",
+       "  * time     (time) datetime64[ns] 1996-12-31\n",
+       "  * lev      (lev) float64 0.0\n",
+       "  * lat      (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n",
+       "  * lon      (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n",
+       "Data variables:\n",
+       "    crs      |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, lat: 10, lon: 10)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * lat (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n", + " * lon (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n", + "Data variables:\n", + " crs |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('regular_grid.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Create rotated grid" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "centre_lat = 51\n", + "centre_lon = 10\n", + "west_boundary = -35\n", + "south_boundary = -27\n", + "inc_rlat = 0.2\n", + "inc_rlon = 0.2\n", + "rotated_grid = create_nes(comm=None, info=False, projection='rotated', create_nes=True,\n", + " centre_lat=centre_lat, centre_lon=centre_lon,\n", + " west_boundary=west_boundary, south_boundary=south_boundary,\n", + " inc_rlat=inc_rlat, inc_rlon=inc_rlon)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating rotated_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "rotated_grid.to_netcdf('rotated_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:  (time: 1, lev: 1, rlat: 271, rlon: 351)\n",
+       "Coordinates:\n",
+       "  * time     (time) datetime64[ns] 1996-12-31\n",
+       "  * lev      (lev) float64 0.0\n",
+       "  * rlat     (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
+       "  * rlon     (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
+       "Data variables:\n",
+       "    lat      (rlat, rlon) float64 16.35 16.43 16.52 16.6 ... 58.83 58.68 58.53\n",
+       "    lon      (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 87.87 88.05 88.23\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Data variables:\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('rotated_grid.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Create grid from points" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### File 1" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
station.codelatlonstandardised_network_provided_area_classification
0ES0266A41.3793222.086140urban-centre
1ES0392A41.7277041.838531urban-suburban
2ES0395A41.5678242.014598urban-centre
3ES0559A41.3874242.164918urban-centre
4ES0567A41.3849062.119574urban-centre
...............
129ES2087A41.9292832.257302NaN
130ES2091A40.5799000.553500NaN
131ES2088A41.7710602.250647NaN
132ES1908A41.2390691.856564NaN
133ES9994A42.3583631.459455NaN
\n", + "

134 rows × 4 columns

\n", + "
" + ], + "text/plain": [ + " station.code lat lon \\\n", + "0 ES0266A 41.379322 2.086140 \n", + "1 ES0392A 41.727704 1.838531 \n", + "2 ES0395A 41.567824 2.014598 \n", + "3 ES0559A 41.387424 2.164918 \n", + "4 ES0567A 41.384906 2.119574 \n", + ".. ... ... ... \n", + "129 ES2087A 41.929283 2.257302 \n", + "130 ES2091A 40.579900 0.553500 \n", + "131 ES2088A 41.771060 2.250647 \n", + "132 ES1908A 41.239069 1.856564 \n", + "133 ES9994A 42.358363 1.459455 \n", + "\n", + " standardised_network_provided_area_classification \n", + "0 urban-centre \n", + "1 urban-suburban \n", + "2 urban-centre \n", + "3 urban-centre \n", + "4 urban-centre \n", + ".. ... \n", + "129 NaN \n", + "130 NaN \n", + "131 NaN \n", + "132 NaN \n", + "133 NaN \n", + "\n", + "[134 rows x 4 columns]" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/XVPCA_info.csv'\n", + "df = pd.read_csv(file_path)\n", + "df" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "lat = df['lat']\n", + "lon = df['lon']\n", + "metadata = {'station_code': {'data': df['station.code'],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'area_classification': {'data': df['standardised_network_provided_area_classification'],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str}}" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, create_nes=True, parallel_method='X',\n", + " lat=lat, lon=lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_code var (1/2)\n", + "Rank 000: Var station_code created (1/2)\n", + "Rank 000: Var station_code data (1/2)\n", + "Rank 000: Var station_code completed (1/2)\n", + "Rank 000: Writing area_classification var (2/2)\n", + "Rank 000: Var area_classification created (2/2)\n", + "Rank 000: Var area_classification data (2/2)\n", + "Rank 000: Var area_classification completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:362: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:362: UserWarning: WARNING!!! Different data types for variable area_classificationInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_1.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:              (time: 1, station: 134)\n",
+       "Coordinates:\n",
+       "  * time                 (time) datetime64[ns] 1996-12-31\n",
+       "  * station              (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n",
+       "Data variables:\n",
+       "    station_code         (station) object 'ES0266A' 'ES0392A' ... 'ES9994A'\n",
+       "    area_classification  (station) object 'urban-centre' ... 'nan'\n",
+       "    lat                  (station) float64 41.38 41.73 41.57 ... 41.24 42.36\n",
+       "    lon                  (station) float64 2.086 1.839 2.015 ... 1.857 1.459\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, station: 134)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * station (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n", + "Data variables:\n", + " station_code (station) object ...\n", + " area_classification (station) object ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_1.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### File 2" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
EstacióPM10 Barcelona (Eixample)PM10 Badalona (guàrdia urbana)PM10 Badalona (Assamblea de Catalunya)PM10 Barcelona (Pl. de la Universitat)PM10 Barcelona (Poblenou)PM10 Barcelona (Zona Universitària)PM10 Barcelona (el Port Vell)PM10 Barcelona (IES Goya)PM10 Barcelona (IES Verdaguer)...PM10 Constantí (Gaudí)PM10 Vila-seca (RENFE)PM10 Sitges (Vallcarca-oficines)PM10 Sant Vicenç dels Horts (Àlaba)PM10 Montsec (OAM)PM10 Montseny (la Castanya)PM10 Caldes de Montbui (Ajuntament)PM10 Sant Feliu de Llobregat (Eugeni d'Ors)PM 10 La Seu d'Urgell (CC Les Monges)PM10 Vic (Centre Cívic Santa Anna)
0Codi europeuES1438AES1928AES2027AES0559AES0691AES0567AES1870AES1852AES1900A...ES1123AES1117AES2033AES2011AES1982AES1778AES1680AES1362AES9994AES1874A
1DiaValor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)...Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)Valor (µg/m3)
22017-01-01 00:00:0019.6NaN2020.225.616.529NaN23.8...12.97NaN1122.499.5002997.936455NaNNaNNaNNaN
32017-01-02 00:00:0027.220.862331.63522.82817.232.4...NaN25.382625.391.8296189.7870043222.06NaNNaN
42017-01-03 00:00:0035.7NaN323736.230.931NaN35.8...21.836.494830.658.09460716.978294335.84NaNNaN
..................................................................
3622017-12-27 00:00:0017.57.591016.91413.121NaN20.8...1222.95NaNNaN13.066751NaN10.3NaNNaN
3632017-12-28 00:00:0017NaN1417.915NaN1314.516...NaN6.5NaN9.97613.351872NaN26.81NaNNaN
3642017-12-29 00:00:0024.6212423.225.815.321NaN25.9...8.869.56NaN23.7614.219732NaN14.09NaNNaN
3652017-12-30 00:00:0027.4NaN1522.316.611.21610.718.8...NaNNaNNaN19.041.0911874.713029NaNNaNNaNNaN
3662017-12-31 00:00:0017.312.51316.317.69.914NaN17.4...12.77NaNNaN15.232.156595.024302NaNNaNNaNNaN
\n", + "

367 rows × 84 columns

\n", + "
" + ], + "text/plain": [ + " Estació PM10 Barcelona (Eixample) \\\n", + "0 Codi europeu ES1438A \n", + "1 Dia Valor (µg/m3) \n", + "2 2017-01-01 00:00:00 19.6 \n", + "3 2017-01-02 00:00:00 27.2 \n", + "4 2017-01-03 00:00:00 35.7 \n", + ".. ... ... \n", + "362 2017-12-27 00:00:00 17.5 \n", + "363 2017-12-28 00:00:00 17 \n", + "364 2017-12-29 00:00:00 24.6 \n", + "365 2017-12-30 00:00:00 27.4 \n", + "366 2017-12-31 00:00:00 17.3 \n", + "\n", + " PM10 Badalona (guàrdia urbana) PM10 Badalona (Assamblea de Catalunya) \\\n", + "0 ES1928A ES2027A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 NaN 20 \n", + "3 20.86 23 \n", + "4 NaN 32 \n", + ".. ... ... \n", + "362 7.59 10 \n", + "363 NaN 14 \n", + "364 21 24 \n", + "365 NaN 15 \n", + "366 12.5 13 \n", + "\n", + " PM10 Barcelona (Pl. de la Universitat) PM10 Barcelona (Poblenou) \\\n", + "0 ES0559A ES0691A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 20.2 25.6 \n", + "3 31.6 35 \n", + "4 37 36.2 \n", + ".. ... ... \n", + "362 16.9 14 \n", + "363 17.9 15 \n", + "364 23.2 25.8 \n", + "365 22.3 16.6 \n", + "366 16.3 17.6 \n", + "\n", + " PM10 Barcelona (Zona Universitària) PM10 Barcelona (el Port Vell) \\\n", + "0 ES0567A ES1870A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 16.5 29 \n", + "3 22.8 28 \n", + "4 30.9 31 \n", + ".. ... ... \n", + "362 13.1 21 \n", + "363 NaN 13 \n", + "364 15.3 21 \n", + "365 11.2 16 \n", + "366 9.9 14 \n", + "\n", + " PM10 Barcelona (IES Goya) PM10 Barcelona (IES Verdaguer) ... \\\n", + "0 ES1852A ES1900A ... \n", + "1 Valor (µg/m3) Valor (µg/m3) ... \n", + "2 NaN 23.8 ... \n", + "3 17.2 32.4 ... \n", + "4 NaN 35.8 ... \n", + ".. ... ... ... \n", + "362 NaN 20.8 ... \n", + "363 14.5 16 ... \n", + "364 NaN 25.9 ... \n", + "365 10.7 18.8 ... \n", + "366 NaN 17.4 ... \n", + "\n", + " PM10 Constantí (Gaudí) PM10 Vila-seca (RENFE) \\\n", + "0 ES1123A ES1117A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 12.97 NaN \n", + "3 NaN 25.38 \n", + "4 21.8 36.49 \n", + ".. ... ... \n", + "362 12 22.95 \n", + "363 NaN 6.5 \n", + "364 8.86 9.56 \n", + "365 NaN NaN \n", + "366 12.77 NaN \n", + "\n", + " PM10 Sitges (Vallcarca-oficines) PM10 Sant Vicenç dels Horts (Àlaba) \\\n", + "0 ES2033A ES2011A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 11 22.49 \n", + "3 26 25.39 \n", + "4 48 30.65 \n", + ".. ... ... \n", + "362 NaN NaN \n", + "363 NaN 9.976 \n", + "364 NaN 23.76 \n", + "365 NaN 19.04 \n", + "366 NaN 15.23 \n", + "\n", + " PM10 Montsec (OAM) PM10 Montseny (la Castanya) \\\n", + "0 ES1982A ES1778A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 9.500299 7.936455 \n", + "3 1.829618 9.787004 \n", + "4 8.094607 16.97829 \n", + ".. ... ... \n", + "362 1 3.066751 \n", + "363 1 3.351872 \n", + "364 1 4.219732 \n", + "365 1.091187 4.713029 \n", + "366 2.15659 5.024302 \n", + "\n", + " PM10 Caldes de Montbui (Ajuntament) \\\n", + "0 ES1680A \n", + "1 Valor (µg/m3) \n", + "2 NaN \n", + "3 32 \n", + "4 43 \n", + ".. ... \n", + "362 NaN \n", + "363 NaN \n", + "364 NaN \n", + "365 NaN \n", + "366 NaN \n", + "\n", + " PM10 Sant Feliu de Llobregat (Eugeni d'Ors) \\\n", + "0 ES1362A \n", + "1 Valor (µg/m3) \n", + "2 NaN \n", + "3 22.06 \n", + "4 35.84 \n", + ".. ... \n", + "362 10.3 \n", + "363 26.81 \n", + "364 14.09 \n", + "365 NaN \n", + "366 NaN \n", + "\n", + " PM 10 La Seu d'Urgell (CC Les Monges) PM10 Vic (Centre Cívic Santa Anna) \n", + "0 ES9994A ES1874A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 NaN NaN \n", + "3 NaN NaN \n", + "4 NaN NaN \n", + ".. ... ... 
\n", + "362 NaN NaN \n", + "363 NaN NaN \n", + "364 NaN NaN \n", + "365 NaN NaN \n", + "366 NaN NaN \n", + "\n", + "[367 rows x 84 columns]" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/Dades_2017.xlsx'\n", + "df_2 = pd.read_excel(file_path)\n", + "df_2" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "time = df_2['Estació'].iloc[2:].to_numpy()\n", + "lat = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", + "lon = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", + "metadata = {'station_name': {'data': df_2.columns.str.replace('PM10 ', '').str.replace('PM 10 ', '').to_numpy()[1:],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'station_code': {'data': df_2.iloc[0].values[1:],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'pm10': {'data': df_2.iloc[2:, 1:].to_numpy().T,\n", + " 'dimensions': ('station', 'time',),\n", + " 'dtype': float}}" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, create_nes=True, parallel_method='X',\n", + " lat=lat, lon=lon, time=time)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_name var (1/3)\n", + "Rank 000: Var station_name created (1/3)\n", + "Rank 000: Var station_name data (1/3)\n", + "Rank 000: Var station_name completed (1/3)\n", + "Rank 000: Writing station_code var (2/3)\n", + "Rank 000: Var station_code created (2/3)\n", + "Rank 000: Var station_code data (2/3)\n", + "Rank 000: Var station_code completed (2/3)\n", + "Rank 000: Writing pm10 var (3/3)\n", + "Rank 000: Var pm10 created (3/3)\n", + "Rank 000: Var pm10 data (3/3)\n", + "Rank 000: Var pm10 completed (3/3)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:362: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:362: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:362: UserWarning: WARNING!!! Different data types for variable pm10Input dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_2.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 365, station: 83)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n",
+       "  * station       (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n",
+       "Data variables:\n",
+       "    station_name  (station) object 'Barcelona (Eixample)' ... 'Vic (Centre Cí...\n",
+       "    station_code  (station) object 'ES1438A' 'ES1928A' ... 'ES9994A' 'ES1874A'\n",
+       "    pm10          (station, time) float64 19.6 27.2 35.7 30.9 ... nan nan nan\n",
+       "    lat           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
+       "    lon           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 365, station: 83)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n", + " * station (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n", + "Data variables:\n", + " station_name (station) object ...\n", + " station_code (station) object ...\n", + " pm10 (station, time) float64 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_2.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/3-add_time_bnds.ipynb b/Jupyter_notebooks/3-add_time_bnds.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..9c08016b91387c8db5d3efaeef8d56d2edbd04be --- /dev/null +++ b/Jupyter_notebooks/3-add_time_bnds.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to add time bounds" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import datetime\n", + "import numpy as np\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Set time bounds" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "test_path = \"/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc\"\n", + "nessy = open_netcdf(path=test_path, info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", + " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "array = np.array([[datetime.datetime(year=2020, month=2, day=20), \n", + " datetime.datetime(year=2020, month=2, day=15)]])\n", + "nessy.set_time_bnds(array)\n", + "nessy.time_bnds" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3_all var (1/1)\n", + "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" + ] + } + ], + "source": [ + "nessy.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "nessy.to_netcdf('nc_serial_test.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
Explore variables" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", + " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.time_bnds" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/4-providentia.ipynb b/Jupyter_notebooks/4-providentia.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5e0303360a2221c26721ce15c7d648399a6d5e71 --- /dev/null +++ b/Jupyter_notebooks/4-providentia.ipynb @@ -0,0 +1,9953 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from netCDF4 import Dataset\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Observations dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "obs_path = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.3.3/hourly/sconco3/sconco3_201804.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Read" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                                                           (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n",
+       "Coordinates:\n",
+       "  * time                                                              (time) datetime64[ns] ...\n",
+       "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n",
+       "Data variables: (12/179)\n",
+       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NMVOC_emissions                       (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NOx_emissions                         (station) float32 ...\n",
+       "    ...                                                                ...\n",
+       "    station_timezone                                                  (station) object ...\n",
+       "    street_type                                                       (station) object ...\n",
+       "    street_width                                                      (station) float32 ...\n",
+       "    terrain                                                           (station) object ...\n",
+       "    vertical_datum                                                    (station) object ...\n",
+       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
+       "Attributes:\n",
+       "    title:                     Surface ozone data in the EBAS network in 2018...\n",
+       "    institution:               Barcelona Supercomputing Center\n",
+       "    source:                    Surface observations\n",
+       "    creator_name:              Dene R. Bowdalo\n",
+       "    creator_email:             dene.bowdalo@bsc.es\n",
+       "    conventions:               CF-1.7\n",
+       "    data_version:              1.3.3\n",
+       "    history:                   Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n",
+       "    NCO:                       4.7.2\n",
+       "    nco_openmp_thread_number:  1
" + ], + "text/plain": [ + "\n", + "Dimensions: (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n", + "Data variables: (12/179)\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NMVOC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NOx_emissions (station) float32 ...\n", + " ... ...\n", + " station_timezone (station) object ...\n", + " street_type (station) object ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + "Attributes:\n", + " title: Surface ozone data in the EBAS network in 2018...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.3.3\n", + " history: Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n", + " NCO: 4.7.2\n", + " nco_openmp_thread_number: 1" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(obs_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=obs_path, info=True, parallel_method='X')\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2018, 4, 1, 0, 0),\n", + " datetime.datetime(2018, 4, 1, 1, 0),\n", + " datetime.datetime(2018, 4, 1, 2, 0),\n", + " datetime.datetime(2018, 4, 1, 3, 0),\n", + " datetime.datetime(2018, 4, 1, 4, 0),\n", + " datetime.datetime(2018, 4, 1, 5, 0),\n", + " datetime.datetime(2018, 4, 1, 6, 0),\n", + " datetime.datetime(2018, 4, 1, 7, 0),\n", + " datetime.datetime(2018, 4, 1, 8, 0),\n", + " datetime.datetime(2018, 4, 1, 9, 0),\n", + " datetime.datetime(2018, 4, 1, 10, 0),\n", + " datetime.datetime(2018, 4, 1, 11, 0),\n", + " datetime.datetime(2018, 4, 1, 12, 0),\n", + " datetime.datetime(2018, 4, 1, 13, 0),\n", + " datetime.datetime(2018, 4, 1, 14, 0),\n", + " datetime.datetime(2018, 4, 1, 15, 0),\n", + " datetime.datetime(2018, 4, 1, 16, 0),\n", + " datetime.datetime(2018, 4, 1, 17, 0),\n", + " datetime.datetime(2018, 4, 1, 18, 0),\n", + " datetime.datetime(2018, 4, 1, 19, 0),\n", + " datetime.datetime(2018, 4, 1, 20, 0),\n", + " datetime.datetime(2018, 4, 1, 21, 0),\n", + " datetime.datetime(2018, 4, 1, 22, 0),\n", + " datetime.datetime(2018, 4, 1, 23, 0),\n", + " datetime.datetime(2018, 4, 2, 0, 0),\n", + " datetime.datetime(2018, 4, 2, 1, 0),\n", + " datetime.datetime(2018, 4, 2, 2, 0),\n", + " datetime.datetime(2018, 4, 2, 3, 0),\n", + " datetime.datetime(2018, 4, 2, 4, 0),\n", + " datetime.datetime(2018, 4, 2, 5, 0),\n", + " datetime.datetime(2018, 4, 2, 6, 0),\n", + " datetime.datetime(2018, 4, 2, 7, 0),\n", + " datetime.datetime(2018, 4, 2, 8, 0),\n", + " datetime.datetime(2018, 4, 2, 9, 0),\n", + " 
datetime.datetime(2018, 4, 2, 10, 0),\n", + " datetime.datetime(2018, 4, 2, 11, 0),\n", + " datetime.datetime(2018, 4, 2, 12, 0),\n", + " datetime.datetime(2018, 4, 2, 13, 0),\n", + " datetime.datetime(2018, 4, 2, 14, 0),\n", + " datetime.datetime(2018, 4, 2, 15, 0),\n", + " datetime.datetime(2018, 4, 2, 16, 0),\n", + " datetime.datetime(2018, 4, 2, 17, 0),\n", + " datetime.datetime(2018, 4, 2, 18, 0),\n", + " datetime.datetime(2018, 4, 2, 19, 0),\n", + " datetime.datetime(2018, 4, 2, 20, 0),\n", + " datetime.datetime(2018, 4, 2, 21, 0),\n", + " datetime.datetime(2018, 4, 2, 22, 0),\n", + " datetime.datetime(2018, 4, 2, 23, 0),\n", + " datetime.datetime(2018, 4, 3, 0, 0),\n", + " datetime.datetime(2018, 4, 3, 1, 0),\n", + " datetime.datetime(2018, 4, 3, 2, 0),\n", + " datetime.datetime(2018, 4, 3, 3, 0),\n", + " datetime.datetime(2018, 4, 3, 4, 0),\n", + " datetime.datetime(2018, 4, 3, 5, 0),\n", + " datetime.datetime(2018, 4, 3, 6, 0),\n", + " datetime.datetime(2018, 4, 3, 7, 0),\n", + " datetime.datetime(2018, 4, 3, 8, 0),\n", + " datetime.datetime(2018, 4, 3, 9, 0),\n", + " datetime.datetime(2018, 4, 3, 10, 0),\n", + " datetime.datetime(2018, 4, 3, 11, 0),\n", + " datetime.datetime(2018, 4, 3, 12, 0),\n", + " datetime.datetime(2018, 4, 3, 13, 0),\n", + " datetime.datetime(2018, 4, 3, 14, 0),\n", + " datetime.datetime(2018, 4, 3, 15, 0),\n", + " datetime.datetime(2018, 4, 3, 16, 0),\n", + " datetime.datetime(2018, 4, 3, 17, 0),\n", + " datetime.datetime(2018, 4, 3, 18, 0),\n", + " datetime.datetime(2018, 4, 3, 19, 0),\n", + " datetime.datetime(2018, 4, 3, 20, 0),\n", + " datetime.datetime(2018, 4, 3, 21, 0),\n", + " datetime.datetime(2018, 4, 3, 22, 0),\n", + " datetime.datetime(2018, 4, 3, 23, 0),\n", + " datetime.datetime(2018, 4, 4, 0, 0),\n", + " datetime.datetime(2018, 4, 4, 1, 0),\n", + " datetime.datetime(2018, 4, 4, 2, 0),\n", + " datetime.datetime(2018, 4, 4, 3, 0),\n", + " datetime.datetime(2018, 4, 4, 4, 0),\n", + " datetime.datetime(2018, 4, 4, 5, 0),\n", + " datetime.datetime(2018, 4, 4, 6, 0),\n", + " datetime.datetime(2018, 4, 4, 7, 0),\n", + " datetime.datetime(2018, 4, 4, 8, 0),\n", + " datetime.datetime(2018, 4, 4, 9, 0),\n", + " datetime.datetime(2018, 4, 4, 10, 0),\n", + " datetime.datetime(2018, 4, 4, 11, 0),\n", + " datetime.datetime(2018, 4, 4, 12, 0),\n", + " datetime.datetime(2018, 4, 4, 13, 0),\n", + " datetime.datetime(2018, 4, 4, 14, 0),\n", + " datetime.datetime(2018, 4, 4, 15, 0),\n", + " datetime.datetime(2018, 4, 4, 16, 0),\n", + " datetime.datetime(2018, 4, 4, 17, 0),\n", + " datetime.datetime(2018, 4, 4, 18, 0),\n", + " datetime.datetime(2018, 4, 4, 19, 0),\n", + " datetime.datetime(2018, 4, 4, 20, 0),\n", + " datetime.datetime(2018, 4, 4, 21, 0),\n", + " datetime.datetime(2018, 4, 4, 22, 0),\n", + " datetime.datetime(2018, 4, 4, 23, 0),\n", + " datetime.datetime(2018, 4, 5, 0, 0),\n", + " datetime.datetime(2018, 4, 5, 1, 0),\n", + " datetime.datetime(2018, 4, 5, 2, 0),\n", + " datetime.datetime(2018, 4, 5, 3, 0),\n", + " datetime.datetime(2018, 4, 5, 4, 0),\n", + " datetime.datetime(2018, 4, 5, 5, 0),\n", + " datetime.datetime(2018, 4, 5, 6, 0),\n", + " datetime.datetime(2018, 4, 5, 7, 0),\n", + " datetime.datetime(2018, 4, 5, 8, 0),\n", + " datetime.datetime(2018, 4, 5, 9, 0),\n", + " datetime.datetime(2018, 4, 5, 10, 0),\n", + " datetime.datetime(2018, 4, 5, 11, 0),\n", + " datetime.datetime(2018, 4, 5, 12, 0),\n", + " datetime.datetime(2018, 4, 5, 13, 0),\n", + " datetime.datetime(2018, 4, 5, 14, 0),\n", + " datetime.datetime(2018, 4, 5, 15, 0),\n", 
+ " datetime.datetime(2018, 4, 5, 16, 0),\n", + " datetime.datetime(2018, 4, 5, 17, 0),\n", + " datetime.datetime(2018, 4, 5, 18, 0),\n", + " datetime.datetime(2018, 4, 5, 19, 0),\n", + " datetime.datetime(2018, 4, 5, 20, 0),\n", + " datetime.datetime(2018, 4, 5, 21, 0),\n", + " datetime.datetime(2018, 4, 5, 22, 0),\n", + " datetime.datetime(2018, 4, 5, 23, 0),\n", + " datetime.datetime(2018, 4, 6, 0, 0),\n", + " datetime.datetime(2018, 4, 6, 1, 0),\n", + " datetime.datetime(2018, 4, 6, 2, 0),\n", + " datetime.datetime(2018, 4, 6, 3, 0),\n", + " datetime.datetime(2018, 4, 6, 4, 0),\n", + " datetime.datetime(2018, 4, 6, 5, 0),\n", + " datetime.datetime(2018, 4, 6, 6, 0),\n", + " datetime.datetime(2018, 4, 6, 7, 0),\n", + " datetime.datetime(2018, 4, 6, 8, 0),\n", + " datetime.datetime(2018, 4, 6, 9, 0),\n", + " datetime.datetime(2018, 4, 6, 10, 0),\n", + " datetime.datetime(2018, 4, 6, 11, 0),\n", + " datetime.datetime(2018, 4, 6, 12, 0),\n", + " datetime.datetime(2018, 4, 6, 13, 0),\n", + " datetime.datetime(2018, 4, 6, 14, 0),\n", + " datetime.datetime(2018, 4, 6, 15, 0),\n", + " datetime.datetime(2018, 4, 6, 16, 0),\n", + " datetime.datetime(2018, 4, 6, 17, 0),\n", + " datetime.datetime(2018, 4, 6, 18, 0),\n", + " datetime.datetime(2018, 4, 6, 19, 0),\n", + " datetime.datetime(2018, 4, 6, 20, 0),\n", + " datetime.datetime(2018, 4, 6, 21, 0),\n", + " datetime.datetime(2018, 4, 6, 22, 0),\n", + " datetime.datetime(2018, 4, 6, 23, 0),\n", + " datetime.datetime(2018, 4, 7, 0, 0),\n", + " datetime.datetime(2018, 4, 7, 1, 0),\n", + " datetime.datetime(2018, 4, 7, 2, 0),\n", + " datetime.datetime(2018, 4, 7, 3, 0),\n", + " datetime.datetime(2018, 4, 7, 4, 0),\n", + " datetime.datetime(2018, 4, 7, 5, 0),\n", + " datetime.datetime(2018, 4, 7, 6, 0),\n", + " datetime.datetime(2018, 4, 7, 7, 0),\n", + " datetime.datetime(2018, 4, 7, 8, 0),\n", + " datetime.datetime(2018, 4, 7, 9, 0),\n", + " datetime.datetime(2018, 4, 7, 10, 0),\n", + " datetime.datetime(2018, 4, 7, 11, 0),\n", + " datetime.datetime(2018, 4, 7, 12, 0),\n", + " datetime.datetime(2018, 4, 7, 13, 0),\n", + " datetime.datetime(2018, 4, 7, 14, 0),\n", + " datetime.datetime(2018, 4, 7, 15, 0),\n", + " datetime.datetime(2018, 4, 7, 16, 0),\n", + " datetime.datetime(2018, 4, 7, 17, 0),\n", + " datetime.datetime(2018, 4, 7, 18, 0),\n", + " datetime.datetime(2018, 4, 7, 19, 0),\n", + " datetime.datetime(2018, 4, 7, 20, 0),\n", + " datetime.datetime(2018, 4, 7, 21, 0),\n", + " datetime.datetime(2018, 4, 7, 22, 0),\n", + " datetime.datetime(2018, 4, 7, 23, 0),\n", + " datetime.datetime(2018, 4, 8, 0, 0),\n", + " datetime.datetime(2018, 4, 8, 1, 0),\n", + " datetime.datetime(2018, 4, 8, 2, 0),\n", + " datetime.datetime(2018, 4, 8, 3, 0),\n", + " datetime.datetime(2018, 4, 8, 4, 0),\n", + " datetime.datetime(2018, 4, 8, 5, 0),\n", + " datetime.datetime(2018, 4, 8, 6, 0),\n", + " datetime.datetime(2018, 4, 8, 7, 0),\n", + " datetime.datetime(2018, 4, 8, 8, 0),\n", + " datetime.datetime(2018, 4, 8, 9, 0),\n", + " datetime.datetime(2018, 4, 8, 10, 0),\n", + " datetime.datetime(2018, 4, 8, 11, 0),\n", + " datetime.datetime(2018, 4, 8, 12, 0),\n", + " datetime.datetime(2018, 4, 8, 13, 0),\n", + " datetime.datetime(2018, 4, 8, 14, 0),\n", + " datetime.datetime(2018, 4, 8, 15, 0),\n", + " datetime.datetime(2018, 4, 8, 16, 0),\n", + " datetime.datetime(2018, 4, 8, 17, 0),\n", + " datetime.datetime(2018, 4, 8, 18, 0),\n", + " datetime.datetime(2018, 4, 8, 19, 0),\n", + " datetime.datetime(2018, 4, 8, 20, 0),\n", + " datetime.datetime(2018, 4, 8, 21, 
0),\n", + " datetime.datetime(2018, 4, 8, 22, 0),\n", + " datetime.datetime(2018, 4, 8, 23, 0),\n", + " datetime.datetime(2018, 4, 9, 0, 0),\n", + " datetime.datetime(2018, 4, 9, 1, 0),\n", + " datetime.datetime(2018, 4, 9, 2, 0),\n", + " datetime.datetime(2018, 4, 9, 3, 0),\n", + " datetime.datetime(2018, 4, 9, 4, 0),\n", + " datetime.datetime(2018, 4, 9, 5, 0),\n", + " datetime.datetime(2018, 4, 9, 6, 0),\n", + " datetime.datetime(2018, 4, 9, 7, 0),\n", + " datetime.datetime(2018, 4, 9, 8, 0),\n", + " datetime.datetime(2018, 4, 9, 9, 0),\n", + " datetime.datetime(2018, 4, 9, 10, 0),\n", + " datetime.datetime(2018, 4, 9, 11, 0),\n", + " datetime.datetime(2018, 4, 9, 12, 0),\n", + " datetime.datetime(2018, 4, 9, 13, 0),\n", + " datetime.datetime(2018, 4, 9, 14, 0),\n", + " datetime.datetime(2018, 4, 9, 15, 0),\n", + " datetime.datetime(2018, 4, 9, 16, 0),\n", + " datetime.datetime(2018, 4, 9, 17, 0),\n", + " datetime.datetime(2018, 4, 9, 18, 0),\n", + " datetime.datetime(2018, 4, 9, 19, 0),\n", + " datetime.datetime(2018, 4, 9, 20, 0),\n", + " datetime.datetime(2018, 4, 9, 21, 0),\n", + " datetime.datetime(2018, 4, 9, 22, 0),\n", + " datetime.datetime(2018, 4, 9, 23, 0),\n", + " datetime.datetime(2018, 4, 10, 0, 0),\n", + " datetime.datetime(2018, 4, 10, 1, 0),\n", + " datetime.datetime(2018, 4, 10, 2, 0),\n", + " datetime.datetime(2018, 4, 10, 3, 0),\n", + " datetime.datetime(2018, 4, 10, 4, 0),\n", + " datetime.datetime(2018, 4, 10, 5, 0),\n", + " datetime.datetime(2018, 4, 10, 6, 0),\n", + " datetime.datetime(2018, 4, 10, 7, 0),\n", + " datetime.datetime(2018, 4, 10, 8, 0),\n", + " datetime.datetime(2018, 4, 10, 9, 0),\n", + " datetime.datetime(2018, 4, 10, 10, 0),\n", + " datetime.datetime(2018, 4, 10, 11, 0),\n", + " datetime.datetime(2018, 4, 10, 12, 0),\n", + " datetime.datetime(2018, 4, 10, 13, 0),\n", + " datetime.datetime(2018, 4, 10, 14, 0),\n", + " datetime.datetime(2018, 4, 10, 15, 0),\n", + " datetime.datetime(2018, 4, 10, 16, 0),\n", + " datetime.datetime(2018, 4, 10, 17, 0),\n", + " datetime.datetime(2018, 4, 10, 18, 0),\n", + " datetime.datetime(2018, 4, 10, 19, 0),\n", + " datetime.datetime(2018, 4, 10, 20, 0),\n", + " datetime.datetime(2018, 4, 10, 21, 0),\n", + " datetime.datetime(2018, 4, 10, 22, 0),\n", + " datetime.datetime(2018, 4, 10, 23, 0),\n", + " datetime.datetime(2018, 4, 11, 0, 0),\n", + " datetime.datetime(2018, 4, 11, 1, 0),\n", + " datetime.datetime(2018, 4, 11, 2, 0),\n", + " datetime.datetime(2018, 4, 11, 3, 0),\n", + " datetime.datetime(2018, 4, 11, 4, 0),\n", + " datetime.datetime(2018, 4, 11, 5, 0),\n", + " datetime.datetime(2018, 4, 11, 6, 0),\n", + " datetime.datetime(2018, 4, 11, 7, 0),\n", + " datetime.datetime(2018, 4, 11, 8, 0),\n", + " datetime.datetime(2018, 4, 11, 9, 0),\n", + " datetime.datetime(2018, 4, 11, 10, 0),\n", + " datetime.datetime(2018, 4, 11, 11, 0),\n", + " datetime.datetime(2018, 4, 11, 12, 0),\n", + " datetime.datetime(2018, 4, 11, 13, 0),\n", + " datetime.datetime(2018, 4, 11, 14, 0),\n", + " datetime.datetime(2018, 4, 11, 15, 0),\n", + " datetime.datetime(2018, 4, 11, 16, 0),\n", + " datetime.datetime(2018, 4, 11, 17, 0),\n", + " datetime.datetime(2018, 4, 11, 18, 0),\n", + " datetime.datetime(2018, 4, 11, 19, 0),\n", + " datetime.datetime(2018, 4, 11, 20, 0),\n", + " datetime.datetime(2018, 4, 11, 21, 0),\n", + " datetime.datetime(2018, 4, 11, 22, 0),\n", + " datetime.datetime(2018, 4, 11, 23, 0),\n", + " datetime.datetime(2018, 4, 12, 0, 0),\n", + " datetime.datetime(2018, 4, 12, 1, 0),\n", + " datetime.datetime(2018, 4, 
12, 2, 0),\n", + " datetime.datetime(2018, 4, 12, 3, 0),\n", + " datetime.datetime(2018, 4, 12, 4, 0),\n", + " datetime.datetime(2018, 4, 12, 5, 0),\n", + " datetime.datetime(2018, 4, 12, 6, 0),\n", + " datetime.datetime(2018, 4, 12, 7, 0),\n", + " datetime.datetime(2018, 4, 12, 8, 0),\n", + " datetime.datetime(2018, 4, 12, 9, 0),\n", + " datetime.datetime(2018, 4, 12, 10, 0),\n", + " datetime.datetime(2018, 4, 12, 11, 0),\n", + " datetime.datetime(2018, 4, 12, 12, 0),\n", + " datetime.datetime(2018, 4, 12, 13, 0),\n", + " datetime.datetime(2018, 4, 12, 14, 0),\n", + " datetime.datetime(2018, 4, 12, 15, 0),\n", + " datetime.datetime(2018, 4, 12, 16, 0),\n", + " datetime.datetime(2018, 4, 12, 17, 0),\n", + " datetime.datetime(2018, 4, 12, 18, 0),\n", + " datetime.datetime(2018, 4, 12, 19, 0),\n", + " datetime.datetime(2018, 4, 12, 20, 0),\n", + " datetime.datetime(2018, 4, 12, 21, 0),\n", + " datetime.datetime(2018, 4, 12, 22, 0),\n", + " datetime.datetime(2018, 4, 12, 23, 0),\n", + " datetime.datetime(2018, 4, 13, 0, 0),\n", + " datetime.datetime(2018, 4, 13, 1, 0),\n", + " datetime.datetime(2018, 4, 13, 2, 0),\n", + " datetime.datetime(2018, 4, 13, 3, 0),\n", + " datetime.datetime(2018, 4, 13, 4, 0),\n", + " datetime.datetime(2018, 4, 13, 5, 0),\n", + " datetime.datetime(2018, 4, 13, 6, 0),\n", + " datetime.datetime(2018, 4, 13, 7, 0),\n", + " datetime.datetime(2018, 4, 13, 8, 0),\n", + " datetime.datetime(2018, 4, 13, 9, 0),\n", + " datetime.datetime(2018, 4, 13, 10, 0),\n", + " datetime.datetime(2018, 4, 13, 11, 0),\n", + " datetime.datetime(2018, 4, 13, 12, 0),\n", + " datetime.datetime(2018, 4, 13, 13, 0),\n", + " datetime.datetime(2018, 4, 13, 14, 0),\n", + " datetime.datetime(2018, 4, 13, 15, 0),\n", + " datetime.datetime(2018, 4, 13, 16, 0),\n", + " datetime.datetime(2018, 4, 13, 17, 0),\n", + " datetime.datetime(2018, 4, 13, 18, 0),\n", + " datetime.datetime(2018, 4, 13, 19, 0),\n", + " datetime.datetime(2018, 4, 13, 20, 0),\n", + " datetime.datetime(2018, 4, 13, 21, 0),\n", + " datetime.datetime(2018, 4, 13, 22, 0),\n", + " datetime.datetime(2018, 4, 13, 23, 0),\n", + " datetime.datetime(2018, 4, 14, 0, 0),\n", + " datetime.datetime(2018, 4, 14, 1, 0),\n", + " datetime.datetime(2018, 4, 14, 2, 0),\n", + " datetime.datetime(2018, 4, 14, 3, 0),\n", + " datetime.datetime(2018, 4, 14, 4, 0),\n", + " datetime.datetime(2018, 4, 14, 5, 0),\n", + " datetime.datetime(2018, 4, 14, 6, 0),\n", + " datetime.datetime(2018, 4, 14, 7, 0),\n", + " datetime.datetime(2018, 4, 14, 8, 0),\n", + " datetime.datetime(2018, 4, 14, 9, 0),\n", + " datetime.datetime(2018, 4, 14, 10, 0),\n", + " datetime.datetime(2018, 4, 14, 11, 0),\n", + " datetime.datetime(2018, 4, 14, 12, 0),\n", + " datetime.datetime(2018, 4, 14, 13, 0),\n", + " datetime.datetime(2018, 4, 14, 14, 0),\n", + " datetime.datetime(2018, 4, 14, 15, 0),\n", + " datetime.datetime(2018, 4, 14, 16, 0),\n", + " datetime.datetime(2018, 4, 14, 17, 0),\n", + " datetime.datetime(2018, 4, 14, 18, 0),\n", + " datetime.datetime(2018, 4, 14, 19, 0),\n", + " datetime.datetime(2018, 4, 14, 20, 0),\n", + " datetime.datetime(2018, 4, 14, 21, 0),\n", + " datetime.datetime(2018, 4, 14, 22, 0),\n", + " datetime.datetime(2018, 4, 14, 23, 0),\n", + " datetime.datetime(2018, 4, 15, 0, 0),\n", + " datetime.datetime(2018, 4, 15, 1, 0),\n", + " datetime.datetime(2018, 4, 15, 2, 0),\n", + " datetime.datetime(2018, 4, 15, 3, 0),\n", + " datetime.datetime(2018, 4, 15, 4, 0),\n", + " datetime.datetime(2018, 4, 15, 5, 0),\n", + " datetime.datetime(2018, 4, 15, 6, 0),\n", + 
" datetime.datetime(2018, 4, 15, 7, 0),\n", + " datetime.datetime(2018, 4, 15, 8, 0),\n", + " datetime.datetime(2018, 4, 15, 9, 0),\n", + " datetime.datetime(2018, 4, 15, 10, 0),\n", + " datetime.datetime(2018, 4, 15, 11, 0),\n", + " datetime.datetime(2018, 4, 15, 12, 0),\n", + " datetime.datetime(2018, 4, 15, 13, 0),\n", + " datetime.datetime(2018, 4, 15, 14, 0),\n", + " datetime.datetime(2018, 4, 15, 15, 0),\n", + " datetime.datetime(2018, 4, 15, 16, 0),\n", + " datetime.datetime(2018, 4, 15, 17, 0),\n", + " datetime.datetime(2018, 4, 15, 18, 0),\n", + " datetime.datetime(2018, 4, 15, 19, 0),\n", + " datetime.datetime(2018, 4, 15, 20, 0),\n", + " datetime.datetime(2018, 4, 15, 21, 0),\n", + " datetime.datetime(2018, 4, 15, 22, 0),\n", + " datetime.datetime(2018, 4, 15, 23, 0),\n", + " datetime.datetime(2018, 4, 16, 0, 0),\n", + " datetime.datetime(2018, 4, 16, 1, 0),\n", + " datetime.datetime(2018, 4, 16, 2, 0),\n", + " datetime.datetime(2018, 4, 16, 3, 0),\n", + " datetime.datetime(2018, 4, 16, 4, 0),\n", + " datetime.datetime(2018, 4, 16, 5, 0),\n", + " datetime.datetime(2018, 4, 16, 6, 0),\n", + " datetime.datetime(2018, 4, 16, 7, 0),\n", + " datetime.datetime(2018, 4, 16, 8, 0),\n", + " datetime.datetime(2018, 4, 16, 9, 0),\n", + " datetime.datetime(2018, 4, 16, 10, 0),\n", + " datetime.datetime(2018, 4, 16, 11, 0),\n", + " datetime.datetime(2018, 4, 16, 12, 0),\n", + " datetime.datetime(2018, 4, 16, 13, 0),\n", + " datetime.datetime(2018, 4, 16, 14, 0),\n", + " datetime.datetime(2018, 4, 16, 15, 0),\n", + " datetime.datetime(2018, 4, 16, 16, 0),\n", + " datetime.datetime(2018, 4, 16, 17, 0),\n", + " datetime.datetime(2018, 4, 16, 18, 0),\n", + " datetime.datetime(2018, 4, 16, 19, 0),\n", + " datetime.datetime(2018, 4, 16, 20, 0),\n", + " datetime.datetime(2018, 4, 16, 21, 0),\n", + " datetime.datetime(2018, 4, 16, 22, 0),\n", + " datetime.datetime(2018, 4, 16, 23, 0),\n", + " datetime.datetime(2018, 4, 17, 0, 0),\n", + " datetime.datetime(2018, 4, 17, 1, 0),\n", + " datetime.datetime(2018, 4, 17, 2, 0),\n", + " datetime.datetime(2018, 4, 17, 3, 0),\n", + " datetime.datetime(2018, 4, 17, 4, 0),\n", + " datetime.datetime(2018, 4, 17, 5, 0),\n", + " datetime.datetime(2018, 4, 17, 6, 0),\n", + " datetime.datetime(2018, 4, 17, 7, 0),\n", + " datetime.datetime(2018, 4, 17, 8, 0),\n", + " datetime.datetime(2018, 4, 17, 9, 0),\n", + " datetime.datetime(2018, 4, 17, 10, 0),\n", + " datetime.datetime(2018, 4, 17, 11, 0),\n", + " datetime.datetime(2018, 4, 17, 12, 0),\n", + " datetime.datetime(2018, 4, 17, 13, 0),\n", + " datetime.datetime(2018, 4, 17, 14, 0),\n", + " datetime.datetime(2018, 4, 17, 15, 0),\n", + " datetime.datetime(2018, 4, 17, 16, 0),\n", + " datetime.datetime(2018, 4, 17, 17, 0),\n", + " datetime.datetime(2018, 4, 17, 18, 0),\n", + " datetime.datetime(2018, 4, 17, 19, 0),\n", + " datetime.datetime(2018, 4, 17, 20, 0),\n", + " datetime.datetime(2018, 4, 17, 21, 0),\n", + " datetime.datetime(2018, 4, 17, 22, 0),\n", + " datetime.datetime(2018, 4, 17, 23, 0),\n", + " datetime.datetime(2018, 4, 18, 0, 0),\n", + " datetime.datetime(2018, 4, 18, 1, 0),\n", + " datetime.datetime(2018, 4, 18, 2, 0),\n", + " datetime.datetime(2018, 4, 18, 3, 0),\n", + " datetime.datetime(2018, 4, 18, 4, 0),\n", + " datetime.datetime(2018, 4, 18, 5, 0),\n", + " datetime.datetime(2018, 4, 18, 6, 0),\n", + " datetime.datetime(2018, 4, 18, 7, 0),\n", + " datetime.datetime(2018, 4, 18, 8, 0),\n", + " datetime.datetime(2018, 4, 18, 9, 0),\n", + " datetime.datetime(2018, 4, 18, 10, 0),\n", + " 
datetime.datetime(2018, 4, 18, 11, 0),\n", + " datetime.datetime(2018, 4, 18, 12, 0),\n", + " datetime.datetime(2018, 4, 18, 13, 0),\n", + " datetime.datetime(2018, 4, 18, 14, 0),\n", + " datetime.datetime(2018, 4, 18, 15, 0),\n", + " datetime.datetime(2018, 4, 18, 16, 0),\n", + " datetime.datetime(2018, 4, 18, 17, 0),\n", + " datetime.datetime(2018, 4, 18, 18, 0),\n", + " datetime.datetime(2018, 4, 18, 19, 0),\n", + " datetime.datetime(2018, 4, 18, 20, 0),\n", + " datetime.datetime(2018, 4, 18, 21, 0),\n", + " datetime.datetime(2018, 4, 18, 22, 0),\n", + " datetime.datetime(2018, 4, 18, 23, 0),\n", + " datetime.datetime(2018, 4, 19, 0, 0),\n", + " datetime.datetime(2018, 4, 19, 1, 0),\n", + " datetime.datetime(2018, 4, 19, 2, 0),\n", + " datetime.datetime(2018, 4, 19, 3, 0),\n", + " datetime.datetime(2018, 4, 19, 4, 0),\n", + " datetime.datetime(2018, 4, 19, 5, 0),\n", + " datetime.datetime(2018, 4, 19, 6, 0),\n", + " datetime.datetime(2018, 4, 19, 7, 0),\n", + " datetime.datetime(2018, 4, 19, 8, 0),\n", + " datetime.datetime(2018, 4, 19, 9, 0),\n", + " datetime.datetime(2018, 4, 19, 10, 0),\n", + " datetime.datetime(2018, 4, 19, 11, 0),\n", + " datetime.datetime(2018, 4, 19, 12, 0),\n", + " datetime.datetime(2018, 4, 19, 13, 0),\n", + " datetime.datetime(2018, 4, 19, 14, 0),\n", + " datetime.datetime(2018, 4, 19, 15, 0),\n", + " datetime.datetime(2018, 4, 19, 16, 0),\n", + " datetime.datetime(2018, 4, 19, 17, 0),\n", + " datetime.datetime(2018, 4, 19, 18, 0),\n", + " datetime.datetime(2018, 4, 19, 19, 0),\n", + " datetime.datetime(2018, 4, 19, 20, 0),\n", + " datetime.datetime(2018, 4, 19, 21, 0),\n", + " datetime.datetime(2018, 4, 19, 22, 0),\n", + " datetime.datetime(2018, 4, 19, 23, 0),\n", + " datetime.datetime(2018, 4, 20, 0, 0),\n", + " datetime.datetime(2018, 4, 20, 1, 0),\n", + " datetime.datetime(2018, 4, 20, 2, 0),\n", + " datetime.datetime(2018, 4, 20, 3, 0),\n", + " datetime.datetime(2018, 4, 20, 4, 0),\n", + " datetime.datetime(2018, 4, 20, 5, 0),\n", + " datetime.datetime(2018, 4, 20, 6, 0),\n", + " datetime.datetime(2018, 4, 20, 7, 0),\n", + " datetime.datetime(2018, 4, 20, 8, 0),\n", + " datetime.datetime(2018, 4, 20, 9, 0),\n", + " datetime.datetime(2018, 4, 20, 10, 0),\n", + " datetime.datetime(2018, 4, 20, 11, 0),\n", + " datetime.datetime(2018, 4, 20, 12, 0),\n", + " datetime.datetime(2018, 4, 20, 13, 0),\n", + " datetime.datetime(2018, 4, 20, 14, 0),\n", + " datetime.datetime(2018, 4, 20, 15, 0),\n", + " datetime.datetime(2018, 4, 20, 16, 0),\n", + " datetime.datetime(2018, 4, 20, 17, 0),\n", + " datetime.datetime(2018, 4, 20, 18, 0),\n", + " datetime.datetime(2018, 4, 20, 19, 0),\n", + " datetime.datetime(2018, 4, 20, 20, 0),\n", + " datetime.datetime(2018, 4, 20, 21, 0),\n", + " datetime.datetime(2018, 4, 20, 22, 0),\n", + " datetime.datetime(2018, 4, 20, 23, 0),\n", + " datetime.datetime(2018, 4, 21, 0, 0),\n", + " datetime.datetime(2018, 4, 21, 1, 0),\n", + " datetime.datetime(2018, 4, 21, 2, 0),\n", + " datetime.datetime(2018, 4, 21, 3, 0),\n", + " datetime.datetime(2018, 4, 21, 4, 0),\n", + " datetime.datetime(2018, 4, 21, 5, 0),\n", + " datetime.datetime(2018, 4, 21, 6, 0),\n", + " datetime.datetime(2018, 4, 21, 7, 0),\n", + " datetime.datetime(2018, 4, 21, 8, 0),\n", + " datetime.datetime(2018, 4, 21, 9, 0),\n", + " datetime.datetime(2018, 4, 21, 10, 0),\n", + " datetime.datetime(2018, 4, 21, 11, 0),\n", + " datetime.datetime(2018, 4, 21, 12, 0),\n", + " datetime.datetime(2018, 4, 21, 13, 0),\n", + " datetime.datetime(2018, 4, 21, 14, 0),\n", + " 
datetime.datetime(2018, 4, 21, 15, 0),\n", + " datetime.datetime(2018, 4, 21, 16, 0),\n", + " datetime.datetime(2018, 4, 21, 17, 0),\n", + " datetime.datetime(2018, 4, 21, 18, 0),\n", + " datetime.datetime(2018, 4, 21, 19, 0),\n", + " datetime.datetime(2018, 4, 21, 20, 0),\n", + " datetime.datetime(2018, 4, 21, 21, 0),\n", + " datetime.datetime(2018, 4, 21, 22, 0),\n", + " datetime.datetime(2018, 4, 21, 23, 0),\n", + " datetime.datetime(2018, 4, 22, 0, 0),\n", + " datetime.datetime(2018, 4, 22, 1, 0),\n", + " datetime.datetime(2018, 4, 22, 2, 0),\n", + " datetime.datetime(2018, 4, 22, 3, 0),\n", + " datetime.datetime(2018, 4, 22, 4, 0),\n", + " datetime.datetime(2018, 4, 22, 5, 0),\n", + " datetime.datetime(2018, 4, 22, 6, 0),\n", + " datetime.datetime(2018, 4, 22, 7, 0),\n", + " datetime.datetime(2018, 4, 22, 8, 0),\n", + " datetime.datetime(2018, 4, 22, 9, 0),\n", + " datetime.datetime(2018, 4, 22, 10, 0),\n", + " datetime.datetime(2018, 4, 22, 11, 0),\n", + " datetime.datetime(2018, 4, 22, 12, 0),\n", + " datetime.datetime(2018, 4, 22, 13, 0),\n", + " datetime.datetime(2018, 4, 22, 14, 0),\n", + " datetime.datetime(2018, 4, 22, 15, 0),\n", + " datetime.datetime(2018, 4, 22, 16, 0),\n", + " datetime.datetime(2018, 4, 22, 17, 0),\n", + " datetime.datetime(2018, 4, 22, 18, 0),\n", + " datetime.datetime(2018, 4, 22, 19, 0),\n", + " datetime.datetime(2018, 4, 22, 20, 0),\n", + " datetime.datetime(2018, 4, 22, 21, 0),\n", + " datetime.datetime(2018, 4, 22, 22, 0),\n", + " datetime.datetime(2018, 4, 22, 23, 0),\n", + " datetime.datetime(2018, 4, 23, 0, 0),\n", + " datetime.datetime(2018, 4, 23, 1, 0),\n", + " datetime.datetime(2018, 4, 23, 2, 0),\n", + " datetime.datetime(2018, 4, 23, 3, 0),\n", + " datetime.datetime(2018, 4, 23, 4, 0),\n", + " datetime.datetime(2018, 4, 23, 5, 0),\n", + " datetime.datetime(2018, 4, 23, 6, 0),\n", + " datetime.datetime(2018, 4, 23, 7, 0),\n", + " datetime.datetime(2018, 4, 23, 8, 0),\n", + " datetime.datetime(2018, 4, 23, 9, 0),\n", + " datetime.datetime(2018, 4, 23, 10, 0),\n", + " datetime.datetime(2018, 4, 23, 11, 0),\n", + " datetime.datetime(2018, 4, 23, 12, 0),\n", + " datetime.datetime(2018, 4, 23, 13, 0),\n", + " datetime.datetime(2018, 4, 23, 14, 0),\n", + " datetime.datetime(2018, 4, 23, 15, 0),\n", + " datetime.datetime(2018, 4, 23, 16, 0),\n", + " datetime.datetime(2018, 4, 23, 17, 0),\n", + " datetime.datetime(2018, 4, 23, 18, 0),\n", + " datetime.datetime(2018, 4, 23, 19, 0),\n", + " datetime.datetime(2018, 4, 23, 20, 0),\n", + " datetime.datetime(2018, 4, 23, 21, 0),\n", + " datetime.datetime(2018, 4, 23, 22, 0),\n", + " datetime.datetime(2018, 4, 23, 23, 0),\n", + " datetime.datetime(2018, 4, 24, 0, 0),\n", + " datetime.datetime(2018, 4, 24, 1, 0),\n", + " datetime.datetime(2018, 4, 24, 2, 0),\n", + " datetime.datetime(2018, 4, 24, 3, 0),\n", + " datetime.datetime(2018, 4, 24, 4, 0),\n", + " datetime.datetime(2018, 4, 24, 5, 0),\n", + " datetime.datetime(2018, 4, 24, 6, 0),\n", + " datetime.datetime(2018, 4, 24, 7, 0),\n", + " datetime.datetime(2018, 4, 24, 8, 0),\n", + " datetime.datetime(2018, 4, 24, 9, 0),\n", + " datetime.datetime(2018, 4, 24, 10, 0),\n", + " datetime.datetime(2018, 4, 24, 11, 0),\n", + " datetime.datetime(2018, 4, 24, 12, 0),\n", + " datetime.datetime(2018, 4, 24, 13, 0),\n", + " datetime.datetime(2018, 4, 24, 14, 0),\n", + " datetime.datetime(2018, 4, 24, 15, 0),\n", + " datetime.datetime(2018, 4, 24, 16, 0),\n", + " datetime.datetime(2018, 4, 24, 17, 0),\n", + " datetime.datetime(2018, 4, 24, 18, 0),\n", + " 
datetime.datetime(2018, 4, 24, 19, 0),\n", + " datetime.datetime(2018, 4, 24, 20, 0),\n", + " datetime.datetime(2018, 4, 24, 21, 0),\n", + " datetime.datetime(2018, 4, 24, 22, 0),\n", + " datetime.datetime(2018, 4, 24, 23, 0),\n", + " datetime.datetime(2018, 4, 25, 0, 0),\n", + " datetime.datetime(2018, 4, 25, 1, 0),\n", + " datetime.datetime(2018, 4, 25, 2, 0),\n", + " datetime.datetime(2018, 4, 25, 3, 0),\n", + " datetime.datetime(2018, 4, 25, 4, 0),\n", + " datetime.datetime(2018, 4, 25, 5, 0),\n", + " datetime.datetime(2018, 4, 25, 6, 0),\n", + " datetime.datetime(2018, 4, 25, 7, 0),\n", + " datetime.datetime(2018, 4, 25, 8, 0),\n", + " datetime.datetime(2018, 4, 25, 9, 0),\n", + " datetime.datetime(2018, 4, 25, 10, 0),\n", + " datetime.datetime(2018, 4, 25, 11, 0),\n", + " datetime.datetime(2018, 4, 25, 12, 0),\n", + " datetime.datetime(2018, 4, 25, 13, 0),\n", + " datetime.datetime(2018, 4, 25, 14, 0),\n", + " datetime.datetime(2018, 4, 25, 15, 0),\n", + " datetime.datetime(2018, 4, 25, 16, 0),\n", + " datetime.datetime(2018, 4, 25, 17, 0),\n", + " datetime.datetime(2018, 4, 25, 18, 0),\n", + " datetime.datetime(2018, 4, 25, 19, 0),\n", + " datetime.datetime(2018, 4, 25, 20, 0),\n", + " datetime.datetime(2018, 4, 25, 21, 0),\n", + " datetime.datetime(2018, 4, 25, 22, 0),\n", + " datetime.datetime(2018, 4, 25, 23, 0),\n", + " datetime.datetime(2018, 4, 26, 0, 0),\n", + " datetime.datetime(2018, 4, 26, 1, 0),\n", + " datetime.datetime(2018, 4, 26, 2, 0),\n", + " datetime.datetime(2018, 4, 26, 3, 0),\n", + " datetime.datetime(2018, 4, 26, 4, 0),\n", + " datetime.datetime(2018, 4, 26, 5, 0),\n", + " datetime.datetime(2018, 4, 26, 6, 0),\n", + " datetime.datetime(2018, 4, 26, 7, 0),\n", + " datetime.datetime(2018, 4, 26, 8, 0),\n", + " datetime.datetime(2018, 4, 26, 9, 0),\n", + " datetime.datetime(2018, 4, 26, 10, 0),\n", + " datetime.datetime(2018, 4, 26, 11, 0),\n", + " datetime.datetime(2018, 4, 26, 12, 0),\n", + " datetime.datetime(2018, 4, 26, 13, 0),\n", + " datetime.datetime(2018, 4, 26, 14, 0),\n", + " datetime.datetime(2018, 4, 26, 15, 0),\n", + " datetime.datetime(2018, 4, 26, 16, 0),\n", + " datetime.datetime(2018, 4, 26, 17, 0),\n", + " datetime.datetime(2018, 4, 26, 18, 0),\n", + " datetime.datetime(2018, 4, 26, 19, 0),\n", + " datetime.datetime(2018, 4, 26, 20, 0),\n", + " datetime.datetime(2018, 4, 26, 21, 0),\n", + " datetime.datetime(2018, 4, 26, 22, 0),\n", + " datetime.datetime(2018, 4, 26, 23, 0),\n", + " datetime.datetime(2018, 4, 27, 0, 0),\n", + " datetime.datetime(2018, 4, 27, 1, 0),\n", + " datetime.datetime(2018, 4, 27, 2, 0),\n", + " datetime.datetime(2018, 4, 27, 3, 0),\n", + " datetime.datetime(2018, 4, 27, 4, 0),\n", + " datetime.datetime(2018, 4, 27, 5, 0),\n", + " datetime.datetime(2018, 4, 27, 6, 0),\n", + " datetime.datetime(2018, 4, 27, 7, 0),\n", + " datetime.datetime(2018, 4, 27, 8, 0),\n", + " datetime.datetime(2018, 4, 27, 9, 0),\n", + " datetime.datetime(2018, 4, 27, 10, 0),\n", + " datetime.datetime(2018, 4, 27, 11, 0),\n", + " datetime.datetime(2018, 4, 27, 12, 0),\n", + " datetime.datetime(2018, 4, 27, 13, 0),\n", + " datetime.datetime(2018, 4, 27, 14, 0),\n", + " datetime.datetime(2018, 4, 27, 15, 0),\n", + " datetime.datetime(2018, 4, 27, 16, 0),\n", + " datetime.datetime(2018, 4, 27, 17, 0),\n", + " datetime.datetime(2018, 4, 27, 18, 0),\n", + " datetime.datetime(2018, 4, 27, 19, 0),\n", + " datetime.datetime(2018, 4, 27, 20, 0),\n", + " datetime.datetime(2018, 4, 27, 21, 0),\n", + " datetime.datetime(2018, 4, 27, 22, 0),\n", + " 
datetime.datetime(2018, 4, 27, 23, 0),\n", + " datetime.datetime(2018, 4, 28, 0, 0),\n", + " datetime.datetime(2018, 4, 28, 1, 0),\n", + " datetime.datetime(2018, 4, 28, 2, 0),\n", + " datetime.datetime(2018, 4, 28, 3, 0),\n", + " datetime.datetime(2018, 4, 28, 4, 0),\n", + " datetime.datetime(2018, 4, 28, 5, 0),\n", + " datetime.datetime(2018, 4, 28, 6, 0),\n", + " datetime.datetime(2018, 4, 28, 7, 0),\n", + " datetime.datetime(2018, 4, 28, 8, 0),\n", + " datetime.datetime(2018, 4, 28, 9, 0),\n", + " datetime.datetime(2018, 4, 28, 10, 0),\n", + " datetime.datetime(2018, 4, 28, 11, 0),\n", + " datetime.datetime(2018, 4, 28, 12, 0),\n", + " datetime.datetime(2018, 4, 28, 13, 0),\n", + " datetime.datetime(2018, 4, 28, 14, 0),\n", + " datetime.datetime(2018, 4, 28, 15, 0),\n", + " datetime.datetime(2018, 4, 28, 16, 0),\n", + " datetime.datetime(2018, 4, 28, 17, 0),\n", + " datetime.datetime(2018, 4, 28, 18, 0),\n", + " datetime.datetime(2018, 4, 28, 19, 0),\n", + " datetime.datetime(2018, 4, 28, 20, 0),\n", + " datetime.datetime(2018, 4, 28, 21, 0),\n", + " datetime.datetime(2018, 4, 28, 22, 0),\n", + " datetime.datetime(2018, 4, 28, 23, 0),\n", + " datetime.datetime(2018, 4, 29, 0, 0),\n", + " datetime.datetime(2018, 4, 29, 1, 0),\n", + " datetime.datetime(2018, 4, 29, 2, 0),\n", + " datetime.datetime(2018, 4, 29, 3, 0),\n", + " datetime.datetime(2018, 4, 29, 4, 0),\n", + " datetime.datetime(2018, 4, 29, 5, 0),\n", + " datetime.datetime(2018, 4, 29, 6, 0),\n", + " datetime.datetime(2018, 4, 29, 7, 0),\n", + " datetime.datetime(2018, 4, 29, 8, 0),\n", + " datetime.datetime(2018, 4, 29, 9, 0),\n", + " datetime.datetime(2018, 4, 29, 10, 0),\n", + " datetime.datetime(2018, 4, 29, 11, 0),\n", + " datetime.datetime(2018, 4, 29, 12, 0),\n", + " datetime.datetime(2018, 4, 29, 13, 0),\n", + " datetime.datetime(2018, 4, 29, 14, 0),\n", + " datetime.datetime(2018, 4, 29, 15, 0),\n", + " datetime.datetime(2018, 4, 29, 16, 0),\n", + " datetime.datetime(2018, 4, 29, 17, 0),\n", + " datetime.datetime(2018, 4, 29, 18, 0),\n", + " datetime.datetime(2018, 4, 29, 19, 0),\n", + " datetime.datetime(2018, 4, 29, 20, 0),\n", + " datetime.datetime(2018, 4, 29, 21, 0),\n", + " datetime.datetime(2018, 4, 29, 22, 0),\n", + " datetime.datetime(2018, 4, 29, 23, 0),\n", + " datetime.datetime(2018, 4, 30, 0, 0),\n", + " datetime.datetime(2018, 4, 30, 1, 0),\n", + " datetime.datetime(2018, 4, 30, 2, 0),\n", + " datetime.datetime(2018, 4, 30, 3, 0),\n", + " datetime.datetime(2018, 4, 30, 4, 0),\n", + " datetime.datetime(2018, 4, 30, 5, 0),\n", + " datetime.datetime(2018, 4, 30, 6, 0),\n", + " datetime.datetime(2018, 4, 30, 7, 0),\n", + " datetime.datetime(2018, 4, 30, 8, 0),\n", + " datetime.datetime(2018, 4, 30, 9, 0),\n", + " datetime.datetime(2018, 4, 30, 10, 0),\n", + " datetime.datetime(2018, 4, 30, 11, 0),\n", + " datetime.datetime(2018, 4, 30, 12, 0),\n", + " datetime.datetime(2018, 4, 30, 13, 0),\n", + " datetime.datetime(2018, 4, 30, 14, 0),\n", + " datetime.datetime(2018, 4, 30, 15, 0),\n", + " datetime.datetime(2018, 4, 30, 16, 0),\n", + " datetime.datetime(2018, 4, 30, 17, 0),\n", + " datetime.datetime(2018, 4, 30, 18, 0),\n", + " datetime.datetime(2018, 4, 30, 19, 0),\n", + " datetime.datetime(2018, 4, 30, 20, 0),\n", + " datetime.datetime(2018, 4, 30, 21, 0),\n", + " datetime.datetime(2018, 4, 30, 22, 0),\n", + " datetime.datetime(2018, 4, 30, 23, 0)]" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", 
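The `nessy_1.time` output above is a plain Python list of `datetime.datetime` objects, one per hourly timestep, so standard-library filtering is enough to locate the timesteps of interest. A minimal sketch (the helper name is illustrative, not part of the NES API):

```python
import datetime

def indices_for_day(times, year, month, day):
    """Return the positions of all timesteps falling on the given date."""
    target = datetime.date(year, month, day)
    return [i for i, t in enumerate(times) if t.date() == target]

# e.g. the 24 hourly steps of 2018-04-15 within the 720-step April axis:
# idx = indices_for_day(nessy_1.time, 2018, 4, 15)
```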
+ "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, 47.76666641, 46.677778 ,\n", + " 48.721111 , 47.529167 , 47.05407 , 47.348056 ,\n", + " 47.973056 , 48.878611 , 48.106111 , 48.371111 ,\n", + " 48.334722 , 48.050833 , 47.838611 , 47.040277 ,\n", + " 47.06694444, 49.877778 , 50.629421 , 50.503333 ,\n", + " 41.695833 , 32.27000046, 80.05000305, 46.5475 ,\n", + " 46.813056 , 47.479722 , 47.049722 , 47.0675 ,\n", + " 47.18961391, -30.17254 , 16.86403 , 35.0381 ,\n", + " 49.73508444, 49.573394 , 49.066667 , 54.925556 ,\n", + " 52.802222 , 47.914722 , 53.166667 , 50.65 ,\n", + " 54.4368 , 47.80149841, 47.4165 , -70.666 ,\n", + " 54.746495 , 81.6 , 55.693588 , 72.58000183,\n", + " 56.290424 , 59.5 , 58.383333 , 39.54694 ,\n", + " 42.72056 , 39.87528 , 37.23722 , 43.43917 ,\n", + " 41.27417 , 42.31917 , 38.47278 , 39.08278 ,\n", + " 41.23889 , 41.39389 , 42.63472 , 37.05194 ,\n", + " 28.309 , 59.779167 , 60.53002 , 66.320278 ,\n", + " 67.97333333, 48.5 , 49.9 , 47.266667 ,\n", + " 43.616667 , 47.3 , 46.65 , 45. ,\n", + " 45.8 , 48.633333 , 42.936667 , 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , 51.149617 , 38.366667 ,\n", + " 35.316667 , 46.966667 , 46.91 , -0.20194 ,\n", + " 51.939722 , 53.32583 , 45.8 , 44.183333 ,\n", + " 37.571111 , 42.805462 , -69.005 , 39.0319 ,\n", + " 24.2883 , 24.466941 , 36.53833389, 33.293917 ,\n", + " 55.37611111, 56.161944 , 57.135278 , 36.0722 ,\n", + " 52.083333 , 53.333889 , 51.541111 , 52.3 ,\n", + " 51.974444 , 58.38853 , 65.833333 , 62.783333 ,\n", + " 78.90715 , 59. 
, 69.45 , 59.2 ,\n", + " 60.372386 , -72.0117 , 59.2 , -41.40819168,\n", + " -77.83200073, -45.0379982 , 51.814408 , 50.736444 ,\n", + " 54.753894 , 54.15 , 43.4 , 71.58616638,\n", + " 63.85 , 67.883333 , 57.394 , 57.1645 ,\n", + " 57.9525 , 56.0429 , 60.0858 , 57.816667 ,\n", + " 64.25 , 59.728 , 45.566667 , 46.428611 ,\n", + " 46.299444 , 48.933333 , 49.15 , 49.05 ,\n", + " 47.96 , 71.32301331, 40.12498 , 19.53623009,\n", + " -89.99694824, 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, 1.67666664e+01,\n", + " 1.29722220e+01, 1.59422220e+01, 9.92666700e+00,\n", + " 1.29579400e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 5.27897222e+00, 2.61000833e+00,\n", + " 2.96488600e+00, -3.20416700e+00, -7.87000000e+00,\n", + " -3.71305600e+00, -8.07500000e-01, -4.77444400e+00,\n", + " -3.03305600e+00, -3.20500000e+00, -1.75333300e+00,\n", + " 1.79444000e-01, 1.46305600e+00, -4.69146200e+00,\n", + " 2.92778000e-01, -3.24290000e+00, 1.12194400e+00,\n", + " 1.08223000e+00, -1.18531900e+00, -1.43822800e+00,\n", + " 2.30833330e+01, 2.56666670e+01, 1.95833330e+01,\n", + " 1.63200000e+01, 1.00318100e+02, -1.02444440e+01,\n", + " -9.89944000e+00, 8.63333300e+00, 1.07000000e+01,\n", + " 1.26597220e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 1.42184000e+01,\n", + " 6.56666700e+00, 6.27722200e+00, 5.85361100e+00,\n", + " 4.50000000e+00, 4.92361100e+00, 8.25200000e+00,\n", + " 1.39166670e+01, 8.88333300e+00, 1.18866800e+01,\n", 
+ " 1.15333330e+01, 3.00333330e+01, 5.20000000e+00,\n", + " 1.10781420e+01, 2.53510000e+00, 9.51666700e+00,\n", + " 1.74870804e+02, 1.66660004e+02, 1.69684006e+02,\n", + " 2.19724190e+01, 1.57395000e+01, 1.75342640e+01,\n", + " 2.20666670e+01, 2.19500000e+01, 1.28918823e+02,\n", + " 1.53333330e+01, 2.10666670e+01, 1.19140000e+01,\n", + " 1.47825000e+01, 1.24030000e+01, 1.31480000e+01,\n", + " 1.75052800e+01, 1.55666670e+01, 1.97666670e+01,\n", + " 1.54720000e+01, 1.48666670e+01, 1.50033330e+01,\n", + " 1.45386110e+01, 1.95833330e+01, 2.02833330e+01,\n", + " 2.22666670e+01, 1.78605560e+01, -1.56611465e+02,\n", + " -1.05236800e+02, -1.55576157e+02, -2.47999992e+01,\n", + " -1.24151001e+02, 1.03515700e+02, 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading ASTER_v3_altitude var (1/175)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((168,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (12/175)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((168,))\n", + "Rank 000: Loading ESDAC_Meybeck_landform_classification var (13/175)\n", + "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", + "Rank 000: Loaded 
ESDAC_modal_Iwahashi_landform_classification_25km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((168,))\n", + "Rank 000: Loading ETOPO1_altitude var (18/175)\n", + "Rank 000: Loaded ETOPO1_altitude var ((168,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (19/175)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((168,))\n", + "Rank 000: Loading GHOST_version var (20/175)\n", + "Rank 000: Loaded GHOST_version var ((168,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (21/175)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (22/175)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (23/175)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (24/175)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (25/175)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((168,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (26/175)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (27/175)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (28/175)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (29/175)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (30/175)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((168,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (31/175)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((168,))\n", + "Rank 000: Loading GHSL_population_density var (32/175)\n", + "Rank 000: Loaded GHSL_population_density var ((168,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (33/175)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var ((168,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (34/175)\n", + "Rank 000: Loaded GPW_average_population_density_25km var ((168,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (35/175)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((168,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (36/175)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((168,))\n", + "Rank 000: Loading GPW_max_population_density_5km var (37/175)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((168,))\n", + "Rank 000: Loading GPW_population_density var (38/175)\n", + "Rank 000: Loaded GPW_population_density var ((168,))\n", + "Rank 
000: Loading GSFC_coastline_proximity var (39/175)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((168,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (40/175)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (41/175)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (42/175)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (43/175)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (45/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((168,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (58/175)\n", + "Rank 000: Loaded OMI_level3_column_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var 
((168,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (62/175)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((168,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (63/175)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((168,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (64/175)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((168,))\n", + "Rank 000: Loading WMO_region var (65/175)\n", + "Rank 000: Loaded WMO_region var ((168,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (66/175)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((168,))\n", + "Rank 000: Loading WWF_TEOW_biome var (67/175)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((168,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (68/175)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((168,))\n", + "Rank 000: Loading administrative_country_division_1 var (69/175)\n", + "Rank 000: Loaded administrative_country_division_1 var ((168,))\n", + "Rank 000: Loading administrative_country_division_2 var (70/175)\n", + "Rank 000: Loaded administrative_country_division_2 var ((168,))\n", + "Rank 000: Loading altitude var (71/175)\n", + "Rank 000: Loaded altitude var ((168,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (72/175)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading annual_native_representativity_percent var (73/175)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading area_classification var (74/175)\n", + "Rank 000: Loaded area_classification var ((168,))\n", + "Rank 000: Loading associated_networks var (75/175)\n", + "Rank 000: Loaded associated_networks var ((168,))\n", + "Rank 000: Loading city var (76/175)\n", + "Rank 000: Loaded city var ((168,))\n", + "Rank 000: Loading climatology var (77/175)\n", + "Rank 000: Loaded climatology var ((168,))\n", + "Rank 000: Loading contact_email_address var (78/175)\n", + "Rank 000: Loaded contact_email_address var ((168,))\n", + "Rank 000: Loading contact_institution var (79/175)\n", + "Rank 000: Loaded contact_institution var ((168,))\n", + "Rank 000: Loading contact_name var (80/175)\n", + "Rank 000: Loaded contact_name var ((168,))\n", + "Rank 000: Loading country var (81/175)\n", + "Rank 000: Loaded country var ((168,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (82/175)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading daily_native_representativity_percent var (83/175)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading daily_passing_vehicles var (84/175)\n", + "Rank 000: Loaded daily_passing_vehicles var ((168,))\n", + "Rank 000: Loading data_level var (85/175)\n", + "Rank 000: Loaded data_level var ((168,))\n", + "Rank 000: Loading data_licence var (86/175)\n", + "Rank 000: Loaded data_licence var ((168,))\n", + "Rank 000: Loading day_night_code var (87/175)\n", + "Rank 000: Loaded day_night_code var ((168, 720))\n", + "Rank 000: Loading daytime_traffic_speed var (88/175)\n", + "Rank 000: Loaded daytime_traffic_speed var ((168,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (89/175)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((168, 720))\n", + "Rank 000: Loading distance_to_building var (90/175)\n", + "Rank 000: Loaded distance_to_building var ((168,))\n", + 
"Rank 000: Loading distance_to_junction var (91/175)\n", + "Rank 000: Loaded distance_to_junction var ((168,))\n", + "Rank 000: Loading distance_to_kerb var (92/175)\n", + "Rank 000: Loaded distance_to_kerb var ((168,))\n", + "Rank 000: Loading distance_to_source var (93/175)\n", + "Rank 000: Loaded distance_to_source var ((168,))\n", + "Rank 000: Loading ellipsoid var (94/175)\n", + "Rank 000: Loaded ellipsoid var ((168,))\n", + "Rank 000: Loading horizontal_datum var (95/175)\n", + "Rank 000: Loaded horizontal_datum var ((168,))\n", + "Rank 000: Loading hourly_native_max_gap_percent var (96/175)\n", + "Rank 000: Loaded hourly_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading hourly_native_representativity_percent var (97/175)\n", + "Rank 000: Loaded hourly_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading land_use var (98/175)\n", + "Rank 000: Loaded land_use var ((168,))\n", + "Rank 000: Loading local_time var (99/175)\n", + "Rank 000: Loaded local_time var ((168, 720))\n", + "Rank 000: Loading main_emission_source var (100/175)\n", + "Rank 000: Loaded main_emission_source var ((168,))\n", + "Rank 000: Loading mean_solar_time var (101/175)\n", + "Rank 000: Loaded mean_solar_time var ((168, 720))\n", + "Rank 000: Loading measurement_altitude var (102/175)\n", + "Rank 000: Loaded measurement_altitude var ((168,))\n", + "Rank 000: Loading measurement_methodology var (103/175)\n", + "Rank 000: Loaded measurement_methodology var ((168,))\n", + "Rank 000: Loading measurement_scale var (104/175)\n", + "Rank 000: Loaded measurement_scale var ((168,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (105/175)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (106/175)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (107/175)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (108/175)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (110/175)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (111/175)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_span_drift var (112/175)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (113/175)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (115/175)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((168,))\n", + "Rank 000: Loading 
measuring_instrument_documented_zonal_drift var (116/175)\n", + "Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_further_details var (117/175)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((168,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (118/175)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((168,))\n", + "Rank 000: Loading measuring_instrument_manual_name var (119/175)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((168,))\n", + "Rank 000: Loading measuring_instrument_name var (120/175)\n", + "Rank 000: Loaded measuring_instrument_name var ((168,))\n", + "Rank 000: Loading measuring_instrument_process_details var (121/175)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (122/175)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (123/175)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (124/175)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (126/175)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (127/175)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (128/175)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (129/175)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (130/175)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (132/175)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (133/175)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_sampling_type var (134/175)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((168,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (135/175)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading monthly_native_representativity_percent var (136/175)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading network var (137/175)\n", + "Rank 000: Loaded network var ((168,))\n", + "Rank 000: Loading network_maintenance_details var (138/175)\n", + "Rank 000: Loaded network_maintenance_details var ((168,))\n", + "Rank 000: Loading 
network_miscellaneous_details var (139/175)\n", + "Rank 000: Loaded network_miscellaneous_details var ((168,))\n", + "Rank 000: Loading network_provided_volume_standard_pressure var (140/175)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((168,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (141/175)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((168,))\n", + "Rank 000: Loading network_qa_details var (142/175)\n", + "Rank 000: Loaded network_qa_details var ((168,))\n", + "Rank 000: Loading network_sampling_details var (143/175)\n", + "Rank 000: Loaded network_sampling_details var ((168,))\n", + "Rank 000: Loading network_uncertainty_details var (144/175)\n", + "Rank 000: Loaded network_uncertainty_details var ((168,))\n", + "Rank 000: Loading population var (145/175)\n", + "Rank 000: Loaded population var ((168,))\n", + "Rank 000: Loading primary_sampling_further_details var (146/175)\n", + "Rank 000: Loaded primary_sampling_further_details var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (147/175)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (148/175)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (149/175)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (150/175)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((168,))\n", + "Rank 000: Loading primary_sampling_process_details var (151/175)\n", + "Rank 000: Loaded primary_sampling_process_details var ((168,))\n", + "Rank 000: Loading primary_sampling_type var (152/175)\n", + "Rank 000: Loaded primary_sampling_type var ((168,))\n", + "Rank 000: Loading principal_investigator_email_address var (153/175)\n", + "Rank 000: Loaded principal_investigator_email_address var ((168,))\n", + "Rank 000: Loading principal_investigator_institution var (154/175)\n", + "Rank 000: Loaded principal_investigator_institution var ((168,))\n", + "Rank 000: Loading principal_investigator_name var (155/175)\n", + "Rank 000: Loaded principal_investigator_name var ((168,))\n", + "Rank 000: Loading process_warnings var (156/175)\n", + "Rank 000: Loaded process_warnings var ((168,))\n", + "Rank 000: Loading projection var (157/175)\n", + "Rank 000: Loaded projection var ((168,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (158/175)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((168, 720))\n", + "Rank 000: Loading representative_radius var (159/175)\n", + "Rank 000: Loaded representative_radius var ((168,))\n", + "Rank 000: Loading sample_preparation_further_details var (160/175)\n", + "Rank 000: Loaded sample_preparation_further_details var ((168,))\n", + "Rank 000: Loading sample_preparation_process_details var (161/175)\n", + "Rank 000: Loaded sample_preparation_process_details var ((168,))\n", + "Rank 000: Loading sample_preparation_techniques var (162/175)\n", + "Rank 000: Loaded sample_preparation_techniques var ((168,))\n", + "Rank 000: Loading sample_preparation_types var (163/175)\n", + "Rank 000: Loaded sample_preparation_types var ((168,))\n", + "Rank 000: Loading sampling_height var (164/175)\n", + "Rank 000: Loaded sampling_height var ((168,))\n", + "Rank 000: Loading 
sconco3 var (165/175)\n", + "Rank 000: Loaded sconco3 var ((168, 720))\n", + "Rank 000: Loading season_code var (166/175)\n", + "Rank 000: Loaded season_code var ((168, 720))\n", + "Rank 000: Loading station_classification var (167/175)\n", + "Rank 000: Loaded station_classification var ((168,))\n", + "Rank 000: Loading station_name var (168/175)\n", + "Rank 000: Loaded station_name var ((168,))\n", + "Rank 000: Loading station_reference var (169/175)\n", + "Rank 000: Loaded station_reference var ((168,))\n", + "Rank 000: Loading station_timezone var (170/175)\n", + "Rank 000: Loaded station_timezone var ((168,))\n", + "Rank 000: Loading street_type var (171/175)\n", + "Rank 000: Loaded street_type var ((168,))\n", + "Rank 000: Loading street_width var (172/175)\n", + "Rank 000: Loaded street_width var ((168,))\n", + "Rank 000: Loading terrain var (173/175)\n", + "Rank 000: Loaded terrain var ((168,))\n", + "Rank 000: Loading vertical_datum var (174/175)\n", + "Rank 000: Loaded vertical_datum var ((168,))\n", + "Rank 000: Loading weekday_weekend_code var (175/175)\n", + "Rank 000: Loaded weekday_weekend_code var ((168, 720))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating providentia_obs_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing ASTER_v3_altitude var (1/175)\n", + "Rank 000: Var ASTER_v3_altitude created (1/175)\n", + "Rank 000: Var ASTER_v3_altitude data (1/175)\n", + "Rank 000: Var ASTER_v3_altitude completed (1/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions created (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions data (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions completed (2/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions created (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions data (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions completed (3/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions created (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions data (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions completed (4/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions created (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions data (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions completed (5/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions created (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions data (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions completed (6/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions created (7/175)\n", + "Rank 000: Var 
EDGAR_v4.3.2_annual_average_OC_emissions data (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions completed (7/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions created (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions data (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions completed (8/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions created (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions data (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions completed (9/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions created (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions data (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions completed (10/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions created (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions data (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions completed (11/175)\n", + "Rank 000: Writing ESDAC_Iwahashi_landform_classification var (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification created (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification data (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification completed (12/175)\n", + "Rank 000: Writing ESDAC_Meybeck_landform_classification var (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification created (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification data (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification completed (13/175)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km created (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km data (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km completed (14/175)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km created (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km data (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km completed (15/175)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km created (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km data (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km completed (16/175)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km created (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km data (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km completed (17/175)\n", + "Rank 000: Writing ETOPO1_altitude var 
(18/175)\n", + "Rank 000: Var ETOPO1_altitude created (18/175)\n", + "Rank 000: Var ETOPO1_altitude data (18/175)\n", + "Rank 000: Var ETOPO1_altitude completed (18/175)\n", + "Rank 000: Writing ETOPO1_max_altitude_difference_5km var (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km created (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km data (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km completed (19/175)\n", + "Rank 000: Writing GHOST_version var (20/175)\n", + "Rank 000: Var GHOST_version created (20/175)\n", + "Rank 000: Var GHOST_version data (20/175)\n", + "Rank 000: Var GHOST_version completed (20/175)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_25km var (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km created (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km data (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km completed (21/175)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_5km var (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km created (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km data (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km completed (22/175)\n", + "Rank 000: Writing GHSL_average_population_density_25km var (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km created (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km data (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km completed (23/175)\n", + "Rank 000: Writing GHSL_average_population_density_5km var (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km created (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km data (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km completed (24/175)\n", + "Rank 000: Writing GHSL_built_up_area_density var (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density created (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density data (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density completed (25/175)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_25km var (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km created (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km data (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km completed (26/175)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_5km var (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km created (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km data (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km completed (27/175)\n", + "Rank 000: Writing GHSL_max_population_density_25km var (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km created (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km data (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km completed (28/175)\n", + "Rank 000: Writing GHSL_max_population_density_5km var (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km created (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km data (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km completed (29/175)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_25km var (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km created (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km data (30/175)\n", + "Rank 
000: Var GHSL_modal_settlement_model_classification_25km completed (30/175)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_5km var (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km created (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km data (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km completed (31/175)\n", + "Rank 000: Writing GHSL_population_density var (32/175)\n", + "Rank 000: Var GHSL_population_density created (32/175)\n", + "Rank 000: Var GHSL_population_density data (32/175)\n", + "Rank 000: Var GHSL_population_density completed (32/175)\n", + "Rank 000: Writing GHSL_settlement_model_classification var (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification created (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification data (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification completed (33/175)\n", + "Rank 000: Writing GPW_average_population_density_25km var (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km created (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km data (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km completed (34/175)\n", + "Rank 000: Writing GPW_average_population_density_5km var (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km created (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km data (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km completed (35/175)\n", + "Rank 000: Writing GPW_max_population_density_25km var (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km created (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km data (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km completed (36/175)\n", + "Rank 000: Writing GPW_max_population_density_5km var (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km created (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km data (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km completed (37/175)\n", + "Rank 000: Writing GPW_population_density var (38/175)\n", + "Rank 000: Var GPW_population_density created (38/175)\n", + "Rank 000: Var GPW_population_density data (38/175)\n", + "Rank 000: Var GPW_population_density completed (38/175)\n", + "Rank 000: Writing GSFC_coastline_proximity var (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity created (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity data (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity completed (39/175)\n", + "Rank 000: Writing Joly-Peuch_classification_code var (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code created (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code data (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code completed (40/175)\n", + "Rank 000: Writing Koppen-Geiger_classification var (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification created (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification data (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification completed (41/175)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_25km var (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km created (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km data (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km completed (42/175)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_5km var (43/175)\n", + "Rank 000: Var 
Koppen-Geiger_modal_classification_5km created (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km data (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km completed (43/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use created (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use data (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use completed (44/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_LAI var (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI created (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI data (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI completed (45/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use created (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use data (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use completed (46/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km created (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km data (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km completed (47/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km created (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km data (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km completed (48/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km created (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km data (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km completed (49/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km created (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km data (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km completed (50/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km data (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km completed (51/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km created (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km data (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km completed (52/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km created (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km data (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km completed (53/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km created (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km data (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km completed (54/175)\n", + "Rank 000: 
Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km created (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km data (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km completed (55/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km created (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km data (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km completed (56/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights created (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights data (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights completed (57/175)\n", + "Rank 000: Writing OMI_level3_column_annual_average_NO2 var (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 created (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 data (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 completed (58/175)\n", + "Rank 000: Writing OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 created (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 data (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 completed (59/175)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 created (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 data (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 completed (60/175)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 created (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 data (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 completed (61/175)\n", + "Rank 000: Writing UMBC_anthrome_classification var (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification created (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification data (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification completed (62/175)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_25km var (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km created (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km data (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km completed (63/175)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_5km var (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km created (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km data (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km completed (64/175)\n", + "Rank 000: Writing WMO_region var (65/175)\n", + "Rank 000: Var WMO_region created (65/175)\n", + "Rank 000: Var WMO_region data (65/175)\n", + "Rank 000: Var WMO_region completed (65/175)\n", + "Rank 000: Writing WWF_TEOW_biogeographical_realm 
var (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm created (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm data (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm completed (66/175)\n", + "Rank 000: Writing WWF_TEOW_biome var (67/175)\n", + "Rank 000: Var WWF_TEOW_biome created (67/175)\n", + "Rank 000: Var WWF_TEOW_biome data (67/175)\n", + "Rank 000: Var WWF_TEOW_biome completed (67/175)\n", + "Rank 000: Writing WWF_TEOW_terrestrial_ecoregion var (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion created (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion data (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion completed (68/175)\n", + "Rank 000: Writing administrative_country_division_1 var (69/175)\n", + "Rank 000: Var administrative_country_division_1 created (69/175)\n", + "Rank 000: Var administrative_country_division_1 data (69/175)\n", + "Rank 000: Var administrative_country_division_1 completed (69/175)\n", + "Rank 000: Writing administrative_country_division_2 var (70/175)\n", + "Rank 000: Var administrative_country_division_2 created (70/175)\n", + "Rank 000: Var administrative_country_division_2 data (70/175)\n", + "Rank 000: Var administrative_country_division_2 completed (70/175)\n", + "Rank 000: Writing altitude var (71/175)\n", + "Rank 000: Var altitude created (71/175)\n", + "Rank 000: Var altitude data (71/175)\n", + "Rank 000: Var altitude completed (71/175)\n", + "Rank 000: Writing annual_native_max_gap_percent var (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent created (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent data (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent completed (72/175)\n", + "Rank 000: Writing annual_native_representativity_percent var (73/175)\n", + "Rank 000: Var annual_native_representativity_percent created (73/175)\n", + "Rank 000: Var annual_native_representativity_percent data (73/175)\n", + "Rank 000: Var annual_native_representativity_percent completed (73/175)\n", + "Rank 000: Writing area_classification var (74/175)\n", + "Rank 000: Var area_classification created (74/175)\n", + "Rank 000: Var area_classification data (74/175)\n", + "Rank 000: Var area_classification completed (74/175)\n", + "Rank 000: Writing associated_networks var (75/175)\n", + "Rank 000: Var associated_networks created (75/175)\n", + "Rank 000: Var associated_networks data (75/175)\n", + "Rank 000: Var associated_networks completed (75/175)\n", + "Rank 000: Writing city var (76/175)\n", + "Rank 000: Var city created (76/175)\n", + "Rank 000: Var city data (76/175)\n", + "Rank 000: Var city completed (76/175)\n", + "Rank 000: Writing climatology var (77/175)\n", + "Rank 000: Var climatology created (77/175)\n", + "Rank 000: Var climatology data (77/175)\n", + "Rank 000: Var climatology completed (77/175)\n", + "Rank 000: Writing contact_email_address var (78/175)\n", + "Rank 000: Var contact_email_address created (78/175)\n", + "Rank 000: Var contact_email_address data (78/175)\n", + "Rank 000: Var contact_email_address completed (78/175)\n", + "Rank 000: Writing contact_institution var (79/175)\n", + "Rank 000: Var contact_institution created (79/175)\n", + "Rank 000: Var contact_institution data (79/175)\n", + "Rank 000: Var contact_institution completed (79/175)\n", + "Rank 000: Writing contact_name var (80/175)\n", + "Rank 000: Var contact_name created (80/175)\n", + "Rank 000: Var contact_name data (80/175)\n", + "Rank 000: Var contact_name completed (80/175)\n", + 
"Rank 000: Writing country var (81/175)\n", + "Rank 000: Var country created (81/175)\n", + "Rank 000: Var country data (81/175)\n", + "Rank 000: Var country completed (81/175)\n", + "Rank 000: Writing daily_native_max_gap_percent var (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent created (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent data (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent completed (82/175)\n", + "Rank 000: Writing daily_native_representativity_percent var (83/175)\n", + "Rank 000: Var daily_native_representativity_percent created (83/175)\n", + "Rank 000: Var daily_native_representativity_percent data (83/175)\n", + "Rank 000: Var daily_native_representativity_percent completed (83/175)\n", + "Rank 000: Writing daily_passing_vehicles var (84/175)\n", + "Rank 000: Var daily_passing_vehicles created (84/175)\n", + "Rank 000: Var daily_passing_vehicles data (84/175)\n", + "Rank 000: Var daily_passing_vehicles completed (84/175)\n", + "Rank 000: Writing data_level var (85/175)\n", + "Rank 000: Var data_level created (85/175)\n", + "Rank 000: Var data_level data (85/175)\n", + "Rank 000: Var data_level completed (85/175)\n", + "Rank 000: Writing data_licence var (86/175)\n", + "Rank 000: Var data_licence created (86/175)\n", + "Rank 000: Var data_licence data (86/175)\n", + "Rank 000: Var data_licence completed (86/175)\n", + "Rank 000: Writing day_night_code var (87/175)\n", + "Rank 000: Var day_night_code created (87/175)\n", + "Rank 000: Var day_night_code data (87/175)\n", + "Rank 000: Var day_night_code completed (87/175)\n", + "Rank 000: Writing daytime_traffic_speed var (88/175)\n", + "Rank 000: Var daytime_traffic_speed created (88/175)\n", + "Rank 000: Var daytime_traffic_speed data (88/175)\n", + "Rank 000: Var daytime_traffic_speed completed (88/175)\n", + "Rank 000: Writing derived_uncertainty_per_measurement var (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement created (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement data (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement completed (89/175)\n", + "Rank 000: Writing distance_to_building var (90/175)\n", + "Rank 000: Var distance_to_building created (90/175)\n", + "Rank 000: Var distance_to_building data (90/175)\n", + "Rank 000: Var distance_to_building completed (90/175)\n", + "Rank 000: Writing distance_to_junction var (91/175)\n", + "Rank 000: Var distance_to_junction created (91/175)\n", + "Rank 000: Var distance_to_junction data (91/175)\n", + "Rank 000: Var distance_to_junction completed (91/175)\n", + "Rank 000: Writing distance_to_kerb var (92/175)\n", + "Rank 000: Var distance_to_kerb created (92/175)\n", + "Rank 000: Var distance_to_kerb data (92/175)\n", + "Rank 000: Var distance_to_kerb completed (92/175)\n", + "Rank 000: Writing distance_to_source var (93/175)\n", + "Rank 000: Var distance_to_source created (93/175)\n", + "Rank 000: Var distance_to_source data (93/175)\n", + "Rank 000: Var distance_to_source completed (93/175)\n", + "Rank 000: Writing ellipsoid var (94/175)\n", + "Rank 000: Var ellipsoid created (94/175)\n", + "Rank 000: Var ellipsoid data (94/175)\n", + "Rank 000: Var ellipsoid completed (94/175)\n", + "Rank 000: Writing horizontal_datum var (95/175)\n", + "Rank 000: Var horizontal_datum created (95/175)\n", + "Rank 000: Var horizontal_datum data (95/175)\n", + "Rank 000: Var horizontal_datum completed (95/175)\n", + "Rank 000: Writing hourly_native_max_gap_percent var (96/175)\n", + "Rank 000: Var 
hourly_native_max_gap_percent created (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent data (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent completed (96/175)\n", + "Rank 000: Writing hourly_native_representativity_percent var (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent created (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent data (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent completed (97/175)\n", + "Rank 000: Writing land_use var (98/175)\n", + "Rank 000: Var land_use created (98/175)\n", + "Rank 000: Var land_use data (98/175)\n", + "Rank 000: Var land_use completed (98/175)\n", + "Rank 000: Writing local_time var (99/175)\n", + "Rank 000: Var local_time created (99/175)\n", + "Rank 000: Var local_time data (99/175)\n", + "Rank 000: Var local_time completed (99/175)\n", + "Rank 000: Writing main_emission_source var (100/175)\n", + "Rank 000: Var main_emission_source created (100/175)\n", + "Rank 000: Var main_emission_source data (100/175)\n", + "Rank 000: Var main_emission_source completed (100/175)\n", + "Rank 000: Writing mean_solar_time var (101/175)\n", + "Rank 000: Var mean_solar_time created (101/175)\n", + "Rank 000: Var mean_solar_time data (101/175)\n", + "Rank 000: Var mean_solar_time completed (101/175)\n", + "Rank 000: Writing measurement_altitude var (102/175)\n", + "Rank 000: Var measurement_altitude created (102/175)\n", + "Rank 000: Var measurement_altitude data (102/175)\n", + "Rank 000: Var measurement_altitude completed (102/175)\n", + "Rank 000: Writing measurement_methodology var (103/175)\n", + "Rank 000: Var measurement_methodology created (103/175)\n", + "Rank 000: Var measurement_methodology data (103/175)\n", + "Rank 000: Var measurement_methodology completed (103/175)\n", + "Rank 000: Writing measurement_scale var (104/175)\n", + "Rank 000: Var measurement_scale created (104/175)\n", + "Rank 000: Var measurement_scale data (104/175)\n", + "Rank 000: Var measurement_scale completed (104/175)\n", + "Rank 000: Writing measuring_instrument_calibration_scale var (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale created (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale data (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale completed (105/175)\n", + "Rank 000: Writing measuring_instrument_documented_absorption_cross_section var (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section created (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section data (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section completed (106/175)\n", + "Rank 000: Writing measuring_instrument_documented_accuracy var (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy created (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy data (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy completed (107/175)\n", + "Rank 000: Writing measuring_instrument_documented_flow_rate var (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate created (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate data (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate completed (108/175)\n", + "Rank 000: Writing measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", + "Rank 000: Var 
measuring_instrument_documented_lower_limit_of_detection created (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection data (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection completed (109/175)\n", + "Rank 000: Writing measuring_instrument_documented_measurement_resolution var (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution created (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution data (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution completed (110/175)\n", + "Rank 000: Writing measuring_instrument_documented_precision var (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision created (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision data (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision completed (111/175)\n", + "Rank 000: Writing measuring_instrument_documented_span_drift var (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift created (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift data (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift completed (112/175)\n", + "Rank 000: Writing measuring_instrument_documented_uncertainty var (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty created (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty data (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty completed (113/175)\n", + "Rank 000: Writing measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection created (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection data (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection completed (114/175)\n", + "Rank 000: Writing measuring_instrument_documented_zero_drift var (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift created (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift data (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift completed (115/175)\n", + "Rank 000: Writing measuring_instrument_documented_zonal_drift var (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift created (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift data (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift completed (116/175)\n", + "Rank 000: Writing measuring_instrument_further_details var (117/175)\n", + "Rank 000: Var measuring_instrument_further_details created (117/175)\n", + "Rank 000: Var measuring_instrument_further_details data (117/175)\n", + "Rank 000: Var measuring_instrument_further_details completed (117/175)\n", + "Rank 000: Writing measuring_instrument_inlet_information var (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information created (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information data (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information completed (118/175)\n", + "Rank 000: Writing measuring_instrument_manual_name var (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name created (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name data (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name completed 
(119/175)\n", + "Rank 000: Writing measuring_instrument_name var (120/175)\n", + "Rank 000: Var measuring_instrument_name created (120/175)\n", + "Rank 000: Var measuring_instrument_name data (120/175)\n", + "Rank 000: Var measuring_instrument_name completed (120/175)\n", + "Rank 000: Writing measuring_instrument_process_details var (121/175)\n", + "Rank 000: Var measuring_instrument_process_details created (121/175)\n", + "Rank 000: Var measuring_instrument_process_details data (121/175)\n", + "Rank 000: Var measuring_instrument_process_details completed (121/175)\n", + "Rank 000: Writing measuring_instrument_reported_absorption_cross_section var (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section created (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section data (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section completed (122/175)\n", + "Rank 000: Writing measuring_instrument_reported_accuracy var (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy created (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy data (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy completed (123/175)\n", + "Rank 000: Writing measuring_instrument_reported_flow_rate var (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate created (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate data (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate completed (124/175)\n", + "Rank 000: Writing measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection created (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection data (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection completed (125/175)\n", + "Rank 000: Writing measuring_instrument_reported_measurement_resolution var (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution created (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution data (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution completed (126/175)\n", + "Rank 000: Writing measuring_instrument_reported_precision var (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision created (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision data (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision completed (127/175)\n", + "Rank 000: Writing measuring_instrument_reported_span_drift var (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift created (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift data (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift completed (128/175)\n", + "Rank 000: Writing measuring_instrument_reported_uncertainty var (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty created (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty data (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty completed (129/175)\n", + "Rank 000: Writing measuring_instrument_reported_units var (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units created (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units data (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units 
completed (130/175)\n", + "Rank 000: Writing measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection created (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection data (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection completed (131/175)\n", + "Rank 000: Writing measuring_instrument_reported_zero_drift var (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift created (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift data (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift completed (132/175)\n", + "Rank 000: Writing measuring_instrument_reported_zonal_drift var (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift created (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift data (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift completed (133/175)\n", + "Rank 000: Writing measuring_instrument_sampling_type var (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type created (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type data (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type completed (134/175)\n", + "Rank 000: Writing monthly_native_max_gap_percent var (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent created (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent data (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent completed (135/175)\n", + "Rank 000: Writing monthly_native_representativity_percent var (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent created (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent data (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent completed (136/175)\n", + "Rank 000: Writing network var (137/175)\n", + "Rank 000: Var network created (137/175)\n", + "Rank 000: Var network data (137/175)\n", + "Rank 000: Var network completed (137/175)\n", + "Rank 000: Writing network_maintenance_details var (138/175)\n", + "Rank 000: Var network_maintenance_details created (138/175)\n", + "Rank 000: Var network_maintenance_details data (138/175)\n", + "Rank 000: Var network_maintenance_details completed (138/175)\n", + "Rank 000: Writing network_miscellaneous_details var (139/175)\n", + "Rank 000: Var network_miscellaneous_details created (139/175)\n", + "Rank 000: Var network_miscellaneous_details data (139/175)\n", + "Rank 000: Var network_miscellaneous_details completed (139/175)\n", + "Rank 000: Writing network_provided_volume_standard_pressure var (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure created (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure data (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure completed (140/175)\n", + "Rank 000: Writing network_provided_volume_standard_temperature var (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature created (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature data (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature completed (141/175)\n", + "Rank 000: Writing network_qa_details var (142/175)\n", + "Rank 000: Var network_qa_details created (142/175)\n", + "Rank 000: Var network_qa_details data (142/175)\n", + "Rank 000: Var network_qa_details completed (142/175)\n", + 
"Rank 000: Writing network_sampling_details var (143/175)\n", + "Rank 000: Var network_sampling_details created (143/175)\n", + "Rank 000: Var network_sampling_details data (143/175)\n", + "Rank 000: Var network_sampling_details completed (143/175)\n", + "Rank 000: Writing network_uncertainty_details var (144/175)\n", + "Rank 000: Var network_uncertainty_details created (144/175)\n", + "Rank 000: Var network_uncertainty_details data (144/175)\n", + "Rank 000: Var network_uncertainty_details completed (144/175)\n", + "Rank 000: Writing population var (145/175)\n", + "Rank 000: Var population created (145/175)\n", + "Rank 000: Var population data (145/175)\n", + "Rank 000: Var population completed (145/175)\n", + "Rank 000: Writing primary_sampling_further_details var (146/175)\n", + "Rank 000: Var primary_sampling_further_details created (146/175)\n", + "Rank 000: Var primary_sampling_further_details data (146/175)\n", + "Rank 000: Var primary_sampling_further_details completed (146/175)\n", + "Rank 000: Writing primary_sampling_instrument_documented_flow_rate var (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate created (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate data (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate completed (147/175)\n", + "Rank 000: Writing primary_sampling_instrument_manual_name var (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name created (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name data (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name completed (148/175)\n", + "Rank 000: Writing primary_sampling_instrument_name var (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name created (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name data (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name completed (149/175)\n", + "Rank 000: Writing primary_sampling_instrument_reported_flow_rate var (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate created (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate data (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate completed (150/175)\n", + "Rank 000: Writing primary_sampling_process_details var (151/175)\n", + "Rank 000: Var primary_sampling_process_details created (151/175)\n", + "Rank 000: Var primary_sampling_process_details data (151/175)\n", + "Rank 000: Var primary_sampling_process_details completed (151/175)\n", + "Rank 000: Writing primary_sampling_type var (152/175)\n", + "Rank 000: Var primary_sampling_type created (152/175)\n", + "Rank 000: Var primary_sampling_type data (152/175)\n", + "Rank 000: Var primary_sampling_type completed (152/175)\n", + "Rank 000: Writing principal_investigator_email_address var (153/175)\n", + "Rank 000: Var principal_investigator_email_address created (153/175)\n", + "Rank 000: Var principal_investigator_email_address data (153/175)\n", + "Rank 000: Var principal_investigator_email_address completed (153/175)\n", + "Rank 000: Writing principal_investigator_institution var (154/175)\n", + "Rank 000: Var principal_investigator_institution created (154/175)\n", + "Rank 000: Var principal_investigator_institution data (154/175)\n", + "Rank 000: Var principal_investigator_institution completed (154/175)\n", + "Rank 000: Writing principal_investigator_name var (155/175)\n", + "Rank 000: Var principal_investigator_name created 
(155/175)\n", + "Rank 000: Var principal_investigator_name data (155/175)\n", + "Rank 000: Var principal_investigator_name completed (155/175)\n", + "Rank 000: Writing process_warnings var (156/175)\n", + "Rank 000: Var process_warnings created (156/175)\n", + "Rank 000: Var process_warnings data (156/175)\n", + "Rank 000: Var process_warnings completed (156/175)\n", + "Rank 000: Writing projection var (157/175)\n", + "Rank 000: Var projection created (157/175)\n", + "Rank 000: Var projection data (157/175)\n", + "Rank 000: Var projection completed (157/175)\n", + "Rank 000: Writing reported_uncertainty_per_measurement var (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement created (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement data (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement completed (158/175)\n", + "Rank 000: Writing representative_radius var (159/175)\n", + "Rank 000: Var representative_radius created (159/175)\n", + "Rank 000: Var representative_radius data (159/175)\n", + "Rank 000: Var representative_radius completed (159/175)\n", + "Rank 000: Writing sample_preparation_further_details var (160/175)\n", + "Rank 000: Var sample_preparation_further_details created (160/175)\n", + "Rank 000: Var sample_preparation_further_details data (160/175)\n", + "Rank 000: Var sample_preparation_further_details completed (160/175)\n", + "Rank 000: Writing sample_preparation_process_details var (161/175)\n", + "Rank 000: Var sample_preparation_process_details created (161/175)\n", + "Rank 000: Var sample_preparation_process_details data (161/175)\n", + "Rank 000: Var sample_preparation_process_details completed (161/175)\n", + "Rank 000: Writing sample_preparation_techniques var (162/175)\n", + "Rank 000: Var sample_preparation_techniques created (162/175)\n", + "Rank 000: Var sample_preparation_techniques data (162/175)\n", + "Rank 000: Var sample_preparation_techniques completed (162/175)\n", + "Rank 000: Writing sample_preparation_types var (163/175)\n", + "Rank 000: Var sample_preparation_types created (163/175)\n", + "Rank 000: Var sample_preparation_types data (163/175)\n", + "Rank 000: Var sample_preparation_types completed (163/175)\n", + "Rank 000: Writing sampling_height var (164/175)\n", + "Rank 000: Var sampling_height created (164/175)\n", + "Rank 000: Var sampling_height data (164/175)\n", + "Rank 000: Var sampling_height completed (164/175)\n", + "Rank 000: Writing sconco3 var (165/175)\n", + "Rank 000: Var sconco3 created (165/175)\n", + "Rank 000: Var sconco3 data (165/175)\n", + "Rank 000: Var sconco3 completed (165/175)\n", + "Rank 000: Writing season_code var (166/175)\n", + "Rank 000: Var season_code created (166/175)\n", + "Rank 000: Var season_code data (166/175)\n", + "Rank 000: Var season_code completed (166/175)\n", + "Rank 000: Writing station_classification var (167/175)\n", + "Rank 000: Var station_classification created (167/175)\n", + "Rank 000: Var station_classification data (167/175)\n", + "Rank 000: Var station_classification completed (167/175)\n", + "Rank 000: Writing station_name var (168/175)\n", + "Rank 000: Var station_name created (168/175)\n", + "Rank 000: Var station_name data (168/175)\n", + "Rank 000: Var station_name completed (168/175)\n", + "Rank 000: Writing station_reference var (169/175)\n", + "Rank 000: Var station_reference created (169/175)\n", + "Rank 000: Var station_reference data (169/175)\n", + "Rank 000: Var station_reference completed (169/175)\n", + "Rank 000: Writing 
station_timezone var (170/175)\n", + "Rank 000: Var station_timezone created (170/175)\n", + "Rank 000: Var station_timezone data (170/175)\n", + "Rank 000: Var station_timezone completed (170/175)\n", + "Rank 000: Writing street_type var (171/175)\n", + "Rank 000: Var street_type created (171/175)\n", + "Rank 000: Var street_type data (171/175)\n", + "Rank 000: Var street_type completed (171/175)\n", + "Rank 000: Writing street_width var (172/175)\n", + "Rank 000: Var street_width created (172/175)\n", + "Rank 000: Var street_width data (172/175)\n", + "Rank 000: Var street_width completed (172/175)\n", + "Rank 000: Writing terrain var (173/175)\n", + "Rank 000: Var terrain created (173/175)\n", + "Rank 000: Var terrain data (173/175)\n", + "Rank 000: Var terrain completed (173/175)\n", + "Rank 000: Writing vertical_datum var (174/175)\n", + "Rank 000: Var vertical_datum created (174/175)\n", + "Rank 000: Var vertical_datum data (174/175)\n", + "Rank 000: Var vertical_datum completed (174/175)\n", + "Rank 000: Writing weekday_weekend_code var (175/175)\n", + "Rank 000: Var weekday_weekend_code created (175/175)\n", + "Rank 000: Var weekday_weekend_code data (175/175)\n", + "Rank 000: Var weekday_weekend_code completed (175/175)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('providentia_obs_file.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Experiments dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "exp_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Read" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
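With `info=True`, NES prints one `Writing`/`created`/`data`/`completed` block per variable, so a GHOST points file with 175 metadata variables produces a long but useful progress log. A minimal sketch of the round trip performed above, assuming a GHOST-formatted observations file (`ghost_obs.nc` is a hypothetical placeholder path, not the file used here):

```python
# Sketch of the observations round trip; 'ghost_obs.nc' is a hypothetical path.
from nes import open_netcdf

nessy_obs = open_netcdf(path='ghost_obs.nc', info=True)
nessy_obs.load()  # read all variables into memory before writing
nessy_obs.to_netcdf('providentia_obs_file.nc', info=True)  # logs each variable
```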
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                 (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n",
+       "Coordinates:\n",
+       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
+       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
+       "Data variables:\n",
+       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
+       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
+       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
+       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
+       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
+       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
+       "    sconco3                 (station, time) float32 ...\n",
+       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
+       "Attributes:\n",
+       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
+       "    institution:    Barcelona Supercomputing Center\n",
+       "    source:         Experiment cams61_chimere_ph2\n",
+       "    creator_name:   Dene R. Bowdalo\n",
+       "    creator_email:  dene.bowdalo@bsc.es\n",
+       "    conventions:    CF-1.7\n",
+       "    data_version:   1.0\n",
+       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
+       "    NCO:            4.7.2
" + ], + "text/plain": [ + "\n", + "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf(path=exp_path, info=True, parallel_method='X')\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2018, 4, 1, 0, 0),\n", + " datetime.datetime(2018, 4, 1, 1, 0),\n", + " datetime.datetime(2018, 4, 1, 2, 0),\n", + " datetime.datetime(2018, 4, 1, 3, 0),\n", + " datetime.datetime(2018, 4, 1, 4, 0),\n", + " datetime.datetime(2018, 4, 1, 5, 0),\n", + " datetime.datetime(2018, 4, 1, 6, 0),\n", + " datetime.datetime(2018, 4, 1, 7, 0),\n", + " datetime.datetime(2018, 4, 1, 8, 0),\n", + " datetime.datetime(2018, 4, 1, 9, 0),\n", + " datetime.datetime(2018, 4, 1, 10, 0),\n", + " datetime.datetime(2018, 4, 1, 11, 0),\n", + " datetime.datetime(2018, 4, 1, 12, 0),\n", + " datetime.datetime(2018, 4, 1, 13, 0),\n", + " datetime.datetime(2018, 4, 1, 14, 0),\n", + " datetime.datetime(2018, 4, 1, 15, 0),\n", + " datetime.datetime(2018, 4, 1, 16, 0),\n", + " datetime.datetime(2018, 4, 1, 17, 0),\n", + " datetime.datetime(2018, 4, 1, 18, 0),\n", + " datetime.datetime(2018, 4, 1, 19, 0),\n", + " datetime.datetime(2018, 4, 1, 20, 0),\n", + " datetime.datetime(2018, 4, 1, 21, 0),\n", + " datetime.datetime(2018, 4, 1, 22, 0),\n", + " datetime.datetime(2018, 4, 1, 23, 0),\n", + " datetime.datetime(2018, 4, 2, 0, 0),\n", + " datetime.datetime(2018, 4, 2, 1, 0),\n", + " datetime.datetime(2018, 4, 2, 2, 0),\n", + " datetime.datetime(2018, 4, 2, 3, 0),\n", + " datetime.datetime(2018, 4, 2, 4, 0),\n", + " datetime.datetime(2018, 4, 2, 5, 0),\n", + " datetime.datetime(2018, 4, 2, 6, 0),\n", + " datetime.datetime(2018, 4, 2, 7, 0),\n", + " datetime.datetime(2018, 4, 2, 8, 0),\n", + " datetime.datetime(2018, 4, 2, 9, 0),\n", + " datetime.datetime(2018, 4, 2, 10, 0),\n", + " datetime.datetime(2018, 4, 2, 11, 0),\n", + " datetime.datetime(2018, 4, 2, 12, 0),\n", + " datetime.datetime(2018, 4, 2, 13, 0),\n", + " datetime.datetime(2018, 4, 2, 14, 
0),\n", + " datetime.datetime(2018, 4, 2, 15, 0),\n", + " datetime.datetime(2018, 4, 2, 16, 0),\n", + " datetime.datetime(2018, 4, 2, 17, 0),\n", + " datetime.datetime(2018, 4, 2, 18, 0),\n", + " datetime.datetime(2018, 4, 2, 19, 0),\n", + " datetime.datetime(2018, 4, 2, 20, 0),\n", + " datetime.datetime(2018, 4, 2, 21, 0),\n", + " datetime.datetime(2018, 4, 2, 22, 0),\n", + " datetime.datetime(2018, 4, 2, 23, 0),\n", + " datetime.datetime(2018, 4, 3, 0, 0),\n", + " datetime.datetime(2018, 4, 3, 1, 0),\n", + " datetime.datetime(2018, 4, 3, 2, 0),\n", + " datetime.datetime(2018, 4, 3, 3, 0),\n", + " datetime.datetime(2018, 4, 3, 4, 0),\n", + " datetime.datetime(2018, 4, 3, 5, 0),\n", + " datetime.datetime(2018, 4, 3, 6, 0),\n", + " datetime.datetime(2018, 4, 3, 7, 0),\n", + " datetime.datetime(2018, 4, 3, 8, 0),\n", + " datetime.datetime(2018, 4, 3, 9, 0),\n", + " datetime.datetime(2018, 4, 3, 10, 0),\n", + " datetime.datetime(2018, 4, 3, 11, 0),\n", + " datetime.datetime(2018, 4, 3, 12, 0),\n", + " datetime.datetime(2018, 4, 3, 13, 0),\n", + " datetime.datetime(2018, 4, 3, 14, 0),\n", + " datetime.datetime(2018, 4, 3, 15, 0),\n", + " datetime.datetime(2018, 4, 3, 16, 0),\n", + " datetime.datetime(2018, 4, 3, 17, 0),\n", + " datetime.datetime(2018, 4, 3, 18, 0),\n", + " datetime.datetime(2018, 4, 3, 19, 0),\n", + " datetime.datetime(2018, 4, 3, 20, 0),\n", + " datetime.datetime(2018, 4, 3, 21, 0),\n", + " datetime.datetime(2018, 4, 3, 22, 0),\n", + " datetime.datetime(2018, 4, 3, 23, 0),\n", + " datetime.datetime(2018, 4, 4, 0, 0),\n", + " datetime.datetime(2018, 4, 4, 1, 0),\n", + " datetime.datetime(2018, 4, 4, 2, 0),\n", + " datetime.datetime(2018, 4, 4, 3, 0),\n", + " datetime.datetime(2018, 4, 4, 4, 0),\n", + " datetime.datetime(2018, 4, 4, 5, 0),\n", + " datetime.datetime(2018, 4, 4, 6, 0),\n", + " datetime.datetime(2018, 4, 4, 7, 0),\n", + " datetime.datetime(2018, 4, 4, 8, 0),\n", + " datetime.datetime(2018, 4, 4, 9, 0),\n", + " datetime.datetime(2018, 4, 4, 10, 0),\n", + " datetime.datetime(2018, 4, 4, 11, 0),\n", + " datetime.datetime(2018, 4, 4, 12, 0),\n", + " datetime.datetime(2018, 4, 4, 13, 0),\n", + " datetime.datetime(2018, 4, 4, 14, 0),\n", + " datetime.datetime(2018, 4, 4, 15, 0),\n", + " datetime.datetime(2018, 4, 4, 16, 0),\n", + " datetime.datetime(2018, 4, 4, 17, 0),\n", + " datetime.datetime(2018, 4, 4, 18, 0),\n", + " datetime.datetime(2018, 4, 4, 19, 0),\n", + " datetime.datetime(2018, 4, 4, 20, 0),\n", + " datetime.datetime(2018, 4, 4, 21, 0),\n", + " datetime.datetime(2018, 4, 4, 22, 0),\n", + " datetime.datetime(2018, 4, 4, 23, 0),\n", + " datetime.datetime(2018, 4, 5, 0, 0),\n", + " datetime.datetime(2018, 4, 5, 1, 0),\n", + " datetime.datetime(2018, 4, 5, 2, 0),\n", + " datetime.datetime(2018, 4, 5, 3, 0),\n", + " datetime.datetime(2018, 4, 5, 4, 0),\n", + " datetime.datetime(2018, 4, 5, 5, 0),\n", + " datetime.datetime(2018, 4, 5, 6, 0),\n", + " datetime.datetime(2018, 4, 5, 7, 0),\n", + " datetime.datetime(2018, 4, 5, 8, 0),\n", + " datetime.datetime(2018, 4, 5, 9, 0),\n", + " datetime.datetime(2018, 4, 5, 10, 0),\n", + " datetime.datetime(2018, 4, 5, 11, 0),\n", + " datetime.datetime(2018, 4, 5, 12, 0),\n", + " datetime.datetime(2018, 4, 5, 13, 0),\n", + " datetime.datetime(2018, 4, 5, 14, 0),\n", + " datetime.datetime(2018, 4, 5, 15, 0),\n", + " datetime.datetime(2018, 4, 5, 16, 0),\n", + " datetime.datetime(2018, 4, 5, 17, 0),\n", + " datetime.datetime(2018, 4, 5, 18, 0),\n", + " datetime.datetime(2018, 4, 5, 19, 0),\n", + " datetime.datetime(2018, 4, 5, 
20, 0),\n", + " datetime.datetime(2018, 4, 5, 21, 0),\n", + " datetime.datetime(2018, 4, 5, 22, 0),\n", + " datetime.datetime(2018, 4, 5, 23, 0),\n", + " datetime.datetime(2018, 4, 6, 0, 0),\n", + " datetime.datetime(2018, 4, 6, 1, 0),\n", + " datetime.datetime(2018, 4, 6, 2, 0),\n", + " datetime.datetime(2018, 4, 6, 3, 0),\n", + " datetime.datetime(2018, 4, 6, 4, 0),\n", + " datetime.datetime(2018, 4, 6, 5, 0),\n", + " datetime.datetime(2018, 4, 6, 6, 0),\n", + " datetime.datetime(2018, 4, 6, 7, 0),\n", + " datetime.datetime(2018, 4, 6, 8, 0),\n", + " datetime.datetime(2018, 4, 6, 9, 0),\n", + " datetime.datetime(2018, 4, 6, 10, 0),\n", + " datetime.datetime(2018, 4, 6, 11, 0),\n", + " datetime.datetime(2018, 4, 6, 12, 0),\n", + " datetime.datetime(2018, 4, 6, 13, 0),\n", + " datetime.datetime(2018, 4, 6, 14, 0),\n", + " datetime.datetime(2018, 4, 6, 15, 0),\n", + " datetime.datetime(2018, 4, 6, 16, 0),\n", + " datetime.datetime(2018, 4, 6, 17, 0),\n", + " datetime.datetime(2018, 4, 6, 18, 0),\n", + " datetime.datetime(2018, 4, 6, 19, 0),\n", + " datetime.datetime(2018, 4, 6, 20, 0),\n", + " datetime.datetime(2018, 4, 6, 21, 0),\n", + " datetime.datetime(2018, 4, 6, 22, 0),\n", + " datetime.datetime(2018, 4, 6, 23, 0),\n", + " datetime.datetime(2018, 4, 7, 0, 0),\n", + " datetime.datetime(2018, 4, 7, 1, 0),\n", + " datetime.datetime(2018, 4, 7, 2, 0),\n", + " datetime.datetime(2018, 4, 7, 3, 0),\n", + " datetime.datetime(2018, 4, 7, 4, 0),\n", + " datetime.datetime(2018, 4, 7, 5, 0),\n", + " datetime.datetime(2018, 4, 7, 6, 0),\n", + " datetime.datetime(2018, 4, 7, 7, 0),\n", + " datetime.datetime(2018, 4, 7, 8, 0),\n", + " datetime.datetime(2018, 4, 7, 9, 0),\n", + " datetime.datetime(2018, 4, 7, 10, 0),\n", + " datetime.datetime(2018, 4, 7, 11, 0),\n", + " datetime.datetime(2018, 4, 7, 12, 0),\n", + " datetime.datetime(2018, 4, 7, 13, 0),\n", + " datetime.datetime(2018, 4, 7, 14, 0),\n", + " datetime.datetime(2018, 4, 7, 15, 0),\n", + " datetime.datetime(2018, 4, 7, 16, 0),\n", + " datetime.datetime(2018, 4, 7, 17, 0),\n", + " datetime.datetime(2018, 4, 7, 18, 0),\n", + " datetime.datetime(2018, 4, 7, 19, 0),\n", + " datetime.datetime(2018, 4, 7, 20, 0),\n", + " datetime.datetime(2018, 4, 7, 21, 0),\n", + " datetime.datetime(2018, 4, 7, 22, 0),\n", + " datetime.datetime(2018, 4, 7, 23, 0),\n", + " datetime.datetime(2018, 4, 8, 0, 0),\n", + " datetime.datetime(2018, 4, 8, 1, 0),\n", + " datetime.datetime(2018, 4, 8, 2, 0),\n", + " datetime.datetime(2018, 4, 8, 3, 0),\n", + " datetime.datetime(2018, 4, 8, 4, 0),\n", + " datetime.datetime(2018, 4, 8, 5, 0),\n", + " datetime.datetime(2018, 4, 8, 6, 0),\n", + " datetime.datetime(2018, 4, 8, 7, 0),\n", + " datetime.datetime(2018, 4, 8, 8, 0),\n", + " datetime.datetime(2018, 4, 8, 9, 0),\n", + " datetime.datetime(2018, 4, 8, 10, 0),\n", + " datetime.datetime(2018, 4, 8, 11, 0),\n", + " datetime.datetime(2018, 4, 8, 12, 0),\n", + " datetime.datetime(2018, 4, 8, 13, 0),\n", + " datetime.datetime(2018, 4, 8, 14, 0),\n", + " datetime.datetime(2018, 4, 8, 15, 0),\n", + " datetime.datetime(2018, 4, 8, 16, 0),\n", + " datetime.datetime(2018, 4, 8, 17, 0),\n", + " datetime.datetime(2018, 4, 8, 18, 0),\n", + " datetime.datetime(2018, 4, 8, 19, 0),\n", + " datetime.datetime(2018, 4, 8, 20, 0),\n", + " datetime.datetime(2018, 4, 8, 21, 0),\n", + " datetime.datetime(2018, 4, 8, 22, 0),\n", + " datetime.datetime(2018, 4, 8, 23, 0),\n", + " datetime.datetime(2018, 4, 9, 0, 0),\n", + " datetime.datetime(2018, 4, 9, 1, 0),\n", + " datetime.datetime(2018, 4, 
9, 2, 0),\n", + " datetime.datetime(2018, 4, 9, 3, 0),\n", + " datetime.datetime(2018, 4, 9, 4, 0),\n", + " datetime.datetime(2018, 4, 9, 5, 0),\n", + " datetime.datetime(2018, 4, 9, 6, 0),\n", + " datetime.datetime(2018, 4, 9, 7, 0),\n", + " datetime.datetime(2018, 4, 9, 8, 0),\n", + " datetime.datetime(2018, 4, 9, 9, 0),\n", + " datetime.datetime(2018, 4, 9, 10, 0),\n", + " datetime.datetime(2018, 4, 9, 11, 0),\n", + " datetime.datetime(2018, 4, 9, 12, 0),\n", + " datetime.datetime(2018, 4, 9, 13, 0),\n", + " datetime.datetime(2018, 4, 9, 14, 0),\n", + " datetime.datetime(2018, 4, 9, 15, 0),\n", + " datetime.datetime(2018, 4, 9, 16, 0),\n", + " datetime.datetime(2018, 4, 9, 17, 0),\n", + " datetime.datetime(2018, 4, 9, 18, 0),\n", + " datetime.datetime(2018, 4, 9, 19, 0),\n", + " datetime.datetime(2018, 4, 9, 20, 0),\n", + " datetime.datetime(2018, 4, 9, 21, 0),\n", + " datetime.datetime(2018, 4, 9, 22, 0),\n", + " datetime.datetime(2018, 4, 9, 23, 0),\n", + " datetime.datetime(2018, 4, 10, 0, 0),\n", + " datetime.datetime(2018, 4, 10, 1, 0),\n", + " datetime.datetime(2018, 4, 10, 2, 0),\n", + " datetime.datetime(2018, 4, 10, 3, 0),\n", + " datetime.datetime(2018, 4, 10, 4, 0),\n", + " datetime.datetime(2018, 4, 10, 5, 0),\n", + " datetime.datetime(2018, 4, 10, 6, 0),\n", + " datetime.datetime(2018, 4, 10, 7, 0),\n", + " datetime.datetime(2018, 4, 10, 8, 0),\n", + " datetime.datetime(2018, 4, 10, 9, 0),\n", + " datetime.datetime(2018, 4, 10, 10, 0),\n", + " datetime.datetime(2018, 4, 10, 11, 0),\n", + " datetime.datetime(2018, 4, 10, 12, 0),\n", + " datetime.datetime(2018, 4, 10, 13, 0),\n", + " datetime.datetime(2018, 4, 10, 14, 0),\n", + " datetime.datetime(2018, 4, 10, 15, 0),\n", + " datetime.datetime(2018, 4, 10, 16, 0),\n", + " datetime.datetime(2018, 4, 10, 17, 0),\n", + " datetime.datetime(2018, 4, 10, 18, 0),\n", + " datetime.datetime(2018, 4, 10, 19, 0),\n", + " datetime.datetime(2018, 4, 10, 20, 0),\n", + " datetime.datetime(2018, 4, 10, 21, 0),\n", + " datetime.datetime(2018, 4, 10, 22, 0),\n", + " datetime.datetime(2018, 4, 10, 23, 0),\n", + " datetime.datetime(2018, 4, 11, 0, 0),\n", + " datetime.datetime(2018, 4, 11, 1, 0),\n", + " datetime.datetime(2018, 4, 11, 2, 0),\n", + " datetime.datetime(2018, 4, 11, 3, 0),\n", + " datetime.datetime(2018, 4, 11, 4, 0),\n", + " datetime.datetime(2018, 4, 11, 5, 0),\n", + " datetime.datetime(2018, 4, 11, 6, 0),\n", + " datetime.datetime(2018, 4, 11, 7, 0),\n", + " datetime.datetime(2018, 4, 11, 8, 0),\n", + " datetime.datetime(2018, 4, 11, 9, 0),\n", + " datetime.datetime(2018, 4, 11, 10, 0),\n", + " datetime.datetime(2018, 4, 11, 11, 0),\n", + " datetime.datetime(2018, 4, 11, 12, 0),\n", + " datetime.datetime(2018, 4, 11, 13, 0),\n", + " datetime.datetime(2018, 4, 11, 14, 0),\n", + " datetime.datetime(2018, 4, 11, 15, 0),\n", + " datetime.datetime(2018, 4, 11, 16, 0),\n", + " datetime.datetime(2018, 4, 11, 17, 0),\n", + " datetime.datetime(2018, 4, 11, 18, 0),\n", + " datetime.datetime(2018, 4, 11, 19, 0),\n", + " datetime.datetime(2018, 4, 11, 20, 0),\n", + " datetime.datetime(2018, 4, 11, 21, 0),\n", + " datetime.datetime(2018, 4, 11, 22, 0),\n", + " datetime.datetime(2018, 4, 11, 23, 0),\n", + " datetime.datetime(2018, 4, 12, 0, 0),\n", + " datetime.datetime(2018, 4, 12, 1, 0),\n", + " datetime.datetime(2018, 4, 12, 2, 0),\n", + " datetime.datetime(2018, 4, 12, 3, 0),\n", + " datetime.datetime(2018, 4, 12, 4, 0),\n", + " datetime.datetime(2018, 4, 12, 5, 0),\n", + " datetime.datetime(2018, 4, 12, 6, 0),\n", + " 
datetime.datetime(2018, 4, 12, 7, 0),\n", + " datetime.datetime(2018, 4, 12, 8, 0),\n", + " datetime.datetime(2018, 4, 12, 9, 0),\n", + " datetime.datetime(2018, 4, 12, 10, 0),\n", + " datetime.datetime(2018, 4, 12, 11, 0),\n", + " datetime.datetime(2018, 4, 12, 12, 0),\n", + " datetime.datetime(2018, 4, 12, 13, 0),\n", + " datetime.datetime(2018, 4, 12, 14, 0),\n", + " datetime.datetime(2018, 4, 12, 15, 0),\n", + " datetime.datetime(2018, 4, 12, 16, 0),\n", + " datetime.datetime(2018, 4, 12, 17, 0),\n", + " datetime.datetime(2018, 4, 12, 18, 0),\n", + " datetime.datetime(2018, 4, 12, 19, 0),\n", + " datetime.datetime(2018, 4, 12, 20, 0),\n", + " datetime.datetime(2018, 4, 12, 21, 0),\n", + " datetime.datetime(2018, 4, 12, 22, 0),\n", + " datetime.datetime(2018, 4, 12, 23, 0),\n", + " datetime.datetime(2018, 4, 13, 0, 0),\n", + " datetime.datetime(2018, 4, 13, 1, 0),\n", + " datetime.datetime(2018, 4, 13, 2, 0),\n", + " datetime.datetime(2018, 4, 13, 3, 0),\n", + " datetime.datetime(2018, 4, 13, 4, 0),\n", + " datetime.datetime(2018, 4, 13, 5, 0),\n", + " datetime.datetime(2018, 4, 13, 6, 0),\n", + " datetime.datetime(2018, 4, 13, 7, 0),\n", + " datetime.datetime(2018, 4, 13, 8, 0),\n", + " datetime.datetime(2018, 4, 13, 9, 0),\n", + " datetime.datetime(2018, 4, 13, 10, 0),\n", + " datetime.datetime(2018, 4, 13, 11, 0),\n", + " datetime.datetime(2018, 4, 13, 12, 0),\n", + " datetime.datetime(2018, 4, 13, 13, 0),\n", + " datetime.datetime(2018, 4, 13, 14, 0),\n", + " datetime.datetime(2018, 4, 13, 15, 0),\n", + " datetime.datetime(2018, 4, 13, 16, 0),\n", + " datetime.datetime(2018, 4, 13, 17, 0),\n", + " datetime.datetime(2018, 4, 13, 18, 0),\n", + " datetime.datetime(2018, 4, 13, 19, 0),\n", + " datetime.datetime(2018, 4, 13, 20, 0),\n", + " datetime.datetime(2018, 4, 13, 21, 0),\n", + " datetime.datetime(2018, 4, 13, 22, 0),\n", + " datetime.datetime(2018, 4, 13, 23, 0),\n", + " datetime.datetime(2018, 4, 14, 0, 0),\n", + " datetime.datetime(2018, 4, 14, 1, 0),\n", + " datetime.datetime(2018, 4, 14, 2, 0),\n", + " datetime.datetime(2018, 4, 14, 3, 0),\n", + " datetime.datetime(2018, 4, 14, 4, 0),\n", + " datetime.datetime(2018, 4, 14, 5, 0),\n", + " datetime.datetime(2018, 4, 14, 6, 0),\n", + " datetime.datetime(2018, 4, 14, 7, 0),\n", + " datetime.datetime(2018, 4, 14, 8, 0),\n", + " datetime.datetime(2018, 4, 14, 9, 0),\n", + " datetime.datetime(2018, 4, 14, 10, 0),\n", + " datetime.datetime(2018, 4, 14, 11, 0),\n", + " datetime.datetime(2018, 4, 14, 12, 0),\n", + " datetime.datetime(2018, 4, 14, 13, 0),\n", + " datetime.datetime(2018, 4, 14, 14, 0),\n", + " datetime.datetime(2018, 4, 14, 15, 0),\n", + " datetime.datetime(2018, 4, 14, 16, 0),\n", + " datetime.datetime(2018, 4, 14, 17, 0),\n", + " datetime.datetime(2018, 4, 14, 18, 0),\n", + " datetime.datetime(2018, 4, 14, 19, 0),\n", + " datetime.datetime(2018, 4, 14, 20, 0),\n", + " datetime.datetime(2018, 4, 14, 21, 0),\n", + " datetime.datetime(2018, 4, 14, 22, 0),\n", + " datetime.datetime(2018, 4, 14, 23, 0),\n", + " datetime.datetime(2018, 4, 15, 0, 0),\n", + " datetime.datetime(2018, 4, 15, 1, 0),\n", + " datetime.datetime(2018, 4, 15, 2, 0),\n", + " datetime.datetime(2018, 4, 15, 3, 0),\n", + " datetime.datetime(2018, 4, 15, 4, 0),\n", + " datetime.datetime(2018, 4, 15, 5, 0),\n", + " datetime.datetime(2018, 4, 15, 6, 0),\n", + " datetime.datetime(2018, 4, 15, 7, 0),\n", + " datetime.datetime(2018, 4, 15, 8, 0),\n", + " datetime.datetime(2018, 4, 15, 9, 0),\n", + " datetime.datetime(2018, 4, 15, 10, 0),\n", + " 
datetime.datetime(2018, 4, 15, 11, 0),\n", + " datetime.datetime(2018, 4, 15, 12, 0),\n", + " datetime.datetime(2018, 4, 15, 13, 0),\n", + " datetime.datetime(2018, 4, 15, 14, 0),\n", + " datetime.datetime(2018, 4, 15, 15, 0),\n", + " datetime.datetime(2018, 4, 15, 16, 0),\n", + " datetime.datetime(2018, 4, 15, 17, 0),\n", + " datetime.datetime(2018, 4, 15, 18, 0),\n", + " datetime.datetime(2018, 4, 15, 19, 0),\n", + " datetime.datetime(2018, 4, 15, 20, 0),\n", + " datetime.datetime(2018, 4, 15, 21, 0),\n", + " datetime.datetime(2018, 4, 15, 22, 0),\n", + " datetime.datetime(2018, 4, 15, 23, 0),\n", + " datetime.datetime(2018, 4, 16, 0, 0),\n", + " datetime.datetime(2018, 4, 16, 1, 0),\n", + " datetime.datetime(2018, 4, 16, 2, 0),\n", + " datetime.datetime(2018, 4, 16, 3, 0),\n", + " datetime.datetime(2018, 4, 16, 4, 0),\n", + " datetime.datetime(2018, 4, 16, 5, 0),\n", + " datetime.datetime(2018, 4, 16, 6, 0),\n", + " datetime.datetime(2018, 4, 16, 7, 0),\n", + " datetime.datetime(2018, 4, 16, 8, 0),\n", + " datetime.datetime(2018, 4, 16, 9, 0),\n", + " datetime.datetime(2018, 4, 16, 10, 0),\n", + " datetime.datetime(2018, 4, 16, 11, 0),\n", + " datetime.datetime(2018, 4, 16, 12, 0),\n", + " datetime.datetime(2018, 4, 16, 13, 0),\n", + " datetime.datetime(2018, 4, 16, 14, 0),\n", + " datetime.datetime(2018, 4, 16, 15, 0),\n", + " datetime.datetime(2018, 4, 16, 16, 0),\n", + " datetime.datetime(2018, 4, 16, 17, 0),\n", + " datetime.datetime(2018, 4, 16, 18, 0),\n", + " datetime.datetime(2018, 4, 16, 19, 0),\n", + " datetime.datetime(2018, 4, 16, 20, 0),\n", + " datetime.datetime(2018, 4, 16, 21, 0),\n", + " datetime.datetime(2018, 4, 16, 22, 0),\n", + " datetime.datetime(2018, 4, 16, 23, 0),\n", + " datetime.datetime(2018, 4, 17, 0, 0),\n", + " datetime.datetime(2018, 4, 17, 1, 0),\n", + " datetime.datetime(2018, 4, 17, 2, 0),\n", + " datetime.datetime(2018, 4, 17, 3, 0),\n", + " datetime.datetime(2018, 4, 17, 4, 0),\n", + " datetime.datetime(2018, 4, 17, 5, 0),\n", + " datetime.datetime(2018, 4, 17, 6, 0),\n", + " datetime.datetime(2018, 4, 17, 7, 0),\n", + " datetime.datetime(2018, 4, 17, 8, 0),\n", + " datetime.datetime(2018, 4, 17, 9, 0),\n", + " datetime.datetime(2018, 4, 17, 10, 0),\n", + " datetime.datetime(2018, 4, 17, 11, 0),\n", + " datetime.datetime(2018, 4, 17, 12, 0),\n", + " datetime.datetime(2018, 4, 17, 13, 0),\n", + " datetime.datetime(2018, 4, 17, 14, 0),\n", + " datetime.datetime(2018, 4, 17, 15, 0),\n", + " datetime.datetime(2018, 4, 17, 16, 0),\n", + " datetime.datetime(2018, 4, 17, 17, 0),\n", + " datetime.datetime(2018, 4, 17, 18, 0),\n", + " datetime.datetime(2018, 4, 17, 19, 0),\n", + " datetime.datetime(2018, 4, 17, 20, 0),\n", + " datetime.datetime(2018, 4, 17, 21, 0),\n", + " datetime.datetime(2018, 4, 17, 22, 0),\n", + " datetime.datetime(2018, 4, 17, 23, 0),\n", + " datetime.datetime(2018, 4, 18, 0, 0),\n", + " datetime.datetime(2018, 4, 18, 1, 0),\n", + " datetime.datetime(2018, 4, 18, 2, 0),\n", + " datetime.datetime(2018, 4, 18, 3, 0),\n", + " datetime.datetime(2018, 4, 18, 4, 0),\n", + " datetime.datetime(2018, 4, 18, 5, 0),\n", + " datetime.datetime(2018, 4, 18, 6, 0),\n", + " datetime.datetime(2018, 4, 18, 7, 0),\n", + " datetime.datetime(2018, 4, 18, 8, 0),\n", + " datetime.datetime(2018, 4, 18, 9, 0),\n", + " datetime.datetime(2018, 4, 18, 10, 0),\n", + " datetime.datetime(2018, 4, 18, 11, 0),\n", + " datetime.datetime(2018, 4, 18, 12, 0),\n", + " datetime.datetime(2018, 4, 18, 13, 0),\n", + " datetime.datetime(2018, 4, 18, 14, 0),\n", + " 
datetime.datetime(2018, 4, 18, 15, 0),\n", + " datetime.datetime(2018, 4, 18, 16, 0),\n", + " datetime.datetime(2018, 4, 18, 17, 0),\n", + " datetime.datetime(2018, 4, 18, 18, 0),\n", + " datetime.datetime(2018, 4, 18, 19, 0),\n", + " datetime.datetime(2018, 4, 18, 20, 0),\n", + " datetime.datetime(2018, 4, 18, 21, 0),\n", + " datetime.datetime(2018, 4, 18, 22, 0),\n", + " datetime.datetime(2018, 4, 18, 23, 0),\n", + " datetime.datetime(2018, 4, 19, 0, 0),\n", + " datetime.datetime(2018, 4, 19, 1, 0),\n", + " datetime.datetime(2018, 4, 19, 2, 0),\n", + " datetime.datetime(2018, 4, 19, 3, 0),\n", + " datetime.datetime(2018, 4, 19, 4, 0),\n", + " datetime.datetime(2018, 4, 19, 5, 0),\n", + " datetime.datetime(2018, 4, 19, 6, 0),\n", + " datetime.datetime(2018, 4, 19, 7, 0),\n", + " datetime.datetime(2018, 4, 19, 8, 0),\n", + " datetime.datetime(2018, 4, 19, 9, 0),\n", + " datetime.datetime(2018, 4, 19, 10, 0),\n", + " datetime.datetime(2018, 4, 19, 11, 0),\n", + " datetime.datetime(2018, 4, 19, 12, 0),\n", + " datetime.datetime(2018, 4, 19, 13, 0),\n", + " datetime.datetime(2018, 4, 19, 14, 0),\n", + " datetime.datetime(2018, 4, 19, 15, 0),\n", + " datetime.datetime(2018, 4, 19, 16, 0),\n", + " datetime.datetime(2018, 4, 19, 17, 0),\n", + " datetime.datetime(2018, 4, 19, 18, 0),\n", + " datetime.datetime(2018, 4, 19, 19, 0),\n", + " datetime.datetime(2018, 4, 19, 20, 0),\n", + " datetime.datetime(2018, 4, 19, 21, 0),\n", + " datetime.datetime(2018, 4, 19, 22, 0),\n", + " datetime.datetime(2018, 4, 19, 23, 0),\n", + " datetime.datetime(2018, 4, 20, 0, 0),\n", + " datetime.datetime(2018, 4, 20, 1, 0),\n", + " datetime.datetime(2018, 4, 20, 2, 0),\n", + " datetime.datetime(2018, 4, 20, 3, 0),\n", + " datetime.datetime(2018, 4, 20, 4, 0),\n", + " datetime.datetime(2018, 4, 20, 5, 0),\n", + " datetime.datetime(2018, 4, 20, 6, 0),\n", + " datetime.datetime(2018, 4, 20, 7, 0),\n", + " datetime.datetime(2018, 4, 20, 8, 0),\n", + " datetime.datetime(2018, 4, 20, 9, 0),\n", + " datetime.datetime(2018, 4, 20, 10, 0),\n", + " datetime.datetime(2018, 4, 20, 11, 0),\n", + " datetime.datetime(2018, 4, 20, 12, 0),\n", + " datetime.datetime(2018, 4, 20, 13, 0),\n", + " datetime.datetime(2018, 4, 20, 14, 0),\n", + " datetime.datetime(2018, 4, 20, 15, 0),\n", + " datetime.datetime(2018, 4, 20, 16, 0),\n", + " datetime.datetime(2018, 4, 20, 17, 0),\n", + " datetime.datetime(2018, 4, 20, 18, 0),\n", + " datetime.datetime(2018, 4, 20, 19, 0),\n", + " datetime.datetime(2018, 4, 20, 20, 0),\n", + " datetime.datetime(2018, 4, 20, 21, 0),\n", + " datetime.datetime(2018, 4, 20, 22, 0),\n", + " datetime.datetime(2018, 4, 20, 23, 0),\n", + " datetime.datetime(2018, 4, 21, 0, 0),\n", + " datetime.datetime(2018, 4, 21, 1, 0),\n", + " datetime.datetime(2018, 4, 21, 2, 0),\n", + " datetime.datetime(2018, 4, 21, 3, 0),\n", + " datetime.datetime(2018, 4, 21, 4, 0),\n", + " datetime.datetime(2018, 4, 21, 5, 0),\n", + " datetime.datetime(2018, 4, 21, 6, 0),\n", + " datetime.datetime(2018, 4, 21, 7, 0),\n", + " datetime.datetime(2018, 4, 21, 8, 0),\n", + " datetime.datetime(2018, 4, 21, 9, 0),\n", + " datetime.datetime(2018, 4, 21, 10, 0),\n", + " datetime.datetime(2018, 4, 21, 11, 0),\n", + " datetime.datetime(2018, 4, 21, 12, 0),\n", + " datetime.datetime(2018, 4, 21, 13, 0),\n", + " datetime.datetime(2018, 4, 21, 14, 0),\n", + " datetime.datetime(2018, 4, 21, 15, 0),\n", + " datetime.datetime(2018, 4, 21, 16, 0),\n", + " datetime.datetime(2018, 4, 21, 17, 0),\n", + " datetime.datetime(2018, 4, 21, 18, 0),\n", + " 
datetime.datetime(2018, 4, 21, 19, 0),\n", + " datetime.datetime(2018, 4, 21, 20, 0),\n", + " datetime.datetime(2018, 4, 21, 21, 0),\n", + " datetime.datetime(2018, 4, 21, 22, 0),\n", + " datetime.datetime(2018, 4, 21, 23, 0),\n", + " datetime.datetime(2018, 4, 22, 0, 0),\n", + " datetime.datetime(2018, 4, 22, 1, 0),\n", + " datetime.datetime(2018, 4, 22, 2, 0),\n", + " datetime.datetime(2018, 4, 22, 3, 0),\n", + " datetime.datetime(2018, 4, 22, 4, 0),\n", + " datetime.datetime(2018, 4, 22, 5, 0),\n", + " datetime.datetime(2018, 4, 22, 6, 0),\n", + " datetime.datetime(2018, 4, 22, 7, 0),\n", + " datetime.datetime(2018, 4, 22, 8, 0),\n", + " datetime.datetime(2018, 4, 22, 9, 0),\n", + " datetime.datetime(2018, 4, 22, 10, 0),\n", + " datetime.datetime(2018, 4, 22, 11, 0),\n", + " datetime.datetime(2018, 4, 22, 12, 0),\n", + " datetime.datetime(2018, 4, 22, 13, 0),\n", + " datetime.datetime(2018, 4, 22, 14, 0),\n", + " datetime.datetime(2018, 4, 22, 15, 0),\n", + " datetime.datetime(2018, 4, 22, 16, 0),\n", + " datetime.datetime(2018, 4, 22, 17, 0),\n", + " datetime.datetime(2018, 4, 22, 18, 0),\n", + " datetime.datetime(2018, 4, 22, 19, 0),\n", + " datetime.datetime(2018, 4, 22, 20, 0),\n", + " datetime.datetime(2018, 4, 22, 21, 0),\n", + " datetime.datetime(2018, 4, 22, 22, 0),\n", + " datetime.datetime(2018, 4, 22, 23, 0),\n", + " datetime.datetime(2018, 4, 23, 0, 0),\n", + " datetime.datetime(2018, 4, 23, 1, 0),\n", + " datetime.datetime(2018, 4, 23, 2, 0),\n", + " datetime.datetime(2018, 4, 23, 3, 0),\n", + " datetime.datetime(2018, 4, 23, 4, 0),\n", + " datetime.datetime(2018, 4, 23, 5, 0),\n", + " datetime.datetime(2018, 4, 23, 6, 0),\n", + " datetime.datetime(2018, 4, 23, 7, 0),\n", + " datetime.datetime(2018, 4, 23, 8, 0),\n", + " datetime.datetime(2018, 4, 23, 9, 0),\n", + " datetime.datetime(2018, 4, 23, 10, 0),\n", + " datetime.datetime(2018, 4, 23, 11, 0),\n", + " datetime.datetime(2018, 4, 23, 12, 0),\n", + " datetime.datetime(2018, 4, 23, 13, 0),\n", + " datetime.datetime(2018, 4, 23, 14, 0),\n", + " datetime.datetime(2018, 4, 23, 15, 0),\n", + " datetime.datetime(2018, 4, 23, 16, 0),\n", + " datetime.datetime(2018, 4, 23, 17, 0),\n", + " datetime.datetime(2018, 4, 23, 18, 0),\n", + " datetime.datetime(2018, 4, 23, 19, 0),\n", + " datetime.datetime(2018, 4, 23, 20, 0),\n", + " datetime.datetime(2018, 4, 23, 21, 0),\n", + " datetime.datetime(2018, 4, 23, 22, 0),\n", + " datetime.datetime(2018, 4, 23, 23, 0),\n", + " datetime.datetime(2018, 4, 24, 0, 0),\n", + " datetime.datetime(2018, 4, 24, 1, 0),\n", + " datetime.datetime(2018, 4, 24, 2, 0),\n", + " datetime.datetime(2018, 4, 24, 3, 0),\n", + " datetime.datetime(2018, 4, 24, 4, 0),\n", + " datetime.datetime(2018, 4, 24, 5, 0),\n", + " datetime.datetime(2018, 4, 24, 6, 0),\n", + " datetime.datetime(2018, 4, 24, 7, 0),\n", + " datetime.datetime(2018, 4, 24, 8, 0),\n", + " datetime.datetime(2018, 4, 24, 9, 0),\n", + " datetime.datetime(2018, 4, 24, 10, 0),\n", + " datetime.datetime(2018, 4, 24, 11, 0),\n", + " datetime.datetime(2018, 4, 24, 12, 0),\n", + " datetime.datetime(2018, 4, 24, 13, 0),\n", + " datetime.datetime(2018, 4, 24, 14, 0),\n", + " datetime.datetime(2018, 4, 24, 15, 0),\n", + " datetime.datetime(2018, 4, 24, 16, 0),\n", + " datetime.datetime(2018, 4, 24, 17, 0),\n", + " datetime.datetime(2018, 4, 24, 18, 0),\n", + " datetime.datetime(2018, 4, 24, 19, 0),\n", + " datetime.datetime(2018, 4, 24, 20, 0),\n", + " datetime.datetime(2018, 4, 24, 21, 0),\n", + " datetime.datetime(2018, 4, 24, 22, 0),\n", + " 
datetime.datetime(2018, 4, 24, 23, 0),\n", + " datetime.datetime(2018, 4, 25, 0, 0),\n", + " datetime.datetime(2018, 4, 25, 1, 0),\n", + " datetime.datetime(2018, 4, 25, 2, 0),\n", + " datetime.datetime(2018, 4, 25, 3, 0),\n", + " datetime.datetime(2018, 4, 25, 4, 0),\n", + " datetime.datetime(2018, 4, 25, 5, 0),\n", + " datetime.datetime(2018, 4, 25, 6, 0),\n", + " datetime.datetime(2018, 4, 25, 7, 0),\n", + " datetime.datetime(2018, 4, 25, 8, 0),\n", + " datetime.datetime(2018, 4, 25, 9, 0),\n", + " datetime.datetime(2018, 4, 25, 10, 0),\n", + " datetime.datetime(2018, 4, 25, 11, 0),\n", + " datetime.datetime(2018, 4, 25, 12, 0),\n", + " datetime.datetime(2018, 4, 25, 13, 0),\n", + " datetime.datetime(2018, 4, 25, 14, 0),\n", + " datetime.datetime(2018, 4, 25, 15, 0),\n", + " datetime.datetime(2018, 4, 25, 16, 0),\n", + " datetime.datetime(2018, 4, 25, 17, 0),\n", + " datetime.datetime(2018, 4, 25, 18, 0),\n", + " datetime.datetime(2018, 4, 25, 19, 0),\n", + " datetime.datetime(2018, 4, 25, 20, 0),\n", + " datetime.datetime(2018, 4, 25, 21, 0),\n", + " datetime.datetime(2018, 4, 25, 22, 0),\n", + " datetime.datetime(2018, 4, 25, 23, 0),\n", + " datetime.datetime(2018, 4, 26, 0, 0),\n", + " datetime.datetime(2018, 4, 26, 1, 0),\n", + " datetime.datetime(2018, 4, 26, 2, 0),\n", + " datetime.datetime(2018, 4, 26, 3, 0),\n", + " datetime.datetime(2018, 4, 26, 4, 0),\n", + " datetime.datetime(2018, 4, 26, 5, 0),\n", + " datetime.datetime(2018, 4, 26, 6, 0),\n", + " datetime.datetime(2018, 4, 26, 7, 0),\n", + " datetime.datetime(2018, 4, 26, 8, 0),\n", + " datetime.datetime(2018, 4, 26, 9, 0),\n", + " datetime.datetime(2018, 4, 26, 10, 0),\n", + " datetime.datetime(2018, 4, 26, 11, 0),\n", + " datetime.datetime(2018, 4, 26, 12, 0),\n", + " datetime.datetime(2018, 4, 26, 13, 0),\n", + " datetime.datetime(2018, 4, 26, 14, 0),\n", + " datetime.datetime(2018, 4, 26, 15, 0),\n", + " datetime.datetime(2018, 4, 26, 16, 0),\n", + " datetime.datetime(2018, 4, 26, 17, 0),\n", + " datetime.datetime(2018, 4, 26, 18, 0),\n", + " datetime.datetime(2018, 4, 26, 19, 0),\n", + " datetime.datetime(2018, 4, 26, 20, 0),\n", + " datetime.datetime(2018, 4, 26, 21, 0),\n", + " datetime.datetime(2018, 4, 26, 22, 0),\n", + " datetime.datetime(2018, 4, 26, 23, 0),\n", + " datetime.datetime(2018, 4, 27, 0, 0),\n", + " datetime.datetime(2018, 4, 27, 1, 0),\n", + " datetime.datetime(2018, 4, 27, 2, 0),\n", + " datetime.datetime(2018, 4, 27, 3, 0),\n", + " datetime.datetime(2018, 4, 27, 4, 0),\n", + " datetime.datetime(2018, 4, 27, 5, 0),\n", + " datetime.datetime(2018, 4, 27, 6, 0),\n", + " datetime.datetime(2018, 4, 27, 7, 0),\n", + " datetime.datetime(2018, 4, 27, 8, 0),\n", + " datetime.datetime(2018, 4, 27, 9, 0),\n", + " datetime.datetime(2018, 4, 27, 10, 0),\n", + " datetime.datetime(2018, 4, 27, 11, 0),\n", + " datetime.datetime(2018, 4, 27, 12, 0),\n", + " datetime.datetime(2018, 4, 27, 13, 0),\n", + " datetime.datetime(2018, 4, 27, 14, 0),\n", + " datetime.datetime(2018, 4, 27, 15, 0),\n", + " datetime.datetime(2018, 4, 27, 16, 0),\n", + " datetime.datetime(2018, 4, 27, 17, 0),\n", + " datetime.datetime(2018, 4, 27, 18, 0),\n", + " datetime.datetime(2018, 4, 27, 19, 0),\n", + " datetime.datetime(2018, 4, 27, 20, 0),\n", + " datetime.datetime(2018, 4, 27, 21, 0),\n", + " datetime.datetime(2018, 4, 27, 22, 0),\n", + " datetime.datetime(2018, 4, 27, 23, 0),\n", + " datetime.datetime(2018, 4, 28, 0, 0),\n", + " datetime.datetime(2018, 4, 28, 1, 0),\n", + " datetime.datetime(2018, 4, 28, 2, 0),\n", + " 
datetime.datetime(2018, 4, 28, 3, 0),\n", + " datetime.datetime(2018, 4, 28, 4, 0),\n", + " datetime.datetime(2018, 4, 28, 5, 0),\n", + " datetime.datetime(2018, 4, 28, 6, 0),\n", + " datetime.datetime(2018, 4, 28, 7, 0),\n", + " datetime.datetime(2018, 4, 28, 8, 0),\n", + " datetime.datetime(2018, 4, 28, 9, 0),\n", + " datetime.datetime(2018, 4, 28, 10, 0),\n", + " datetime.datetime(2018, 4, 28, 11, 0),\n", + " datetime.datetime(2018, 4, 28, 12, 0),\n", + " datetime.datetime(2018, 4, 28, 13, 0),\n", + " datetime.datetime(2018, 4, 28, 14, 0),\n", + " datetime.datetime(2018, 4, 28, 15, 0),\n", + " datetime.datetime(2018, 4, 28, 16, 0),\n", + " datetime.datetime(2018, 4, 28, 17, 0),\n", + " datetime.datetime(2018, 4, 28, 18, 0),\n", + " datetime.datetime(2018, 4, 28, 19, 0),\n", + " datetime.datetime(2018, 4, 28, 20, 0),\n", + " datetime.datetime(2018, 4, 28, 21, 0),\n", + " datetime.datetime(2018, 4, 28, 22, 0),\n", + " datetime.datetime(2018, 4, 28, 23, 0),\n", + " datetime.datetime(2018, 4, 29, 0, 0),\n", + " datetime.datetime(2018, 4, 29, 1, 0),\n", + " datetime.datetime(2018, 4, 29, 2, 0),\n", + " datetime.datetime(2018, 4, 29, 3, 0),\n", + " datetime.datetime(2018, 4, 29, 4, 0),\n", + " datetime.datetime(2018, 4, 29, 5, 0),\n", + " datetime.datetime(2018, 4, 29, 6, 0),\n", + " datetime.datetime(2018, 4, 29, 7, 0),\n", + " datetime.datetime(2018, 4, 29, 8, 0),\n", + " datetime.datetime(2018, 4, 29, 9, 0),\n", + " datetime.datetime(2018, 4, 29, 10, 0),\n", + " datetime.datetime(2018, 4, 29, 11, 0),\n", + " datetime.datetime(2018, 4, 29, 12, 0),\n", + " datetime.datetime(2018, 4, 29, 13, 0),\n", + " datetime.datetime(2018, 4, 29, 14, 0),\n", + " datetime.datetime(2018, 4, 29, 15, 0),\n", + " datetime.datetime(2018, 4, 29, 16, 0),\n", + " datetime.datetime(2018, 4, 29, 17, 0),\n", + " datetime.datetime(2018, 4, 29, 18, 0),\n", + " datetime.datetime(2018, 4, 29, 19, 0),\n", + " datetime.datetime(2018, 4, 29, 20, 0),\n", + " datetime.datetime(2018, 4, 29, 21, 0),\n", + " datetime.datetime(2018, 4, 29, 22, 0),\n", + " datetime.datetime(2018, 4, 29, 23, 0),\n", + " datetime.datetime(2018, 4, 30, 0, 0),\n", + " datetime.datetime(2018, 4, 30, 1, 0),\n", + " datetime.datetime(2018, 4, 30, 2, 0),\n", + " datetime.datetime(2018, 4, 30, 3, 0),\n", + " datetime.datetime(2018, 4, 30, 4, 0),\n", + " datetime.datetime(2018, 4, 30, 5, 0),\n", + " datetime.datetime(2018, 4, 30, 6, 0),\n", + " datetime.datetime(2018, 4, 30, 7, 0),\n", + " datetime.datetime(2018, 4, 30, 8, 0),\n", + " datetime.datetime(2018, 4, 30, 9, 0),\n", + " datetime.datetime(2018, 4, 30, 10, 0),\n", + " datetime.datetime(2018, 4, 30, 11, 0),\n", + " datetime.datetime(2018, 4, 30, 12, 0),\n", + " datetime.datetime(2018, 4, 30, 13, 0),\n", + " datetime.datetime(2018, 4, 30, 14, 0),\n", + " datetime.datetime(2018, 4, 30, 15, 0),\n", + " datetime.datetime(2018, 4, 30, 16, 0),\n", + " datetime.datetime(2018, 4, 30, 17, 0),\n", + " datetime.datetime(2018, 4, 30, 18, 0),\n", + " datetime.datetime(2018, 4, 30, 19, 0),\n", + " datetime.datetime(2018, 4, 30, 20, 0),\n", + " datetime.datetime(2018, 4, 30, 21, 0),\n", + " datetime.datetime(2018, 4, 30, 22, 0),\n", + " datetime.datetime(2018, 4, 30, 23, 0)]" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2.time" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 13, + "metadata": {}, + 
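The `time` accessor returns a plain Python list of `datetime.datetime` objects rather than a NumPy array, so ordinary list operations can be used to sanity-check the axis (720 hourly steps covering April 2018, matching the `time: 720` dimension shown above):

```python
# Simple consistency checks on the time axis of the experiment dataset.
from datetime import datetime

times = nessy_2.time
assert len(times) == 720                          # 30 days x 24 hourly steps
assert times[0] == datetime(2018, 4, 1, 0, 0)     # first hourly step
assert times[-1] == datetime(2018, 4, 30, 23, 0)  # last hourly step
```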
"output_type": "execute_result" + } + ], + "source": [ + "nessy_2.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, -22.10333333, -31.66861111,\n", + " 47.76666641, 46.677778 , 48.721111 , 47.529167 ,\n", + " 47.05407 , 46.693611 , 47.348056 , 47.973056 ,\n", + " 48.878611 , 48.106111 , 48.371111 , 48.334722 ,\n", + " 48.050833 , 47.838611 , 47.040277 , 47.06694444,\n", + " 49.877778 , 50.629421 , 50.503333 , 41.695833 ,\n", + " 32.27000046, 80.05000305, 46.5475 , 46.813056 ,\n", + " 47.479722 , 47.049722 , 47.0675 , 47.18961391,\n", + " -30.17254 , 16.86403 , 35.0381 , 49.73508444,\n", + " 49.573394 , 49.066667 , 54.925556 , 52.802222 ,\n", + " 47.914722 , 53.166667 , 50.65 , 54.4368 ,\n", + " 47.80149841, 47.4165 , -70.666 , 54.746495 ,\n", + " 81.6 , 55.693588 , 72.58000183, 56.290424 ,\n", + " 59.5 , 58.383333 , 39.54694 , 42.72056 ,\n", + " 39.87528 , 37.23722 , 43.43917 , 41.27417 ,\n", + " 42.31917 , 38.47278 , 39.08278 , 41.23889 ,\n", + " 41.39389 , 42.63472 , 37.05194 , 28.309 ,\n", + " 59.779167 , 60.53002 , 66.320278 , 67.97333333,\n", + " 48.5 , 49.9 , 47.266667 , 43.616667 ,\n", + " 47.3 , 46.65 , 45. , 45.8 ,\n", + " 48.633333 , 42.936667 , 48.70861111, 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , -75.62 , 51.149617 ,\n", + " 38.366667 , 35.316667 , 46.966667 , 46.91 ,\n", + " -0.20194 , 51.939722 , 53.32583 , 45.8 ,\n", + " 44.183333 , 37.571111 , 35.5182 , 42.805462 ,\n", + " -69.005 , 39.0319 , 24.2883 , 24.466941 ,\n", + " 36.53833389, 33.293917 , 55.37611111, 56.161944 ,\n", + " 57.135278 , 41.536111 , 36.0722 , 52.083333 ,\n", + " 53.333889 , 51.541111 , 52.3 , 51.974444 ,\n", + " 58.38853 , 65.833333 , 62.783333 , 78.90715 ,\n", + " 59. 
, 69.45 , 59.2 , 60.372386 ,\n", + " -72.0117 , 59.2 , -41.40819168, -77.83200073,\n", + " -45.0379982 , 51.814408 , 50.736444 , 54.753894 ,\n", + " 54.15 , 43.4 , 71.58616638, 63.85 ,\n", + " 67.883333 , 57.394 , 57.1645 , 57.9525 ,\n", + " 56.0429 , 60.0858 , 57.816667 , 64.25 ,\n", + " 59.728 , 45.566667 , 46.428611 , 46.299444 ,\n", + " 48.933333 , 49.15 , 49.05 , 47.96 ,\n", + " 71.32301331, 40.12498 , 19.53623009, -89.99694824,\n", + " 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'long_name': 'latitude',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, -6.56008333e+01,\n", + " -6.38819444e+01, 1.67666664e+01, 1.29722220e+01,\n", + " 1.59422220e+01, 9.92666700e+00, 1.29579400e+01,\n", + " 1.39150000e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 2.15888889e+00, 5.27897222e+00,\n", + " 2.61000833e+00, 2.96488600e+00, -3.20416700e+00,\n", + " -7.87000000e+00, -3.71305600e+00, -8.07500000e-01,\n", + " -4.77444400e+00, -3.03305600e+00, -3.20500000e+00,\n", + " -1.75333300e+00, 1.79444000e-01, 1.46305600e+00,\n", + " -4.69146200e+00, 2.92778000e-01, -3.24290000e+00,\n", + " 1.12194400e+00, 1.08223000e+00, -1.18531900e+00,\n", + " -2.61800000e+01, -1.43822800e+00, 2.30833330e+01,\n", + " 2.56666670e+01, 1.95833330e+01, 1.63200000e+01,\n", + " 1.00318100e+02, -1.02444440e+01, -9.89944000e+00,\n", + " 8.63333300e+00, 1.07000000e+01, 1.26597220e+01,\n", + " 1.26305000e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 2.06938900e+01,\n", + " 1.42184000e+01, 6.56666700e+00, 
6.27722200e+00,\n", + " 5.85361100e+00, 4.50000000e+00, 4.92361100e+00,\n", + " 8.25200000e+00, 1.39166670e+01, 8.88333300e+00,\n", + " 1.18866800e+01, 1.15333330e+01, 3.00333330e+01,\n", + " 5.20000000e+00, 1.10781420e+01, 2.53510000e+00,\n", + " 9.51666700e+00, 1.74870804e+02, 1.66660004e+02,\n", + " 1.69684006e+02, 2.19724190e+01, 1.57395000e+01,\n", + " 1.75342640e+01, 2.20666670e+01, 2.19500000e+01,\n", + " 1.28918823e+02, 1.53333330e+01, 2.10666670e+01,\n", + " 1.19140000e+01, 1.47825000e+01, 1.24030000e+01,\n", + " 1.31480000e+01, 1.75052800e+01, 1.55666670e+01,\n", + " 1.97666670e+01, 1.54720000e+01, 1.48666670e+01,\n", + " 1.50033330e+01, 1.45386110e+01, 1.95833330e+01,\n", + " 2.02833330e+01, 2.22666670e+01, 1.78605560e+01,\n", + " -1.56611465e+02, -1.05236800e+02, -1.55576157e+02,\n", + " -2.47999992e+01, -1.24151001e+02, 1.03515700e+02,\n", + " 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'long_name': 'longitude',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading grid_edge_latitude var (1/6)\n", + "Rank 000: Loaded grid_edge_latitude var ((1125,))\n", + "Rank 000: Loading grid_edge_longitude var (2/6)\n", + "Rank 000: Loaded grid_edge_longitude var ((1125,))\n", + "Rank 000: Loading model_centre_latitude var (3/6)\n", + "Rank 000: Loaded model_centre_latitude var ((211, 351))\n", + "Rank 000: Loading model_centre_longitude var (4/6)\n", + "Rank 000: Loaded model_centre_longitude var ((211, 351))\n", + "Rank 000: Loading sconco3 var (5/6)\n", + "Rank 000: Loaded sconco3 var ((175, 720))\n", + "Rank 000: Loading station_reference var (6/6)\n", + "Rank 000: Loaded station_reference var ((175,))\n" + ] + } + ], + "source": [ + "nessy_2.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating providentia_exp_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing grid_edge_latitude var (1/6)\n", + "**ERROR** an error has occurred while writing the 'grid_edge_latitude' variable\n" + ] + }, + { + "ename": "ValueError", + "evalue": "cannot find dimension grid_edge in this group or parent groups", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0mdim\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdimensions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mdimname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'dimensions'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 52\u001b[0;31m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'parent'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mnessy_2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_netcdf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'providentia_exp_file.nc'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36mto_netcdf\u001b[0;34m(self, path, compression_level, serial, info, chunking)\u001b[0m\n\u001b[1;32m 1431\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1432\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1433\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__to_netcdf_py\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1434\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1435\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_info\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mold_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36m__to_netcdf_py\u001b[0;34m(self, path, chunking)\u001b[0m\n\u001b[1;32m 1381\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1382\u001b[0m \u001b[0;31m# Create variables\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1383\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_create_variables\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetcdf\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1384\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1385\u001b[0m \u001b[0;31m# Create metadata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 439\u001b[0m \u001b[0;31m# print(\"**ERROR** an error hase occurred while writing the '{0}' variable\".format(var_name),\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 440\u001b[0m \u001b[0;31m# file=sys.stderr)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 442\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 443\u001b[0m \u001b[0mmsg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'WARNING!!! '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 379\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 380\u001b[0m var = netcdf.createVariable(var_name, var_dtype, var_dims,\n\u001b[0;32m--> 381\u001b[0;31m zlib=self.zip_lvl > 0, complevel=self.zip_lvl)\n\u001b[0m\u001b[1;32m 382\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmaster\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Dataset.createVariable\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Variable.__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"cannot find dimension %s in this group or parent groups\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mdimname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdim\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mValueError\u001b[0m: cannot find dimension grid_edge in this 
group or parent groups" + ] + } + ], + "source": [ + "nessy_2.to_netcdf('providentia_exp_file.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd b/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd index e85ee59f905fc06300c118818690a3890e184b21..a1aa14f7ac85d67c68c977bab0ad673e139ccf71 100644 --- a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd +++ b/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd @@ -1,7 +1,7 @@ #!/bin/bash #SBATCH --ntasks 1 #SBATCH --time 03:00:00 -#SBATCH --job-name jupyter-notebook +#SBATCH --job-name NES #SBATCH --output log_jupyter-notebook-%J.out #SBATCH --error log_jupyter-notebook-%J.err #SBATCH --exclusive @@ -26,8 +26,9 @@ localhost:${port} (prefix w/ https:// if using password) module load jupyterlab/3.0.9-foss-2019b-Python-3.7.4 module load Python/3.7.4-GCCcore-8.3.0 module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4 -module load cfunits/1.8-foss-2019b-Python-3.7.4 module load xarray/0.19.0-foss-2019b-Python-3.7.4 +module load cftime/1.0.1-foss-2019b-Python-3.7.4 +module load cfunits/1.8-foss-2019b-Python-3.7.4 # export PYTHONPATH=/gpfs/scratch/bsc32/bsc32538/NES_tests/NES:${PYTHONPATH} export PYTHONPATH=/esarchive/scratch/avilanova/software/NES:${PYTHONPATH} diff --git a/Jupyter_notebooks/NES_create_netcdf_test.ipynb b/Jupyter_notebooks/NES_create_netcdf_test.ipynb deleted file mode 100644 index bf8560655eba7e1957cbd870672bf3d251492cd6..0000000000000000000000000000000000000000 --- a/Jupyter_notebooks/NES_create_netcdf_test.ipynb +++ /dev/null @@ -1,2228 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from nes import *\n", - "import xarray as xr\n", - "from netCDF4 import Dataset" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create regular grid" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "lat_orig = 41.1\n", - "lon_orig = 1.8\n", - "inc_lat = 0.1\n", - "inc_lon = 0.1\n", - "n_lat = 10\n", - "n_lon = 10\n", - "regular_grid = create_nes(comm=None, info=False, projection='regular', create_nes=True,\n", - " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", - " n_lat=n_lat, n_lon=n_lon)" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating regular_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "regular_grid.to_netcdf('regular_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
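The `ValueError` above comes from `netCDF4`'s `_find_dim`: `_create_variables` in `points_nes.py` passes the variable's original dimensions (here `grid_edge`) to `createVariable`, but the points writer only defines its own station/time dimensions, so the lookup fails. One possible guard is sketched below; this is an assumption about how it could be handled, not the actual NES fix, and `var_shape` is a hypothetical name for the variable's shape tuple:

```python
# Hypothetical guard before netcdf.createVariable(var_name, var_dtype, var_dims, ...):
# create any dimension the variable needs that the writer has not defined yet.
for dim_name, dim_size in zip(var_dims, var_shape):
    if dim_name not in netcdf.dimensions:
        netcdf.createDimension(dim_name, dim_size)
```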
\n",
-       "
<xarray.Dataset>\n",
-       "Dimensions:  (time: 1, lev: 1, lat: 10, lon: 10)\n",
-       "Coordinates:\n",
-       "  * time     (time) datetime64[ns] 1996-12-31\n",
-       "  * lev      (lev) float64 0.0\n",
-       "  * lat      (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n",
-       "  * lon      (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n",
-       "Data variables:\n",
-       "    crs      |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, lat: 10, lon: 10)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * lat (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n", - " * lon (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n", - "Data variables:\n", - " crs |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('regular_grid.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create rotated grid" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "centre_lat = 51\n", - "centre_lon = 10\n", - "west_boundary = -35\n", - "south_boundary = -27\n", - "inc_rlat = 0.2\n", - "inc_rlon = 0.2\n", - "n_lat = 10\n", - "n_lon = 10\n", - "rotated_grid = create_nes(comm=None, info=False, projection='rotated', create_nes=True,\n", - " centre_lat=centre_lat, centre_lon=centre_lon,\n", - " west_boundary=west_boundary, south_boundary=south_boundary,\n", - " inc_rlat=inc_rlat, inc_rlon=inc_rlon, n_lat=n_lat, n_lon=n_lon)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating rotated_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "rotated_grid.to_netcdf('rotated_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n",
-       "
<xarray.Dataset>\n",
-       "Dimensions:       (time: 1, lev: 1, rlat: 10, rlon: 10)\n",
-       "Coordinates:\n",
-       "  * time          (time) datetime64[ns] 1996-12-31\n",
-       "  * lev           (lev) float64 0.0\n",
-       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... -25.6 -25.4 -25.2\n",
-       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... -33.6 -33.4 -33.2\n",
-       "Data variables:\n",
-       "    lat           (rlat, rlon) float64 -27.0 -26.8 -26.6 ... -25.6 -25.4 -25.2\n",
-       "    lon           (rlat, rlon) float64 -35.0 -34.8 -34.6 ... -33.6 -33.4 -33.2\n",
-       "    rotated_pole  |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, rlat: 10, rlon: 10)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... -25.6 -25.4 -25.2\n", - " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... -33.6 -33.4 -33.2\n", - "Data variables:\n", - " lat (rlat, rlon) float64 ...\n", - " lon (rlat, rlon) float64 ...\n", - " rotated_pole |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('rotated_grid.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create grid from random points" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "lat = [0, 10, 23, 50]\n", - "lon = [25, -30, 41, 12]\n", - "points_grid = create_nes(comm=None, info=False, projection=None, create_nes=True,\n", - " lat=lat, lon=lon)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating points_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "points_grid.to_netcdf('points_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n",
-       "
<xarray.Dataset>\n",
-       "Dimensions:  (time: 1, lev: 1, lat: 4, lon: 4)\n",
-       "Coordinates:\n",
-       "  * time     (time) datetime64[ns] 1996-12-31\n",
-       "  * lev      (lev) float64 0.0\n",
-       "  * lat      (lat) float64 0.0 10.0 23.0 50.0\n",
-       "  * lon      (lon) float64 25.0 -30.0 41.0 12.0\n",
-       "Data variables:\n",
-       "    *empty*\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, lat: 4, lon: 4)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * lat (lat) float64 0.0 10.0 23.0 50.0\n", - " * lon (lon) float64 25.0 -30.0 41.0 12.0\n", - "Data variables:\n", - " *empty*\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('points_grid.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create grid from NetCDF file" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n",
-       "
<xarray.Dataset>\n",
-       "Dimensions:                       (station: 8643, time: 744)\n",
-       "Coordinates:\n",
-       "  * time                          (time) datetime64[ns] 2021-07-01 ... 2021-0...\n",
-       "Dimensions without coordinates: station\n",
-       "Data variables: (12/20)\n",
-       "    station_code                  (station) |S75 b'AD0942A' ... b'ES2074A'\n",
-       "    station_start_date            (station) |S75 b'2004-06-17' ... b'nan'\n",
-       "    station_zone                  (station) |S75 b'nan' b'nan' ... b'nature'\n",
-       "    lat                           (station) float32 42.51 42.52 ... 28.09 28.48\n",
-       "    street_type                   (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
-       "    country_code                  (station) |S75 b'AD' b'AD' ... b'ES' b'ES'\n",
-       "    ...                            ...\n",
-       "    lon                           (station) float32 1.539 1.565 ... -16.26\n",
-       "    station_end_date              (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
-       "    station_local_code            (station) |S75 b'942' b'944' ... b'38038033'\n",
-       "    station_rural_back            (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
-       "    station_ozone_classification  (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
-       "    pm10                          (time, station) float32 ...
" - ], - "text/plain": [ - "\n", - "Dimensions: (station: 8643, time: 744)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 2021-07-01 ... 2021-0...\n", - "Dimensions without coordinates: station\n", - "Data variables: (12/20)\n", - " station_code (station) |S75 ...\n", - " station_start_date (station) |S75 ...\n", - " station_zone (station) |S75 ...\n", - " lat (station) float32 ...\n", - " street_type (station) |S75 ...\n", - " country_code (station) |S75 ...\n", - " ... ...\n", - " lon (station) float32 ...\n", - " station_end_date (station) |S75 ...\n", - " station_local_code (station) |S75 ...\n", - " station_rural_back (station) |S75 ...\n", - " station_ozone_classification (station) |S75 ...\n", - " pm10 (time, station) float32 ..." - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nc_path = '/esarchive/obs/eea/eionet/hourly/pm10/pm10_202107.nc'\n", - "xr.open_dataset(nc_path)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "nc = Dataset(nc_path, mode='r', clobber=False)\n", - "lat = nc.variables['lat'][:]\n", - "lon = nc.variables['lon'][:]\n", - "nc_points_grid = create_nes(comm=None, info=False, projection=None, create_nes=True,\n", - " lat=lat, lon=lon)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Creating nc_points_grid.nc\n", - "Rank 000: NetCDF ready to write\n", - "Rank 000: Dimensions done\n" - ] - } - ], - "source": [ - "nc_points_grid.to_netcdf('nc_points_grid.nc', info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n",
-       "
<xarray.Dataset>\n",
-       "Dimensions:  (time: 1, lev: 1, lat: 8643, lon: 8643)\n",
-       "Coordinates:\n",
-       "  * time     (time) datetime64[ns] 1996-12-31\n",
-       "  * lev      (lev) float64 0.0\n",
-       "  * lat      (lat) float64 42.51 42.52 42.53 41.33 ... 36.96 40.95 28.09 28.48\n",
-       "  * lon      (lon) float64 1.539 1.565 1.717 19.82 ... -0.2908 -17.12 -16.26\n",
-       "Data variables:\n",
-       "    *empty*\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, lev: 1, lat: 8643, lon: 8643)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 1996-12-31\n", - " * lev (lev) float64 0.0\n", - " * lat (lat) float64 42.51 42.52 42.53 41.33 ... 36.96 40.95 28.09 28.48\n", - " * lon (lon) float64 1.539 1.565 1.717 19.82 ... -0.2908 -17.12 -16.26\n", - "Data variables:\n", - " *empty*\n", - "Attributes:\n", - " Conventions: CF-1.7" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('nc_points_grid.nc')" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/Jupyter_notebooks/NES_time_bnds_test.ipynb b/Jupyter_notebooks/NES_time_bnds_test.ipynb deleted file mode 100644 index 1751bf61e7008473953defbfabd828e224f2e8b8..0000000000000000000000000000000000000000 --- a/Jupyter_notebooks/NES_time_bnds_test.ipynb +++ /dev/null @@ -1,675 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from nes import *\n", - "import xarray as xr\n", - "import datetime\n", - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Set time bounds" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "test_path = \"/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc\"\n", - "nessy = open_netcdf(path=test_path, info=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", - " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "array = np.array([[datetime.datetime(year=2020, month=2, day=20), \n", - " datetime.datetime(year=2020, month=2, day=15)]])\n", - "nessy.set_time_bnds(array)\n", - "nessy.time_bnds" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Rank 000: Loading O3_all var (1/1)\n", - "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" - ] - } - ], - "source": [ - "nessy.load()" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "nessy.to_netcdf('nc_serial_test.nc')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Explore variables" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'data': masked_array(\n", - " data=[[[[2.82636070e-09, 2.83436141e-09, 2.82522827e-09, ...,\n", - " 2.96334601e-09, 2.94810221e-09, 2.91839553e-09],\n", - " [2.80930834e-09, 2.57182142e-09, 2.55521360e-09, ...,\n", - " 2.56395216e-09, 2.55890820e-09, 2.89073032e-09],\n", - " [2.79031842e-09, 2.53415999e-09, 2.50317100e-09, ...,\n", - " 2.56737764e-09, 2.58685584e-09, 2.85498758e-09],\n", - " ...,\n", - " [4.54469973e-09, 
2.31674457e-09, 2.22753971e-09, ...,\n", - " 3.90127353e-09, 3.89643118e-09, 3.95452204e-09],\n", - " [4.54129534e-09, 3.39469808e-09, 2.30205255e-09, ...,\n", - " 3.88824706e-09, 3.88372090e-09, 3.95252631e-09],\n", - " [4.55012028e-09, 4.54941684e-09, 4.55885596e-09, ...,\n", - " 3.93945099e-09, 3.94256938e-09, 3.94736510e-09]],\n", - " \n", - " [[1.61966751e-09, 1.62850033e-09, 1.62801062e-09, ...,\n", - " 1.74583636e-09, 1.74684045e-09, 1.74125825e-09],\n", - " [1.60704539e-09, 1.41438683e-09, 1.39824063e-09, ...,\n", - " 1.43241041e-09, 1.45136980e-09, 1.73744363e-09],\n", - " [1.59303792e-09, 1.41264567e-09, 1.43958856e-09, ...,\n", - " 1.43522705e-09, 1.45869528e-09, 1.72746673e-09],\n", - " ...,\n", - " [3.39471939e-09, 2.65527422e-09, 2.22850582e-09, ...,\n", - " 3.00350167e-09, 3.02176750e-09, 3.04009262e-09],\n", - " [3.42592332e-09, 2.81851942e-09, 2.28753505e-09, ...,\n", - " 2.99818836e-09, 2.99247205e-09, 3.04403525e-09],\n", - " [3.43113582e-09, 3.43824125e-09, 3.44929552e-09, ...,\n", - " 3.05421777e-09, 3.04752024e-09, 3.04445491e-09]],\n", - " \n", - " [[6.52169652e-10, 6.62677024e-10, 6.71934786e-10, ...,\n", - " 6.84429291e-10, 6.85826118e-10, 6.81504464e-10],\n", - " [6.54959087e-10, 6.65219158e-10, 6.72430500e-10, ...,\n", - " 7.02121916e-10, 6.88325397e-10, 6.78990253e-10],\n", - " [6.57915333e-10, 6.72102929e-10, 6.82566170e-10, ...,\n", - " 7.10820458e-10, 7.07094217e-10, 6.77522760e-10],\n", - " ...,\n", - " [2.26027863e-09, 2.27629537e-09, 2.22616392e-09, ...,\n", - " 1.80253423e-09, 1.80225357e-09, 1.75757697e-09],\n", - " [2.25028196e-09, 2.24872521e-09, 2.25445618e-09, ...,\n", - " 1.78916737e-09, 1.75583581e-09, 1.73717007e-09],\n", - " [2.25827335e-09, 2.26974151e-09, 2.28325270e-09, ...,\n", - " 1.80090465e-09, 1.77703174e-09, 1.75434933e-09]],\n", - " \n", - " ...,\n", - " \n", - " [[6.20177729e-11, 6.26959387e-11, 6.28658792e-11, ...,\n", - " 7.74274672e-11, 7.81546980e-11, 7.60479180e-11],\n", - " [6.20486787e-11, 4.91600684e-11, 4.88878833e-11, ...,\n", - " 8.30884250e-11, 8.02152303e-11, 7.64004970e-11],\n", - " [6.20976950e-11, 4.84989236e-11, 4.85273696e-11, ...,\n", - " 8.46209977e-11, 8.60716498e-11, 9.29777644e-11],\n", - " ...,\n", - " [6.15721710e-11, 5.85051035e-11, 5.68927752e-11, ...,\n", - " 7.66955388e-11, 7.87262894e-11, 8.41871295e-11],\n", - " [6.17081941e-11, 5.77536560e-11, 5.71826440e-11, ...,\n", - " 8.49015233e-11, 8.82505458e-11, 9.20043208e-11],\n", - " [6.09760506e-11, 6.03529102e-11, 6.24047411e-11, ...,\n", - " 9.69636524e-11, 9.73700426e-11, 9.67554162e-11]],\n", - " \n", - " [[6.17567178e-11, 6.23894963e-11, 6.25706292e-11, ...,\n", - " 9.04916420e-11, 8.90077803e-11, 8.43536768e-11],\n", - " [6.17901147e-11, 4.59270816e-11, 4.57923699e-11, ...,\n", - " 1.06383589e-10, 1.05693093e-10, 9.44862175e-11],\n", - " [6.18271337e-11, 4.17853495e-11, 3.94594427e-11, ...,\n", - " 1.34135009e-10, 1.37096737e-10, 1.13853482e-10],\n", - " ...,\n", - " [5.87425456e-11, 5.60845814e-11, 5.33429169e-11, ...,\n", - " 6.52061183e-11, 6.64711411e-11, 7.06842501e-11],\n", - " [5.92315016e-11, 5.72428251e-11, 5.51245403e-11, ...,\n", - " 7.10893150e-11, 7.38196310e-11, 7.53354532e-11],\n", - " [5.72967125e-11, 5.87497967e-11, 6.08200851e-11, ...,\n", - " 7.97847274e-11, 8.28124236e-11, 7.89215707e-11]],\n", - " \n", - " [[6.15217946e-11, 6.21571961e-11, 6.23377391e-11, ...,\n", - " 1.08401239e-10, 1.07494236e-10, 1.08711720e-10],\n", - " [6.15563989e-11, 4.56989759e-11, 4.46428450e-11, ...,\n", - " 1.30999808e-10, 1.26581134e-10, 
1.39005307e-10],\n", - " [6.15933693e-11, 3.98656906e-11, 3.75483949e-11, ...,\n", - " 1.37105632e-10, 1.48587462e-10, 1.83946344e-10],\n", - " ...,\n", - " [4.68582569e-11, 4.44464673e-11, 4.43960736e-11, ...,\n", - " 5.86025117e-11, 5.84869791e-11, 6.32652056e-11],\n", - " [4.99817097e-11, 4.49490271e-11, 4.43218864e-11, ...,\n", - " 6.19639479e-11, 6.07859180e-11, 6.55651922e-11],\n", - " [4.98553143e-11, 4.61104453e-11, 4.96835975e-11, ...,\n", - " 6.42673414e-11, 6.38328765e-11, 6.38894007e-11]]]],\n", - " mask=False,\n", - " fill_value=1e+20,\n", - " dtype=float32),\n", - " 'dimensions': ('time', 'lev', 'rlat', 'rlon'),\n", - " 'units': 'kg/m3',\n", - " 'long_name': 'TRACERS_044',\n", - " 'coordinates': 'lat lon',\n", - " 'cell_methods': 'time: maximum (interval: 1hr)',\n", - " 'grid_mapping': 'rotated_pole'}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy.variables['O3_all']" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", - " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "nessy.time_bnds" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n",
-       "
<xarray.Dataset>\n",
-       "Dimensions:       (time: 1, time_nv: 2, lev: 24, rlat: 271, rlon: 351)\n",
-       "Coordinates:\n",
-       "  * time          (time) datetime64[ns] 2021-08-03\n",
-       "  * lev           (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n",
-       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
-       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
-       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
-       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
-       "Dimensions without coordinates: time_nv\n",
-       "Data variables:\n",
-       "    time_bnds     (time, time_nv) datetime64[ns] 2020-02-20 2020-02-15\n",
-       "    O3_all        (time, lev, rlat, rlon) float32 ...\n",
-       "    rotated_pole  |S1 b''\n",
-       "Attributes:\n",
-       "    Conventions:  CF-1.7\n",
-       "    comment:      Generated on marenostrum4
" - ], - "text/plain": [ - "\n", - "Dimensions: (time: 1, time_nv: 2, lev: 24, rlat: 271, rlon: 351)\n", - "Coordinates:\n", - " * time (time) datetime64[ns] 2021-08-03\n", - " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n", - " lat (rlat, rlon) float64 ...\n", - " lon (rlat, rlon) float64 ...\n", - " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", - " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", - "Dimensions without coordinates: time_nv\n", - "Data variables:\n", - " time_bnds (time, time_nv) datetime64[ns] ...\n", - " O3_all (time, lev, rlat, rlon) float32 ...\n", - " rotated_pole |S1 ...\n", - "Attributes:\n", - " Conventions: CF-1.7\n", - " comment: Generated on marenostrum4" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "xr.open_dataset('nc_serial_test.nc')" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/Jupyter_notebooks/input/Dades_2017.xlsx b/Jupyter_notebooks/input/Dades_2017.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..6385974cf23a4772e2125ca1edda5c5758252fc3 Binary files /dev/null and b/Jupyter_notebooks/input/Dades_2017.xlsx differ diff --git a/Jupyter_notebooks/input/XVPCA_info.csv b/Jupyter_notebooks/input/XVPCA_info.csv new file mode 100644 index 0000000000000000000000000000000000000000..41b94c9e79a49e9fa8bab191efe6f04705189ff3 --- /dev/null +++ b/Jupyter_notebooks/input/XVPCA_info.csv @@ -0,0 +1,135 @@ +station.code,lat,lon,standardised_network_provided_area_classification +ES0266A,41.379322448,2.086139959,urban-centre +ES0392A,41.727703559,1.838530912,urban-suburban +ES0395A,41.567823582,2.014598316,urban-centre +ES0559A,41.387423958,2.164918317,urban-centre +ES0567A,41.384906375,2.119573944,urban-centre +ES0584A,41.482016279,2.188296656,urban-suburban +ES0586A,41.413621183,2.015985703,urban-centre +ES0691A,41.403716,2.204736,urban-centre +ES0692A,41.37076,2.114771,urban-centre +ES0694A,41.392157459,2.009802277,urban-suburban +ES0700A,41.515609252,2.124996708,urban-centre +ES0704A,41.55242119,2.265250427,urban-suburban +ES0963A,41.473378006,1.982016549,urban-suburban +ES0971A,41.450745,1.975021,urban-suburban +ES0991A,41.88489741,2.874243477,urban-suburban +ES1018A,41.556115398,2.007401267,urban-centre +ES1117A,41.111995,1.151879,urban-suburban +ES1120A,41.115880526,1.191975478,urban-suburban +ES1122A,41.193743,1.236904,rural-near_city +ES1123A,41.155004,1.217734,urban-suburban +ES1124A,41.159532,1.239709,urban-suburban +ES1125A,41.730280261,1.825306423,urban-centre +ES1126A,41.475573452,1.923189624,urban-suburban +ES1135A,41.577626892,1.625893365,urban-suburban +ES1148A,41.425620904,2.222244805,urban-centre +ES1201A,42.391975687,2.842138412,rural-regional +ES1208A,41.150778895,1.120171923,urban-suburban +ES1215A,40.706708761,0.581651482,urban-suburban +ES1220A,41.830977786,1.755282654,rural +ES1222A,41.691273195,2.440944864,rural-regional +ES1225A,41.615794867,0.615725897,urban-centre +ES1231A,41.476769365,2.088977264,urban-centre +ES1248A,42.405378523,1.129930371,rural-regional 
+ES1262A,41.561264,2.101288,urban-centre +ES1275A,41.689049442,2.495746675,urban-suburban +ES1310A,42.311957,2.213146,rural-regional +ES1311A,41.958676874,3.212854412,rural +ES1312A,41.103677887,1.200765062,urban-suburban +ES1339A,41.219036461,1.721248774,urban-suburban +ES1347A,42.143260622,2.510206967,rural-regional +ES1348A,42.36839,1.776814,rural-regional +ES1362A,41.383226589,2.044453466,urban-centre +ES1379A,41.058203798,0.439711691,rural-regional +ES1390A,41.530418632,2.422048428,urban-suburban +ES1396A,41.378802802,2.133098078,urban-centre +ES1397A,42.003359222,2.287072698,urban-suburban +ES1408A,41.26805333,1.595034746,urban-suburban +ES1413A,41.531750031,2.432573332,urban-suburban +ES1438A,41.385366,2.15403,urban-centre +ES1447A,41.23429,1.72692,urban-suburban +ES1453A,41.447350254,2.209511187,urban-centre +ES1480A,41.398762,2.153472,urban-centre +ES1506A,41.122896353,1.246696221,urban-centre +ES1551A,41.512675808,2.125383709,urban-centre +ES1555A,41.224091559,1.726292388,urban-centre +ES1559A,41.959328421,3.037425681,urban-suburban +ES1588A,41.906647654,1.192974755,rural +ES1642A,41.93501,2.239901,urban-suburban +ES1663A,41.39881978,2.002128908,urban-suburban +ES1665A,41.479955339,2.187268328,urban-suburban +ES1666A,41.117387934,1.241649688,urban-centre +ES1679A,41.386413934,2.187416735,urban-centre +ES1680A,41.634656545,2.162349596,urban-suburban +ES1684A,41.492082863,2.042497085,urban-centre +ES1754A,40.643004752,0.288445976,rural-near_city +ES1773A,41.20223184,1.672200024,urban-suburban +ES1775A,41.608655219,2.135874806,urban-suburban +ES1776A,41.452115161,2.208196282,urban-centre +ES1778A,41.779343452,2.358018901,rural-remote +ES1812A,41.278381095,1.179916582,rural-near_city +ES1813A,41.100692784,0.755100288,rural-regional +ES1814A,41.54924,2.212144,urban-suburban +ES1815A,41.346823038,1.686575492,urban-suburban +ES1816A,41.547159712,2.443253787,urban-centre +ES1817A,41.526705931,2.183795315,urban-suburban +ES1839A,41.617943017,2.087078604,urban-suburban +ES1841A,41.551859569,2.43729,urban-suburban +ES1842A,41.481513156,2.26915353,urban-suburban +ES1843A,42.102313372,1.858062466,urban-suburban +ES1851A,42.097931,1.848338,urban-suburban +ES1852A,41.42068447,2.170571748,urban-suburban +ES1853A,41.244890143,1.617704646,rural-near_city +ES1854A,41.00821168,0.831084709,rural-regional +ES1855A,41.009505689,0.912875859,rural-near_city +ES1856A,41.4260772,2.147992032,urban-centre +ES1861A,41.722992004,1.826905859,urban-suburban +ES1870A,41.374943279,2.186841902,urban-suburban +ES1871A,41.436016001,2.007867622,urban-suburban +ES1872A,41.548225196,2.105157341,urban-centre +ES1874A,41.919593952,2.257149921,urban-suburban +ES1887A,41.848441044,2.224108021,rural-near_city +ES1891A,41.598666901,2.287117688,urban-centre +ES1892A,41.443983561,2.237875306,urban-centre +ES1895A,41.494082865,2.029636956,urban-suburban +ES1896A,41.320516155,1.664199192,urban-suburban +ES1899A,41.308402497,1.647339954,rural-near_city +ES1900A,41.387237826,2.186736695,urban-centre +ES1903A,41.313475208,2.013824628,urban-suburban +ES1909A,40.880656635,0.799176536,rural-near_city +ES1910A,41.303112534,1.991523957,urban-suburban +ES1923A,41.846711554,2.217443537,rural-near_city +ES1928A,41.446265477,2.227517323,urban-centre +ES1929A,41.32149521,2.09772526,urban-suburban +ES1930A,40.902692904,0.809795431,rural-near_city +ES1931A,41.469800555,2.184233488,urban-suburban +ES1936A,41.746333141,2.556661265,rural-near_city +ES1948A,40.939553197,0.831336974,rural-near_city +ES1964A,41.386638989,2.057392493,urban-suburban 
+ES1965A,41.38193008,2.066347853,urban-suburban +ES1982A,42.051340687,0.729555647,rural-remote +ES1983A,41.321768405,2.082140639,urban-suburban +ES1992A,41.387273,2.115661,urban-centre +ES1999A,41.976385751,2.816547298,urban-centre +ES2009A,40.577777716,0.546796795,rural-near_city +ES2011A,41.400770767,1.999634718,urban-suburban +ES2012A,41.415319291,1.990521216,urban-suburban +ES2017A,40.552819474,0.529982528,rural +ES2027A,41.45131069,2.248236084,urban-centre +ES2033A,41.242375047,1.859334489,rural +ES2034A,41.544104651,0.829933196,rural +ES2035A,41.567703441,1.637614343,urban-suburban +ES2071A,41.120064,1.254472,urban-centre +ES2079A,41.559607414,1.995963067,urban-suburban +ES2043A,41.230073,0.547183, +ES2090A,41.418413,2.123899, +ES0554A,40.2891666667,0.289166666667, +ES0977A,41.6047222222,1.60472222222, +ES1398A,41.53667,2.18361111111, +ES1200A,40.2813888889,0.281388888889, +ES2087A,41.929283,2.257302, +ES2091A,40.5799,0.5535, +ES2088A,41.77106,2.250647, +ES1908A,41.239068799,1.856563752, +ES9994A,42.358363,1.459455, diff --git a/nes/create_nes.py b/nes/create_nes.py index f074eace79180b89f487e71dfc7865d27ef2a116..a3353fdb37094dccc66c3108c6603ea21585cc34 100644 --- a/nes/create_nes.py +++ b/nes/create_nes.py @@ -2,14 +2,15 @@ import sys import os +import warnings from venv import create from mpi4py import MPI -import warnings from nes.nc_projections.rotated_nes import RotatedNes from nes.nc_projections.latlon_nes import LatLonNes from nes.nc_projections.points_nes import PointsNes -def create_nes(comm=None, info=False, projection=None, create_nes=True, **kwargs): + +def create_nes(comm=None, info=False, projection=None, create_nes=True, parallel_method='Y', **kwargs): if comm is None: comm = MPI.COMM_WORLD @@ -26,8 +27,7 @@ def create_nes(comm=None, info=False, projection=None, create_nes=True, **kwargs elif projection == 'regular': required_vars = ['lat_orig', 'lon_orig', 'inc_lat', 'inc_lon', 'n_lat', 'n_lon'] elif projection == 'rotated': - required_vars = ['centre_lat', 'centre_lon', 'west_boundary', 'south_boundary', 'inc_rlat', 'inc_rlon', - 'n_lat', 'n_lon'] + required_vars = ['centre_lat', 'centre_lon', 'west_boundary', 'south_boundary', 'inc_rlat', 'inc_rlon'] elif projection == 'lcc': required_vars = ['lat_1', 'lat_2', 'lon_0', 'lat_0', 'nx', 'ny', 'inc_x', 'inc_y', 'x_0', 'y_0'] elif projection == 'mercator': @@ -40,18 +40,19 @@ def create_nes(comm=None, info=False, projection=None, create_nes=True, **kwargs warnings.warn(msg) if projection == None: - nessy = PointsNes(comm=comm, dataset=None, xarray=None, info=info, parallel_method=None, + if parallel_method != 'X': + raise ValueError("Parallel method must be 'X' to create points NES.") + nessy = PointsNes(comm=comm, dataset=None, xarray=None, info=info, parallel_method=parallel_method, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=create_nes, **kwargs) - elif projection == 'regular': - nessy = LatLonNes(comm=comm, dataset=None, xarray=None, info=info, parallel_method=None, + nessy = LatLonNes(comm=comm, dataset=None, xarray=None, info=info, parallel_method=parallel_method, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=create_nes, **kwargs) - elif projection == 'rotated': - nessy = RotatedNes(comm=comm, dataset=None, xarray=None, info=info, parallel_method=None, + nessy = RotatedNes(comm=comm, dataset=None, xarray=None, info=info, parallel_method=parallel_method, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, 
create_nes=create_nes, **kwargs) - return nessy \ No newline at end of file + return nessy + \ No newline at end of file diff --git a/nes/load_nes.py b/nes/load_nes.py index 699275b9c6ec47ad450fc35afa3f3ceb2cd9ebb8..626d66c69732b70e1ac6fc28cca243a08ba683b2 100644 --- a/nes/load_nes.py +++ b/nes/load_nes.py @@ -8,7 +8,8 @@ from netCDF4 import Dataset from nes.nc_projections.default_nes import Nes from nes.nc_projections.rotated_nes import RotatedNes from nes.nc_projections.latlon_nes import LatLonNes - +from nes.nc_projections.points_nes import PointsNes +from nes.nc_projections.points_nes_ghost import PointsNesGHOST def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, @@ -52,12 +53,24 @@ def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y', else: dataset = Dataset(path, format="NETCDF4", mode='r', parallel=True, comm=comm, info=MPI.Info()) - # Rotated if __is_rotated(dataset): + # Rotated grids nessy = RotatedNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, create_nes=create_nes) + elif __is_points(dataset): + if __is_points_GHOST(dataset): + # Points - GHOST + nessy = PointsNesGHOST(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes) + else: + # Points - non-GHOST + nessy = PointsNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes) else: + # Regular grids nessy = LatLonNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level, create_nes=create_nes) @@ -82,3 +95,18 @@ def __is_rotated(dataset): return True else: return False + +def __is_points(dataset): + + if 'station' in dataset.dimensions: + return True + else: + return False + +def __is_points_GHOST(dataset): + + if 'N_flag_codes' in dataset.dimensions and 'N_qa_codes' in dataset.dimensions: + return True + else: + return False + \ No newline at end of file diff --git a/nes/nc_projections/default_nes.py b/nes/nc_projections/default_nes.py index 4bee1a0718919d75d6bb0217637dde7590ca4b64..88ecfe4f04d56ee043cc2bea6e11112cdf58657b 100644 --- a/nes/nc_projections/default_nes.py +++ b/nes/nc_projections/default_nes.py @@ -4,14 +4,13 @@ import sys import os import warnings import numpy as np +import datetime from xarray import open_dataset from netCDF4 import Dataset, num2date, date2num from mpi4py import MPI from cfunits import Units from numpy.ma.core import MaskError from copy import deepcopy -import datetime - class Nes(object): """ @@ -102,7 +101,7 @@ class Nes(object): Number of hours to remove from last time steps. parallel_method : str Indicates the parallelization method that you want. 
Default over Y axis - accepted values: ['Y', 'T'] + accepted values: ['X', 'Y', 'T'] """ # MPI Initialization @@ -125,22 +124,39 @@ class Nes(object): self.first_level = first_level self.last_level = last_level + # Define parallel method + self.parallel_method = parallel_method + # NetCDF object if create_nes: + + self.netcdf = None + self.dataset = None + + # Initialize variables self.variables = None - self._time = self.__get_time(create_nes) + # Complete dimensions + self._time = self.__get_time(create_nes, **kwargs) self._time_bnds = self.__get_time_bnds(create_nes) - self._lev = {'data': np.array([0]), 'units': '', 'positive': 'up'} - self.lev = deepcopy(self._lev) - self._lat, self._lon = self._create_centroids(**kwargs) + # Set axis limits for parallel reading + self.read_axis_limits = self.set_read_axis_limits() + + # Dimensions screening + self.time = self._time[self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + self.time_bnds = self._time_bnds + self.lev = deepcopy(self._lev) + + # Set NetCDF attributes self.global_attrs = self.__get_global_attributes(create_nes) + else: + if dataset is not None: if self.is_xarray: self.dataset = dataset @@ -157,29 +173,29 @@ class Nes(object): self.netcdf = self.__open_netcdf4() # Lazy variables - self.variables = self.__get_lazy_variables() + self.variables = self._get_lazy_variables() - # Complete dimension + # Complete dimensions self._time = self.__get_time() - self._lev = self._get_coordinate_dimension(['lev', 'level', 'lm']) + self._time_bnds = self.__get_time_bnds() + self._lev = self._get_coordinate_dimension(['lev', 'level', 'lm', 'plev']) self._lat = self._get_coordinate_dimension(['lat', 'latitude']) self._lon = self._get_coordinate_dimension(['lon', 'longitude']) - self._time_bnds = self.__get_time_bnds() - - # Axis limits - self.parallel_method = parallel_method + + # Set axis limits for parallel reading self.read_axis_limits = self.set_read_axis_limits() # Dimensions screening self.time = self._time[self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + self.time_bnds = self._time_bnds self.lev = self._get_coordinate_values(self._lev, 'Z') self.lat = self._get_coordinate_values(self._lat, 'Y') self.lon = self._get_coordinate_values(self._lon, 'X') - self.time_bnds = self._time_bnds + # Set axis limits for parallel writing self.write_axis_limits = self.set_write_axis_limits() - # NetCDF attributes + # Set NetCDF attributes self.global_attrs = self.__get_global_attributes() # Writing options @@ -190,6 +206,29 @@ class Nes(object): self._lat_dim = None self._lon_dim = None + def __del__(self): + + self.close() + for var_name, var_info in self.variables.items(): + del var_info['data'] + del self.variables + try: + del self.time + del self._time + del self.time_bnds + del self._time_bnds + del self.lev + del self._lev + del self.lat + del self._lat + del self.lon + del self._lon + except AttributeError: + pass + del self + + return None + def __getstate__(self): """ Read the CSV file that contains all the Reduce variable specifications. 
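
The hunk above adds a destructor and keeps the pickling hooks: `__getstate__` (despite its leftover docstring) returns the instance dict minus `comm`, `variables` and `netcdf`, so live MPI and netCDF handles are never serialized. A minimal sketch of what this enables, assuming a NES installation; the input path is hypothetical:

```python
# Sketch only: '/path/to/file.nc' is a hypothetical input path.
# Because __getstate__ drops the live handles, deepcopy/pickle can
# duplicate a Nes object; the copy is then re-attached to a communicator.
from mpi4py import MPI
from nes import open_netcdf

nessy = open_netcdf('/path/to/file.nc', parallel_method='Y')

state = nessy.__getstate__()
assert 'comm' not in state and 'netcdf' not in state  # live handles excluded

clone = nessy.copy(copy_vars=False)    # deepcopy relies on the same hooks
clone.set_communicator(MPI.COMM_SELF)  # re-attach a communicator explicitly
```
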
@@ -199,8 +238,10 @@ class Nes(object): state : dict Dictionary with the class parameters """ + d = self.__dict__ state = {k: d[k] for k in d if k not in ['comm', 'variables', 'netcdf']} + return state def __setstate__(self, state): @@ -212,7 +253,9 @@ class Nes(object): state: dict Dictionary with the class parameters """ + self.__dict__ = state + return None def copy(self, copy_vars=False): @@ -231,18 +274,21 @@ class Nes(object): nessy : Nes Copy of the Nes object """ + nessy = deepcopy(self) nessy.netcdf = None if copy_vars: nessy.variables = nessy.__get_lazy_variables() else: nessy.variables = {} + return nessy def clear_communicator(self): """ Erase the communicator and the parallelization indexes. """ + self.comm = None self.rank = 0 self.master = 0 @@ -259,6 +305,7 @@ class Nes(object): comm: MPI.COMM Communicator to be set """ + self.comm = comm self.rank = self.comm.Get_rank() self.master = self.rank == 0 @@ -266,6 +313,7 @@ class Nes(object): self.read_axis_limits = self.set_read_axis_limits() self.write_axis_limits = self.set_write_axis_limits() + return None def set_levels(self, levels): @@ -277,8 +325,10 @@ class Nes(object): levels : dict Dictionary with the new level information to be set """ + self._lev = deepcopy(levels) self.lev = deepcopy(levels) + return None def set_time_bnds(self, time_bnds): @@ -290,10 +340,11 @@ class Nes(object): time_bnds : list List with the new time bounds information to be set """ + correct_format = True for time_bnd in np.array(time_bnds).flatten(): if not isinstance(time_bnd, datetime.datetime): - print(f'{time_bnd} is not a datetime object') + print("{0} is not a datetime object".format(time_bnd)) correct_format = False if correct_format: if len(self._time) == len(time_bnds): @@ -309,6 +360,7 @@ class Nes(object): msg += 'There is at least one element in the time bounds to be set that is not a datetime object. ' msg += 'Time bounds will not be set.' 
warnings.warn(msg) + return None def free_vars(self, var_list): @@ -320,6 +372,7 @@ class Nes(object): var_list : list, str List (or single string) of the variables to be loaded """ + if isinstance(var_list, str): var_list = [var_list] @@ -331,6 +384,7 @@ class Nes(object): if self.variables is not None: if var_name in self.variables: del self.variables[var_name] + return None def keep_vars(self, var_list): @@ -342,12 +396,14 @@ class Nes(object): var_list : list, str List (or single string) of the variables to be loaded """ + if isinstance(var_list, str): var_list = [var_list] to_remove = list(set(self.variables.keys()).difference(set(var_list))) self.free_vars(to_remove) + return None def get_time_interval(self): @@ -359,8 +415,10 @@ class Nes(object): int Number of hours between time steps """ + time_interval = self._time[1] - self._time[0] time_interval = int(time_interval.seconds // 3600) + return time_interval # ================================================================================================================== @@ -371,6 +429,7 @@ class Nes(object): """ Modify variables to keep only the last time step """ + if self.parallel_method == 'T': raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") aux_time = self._time[0].replace(hour=0, minute=0, second=0, microsecond=0) @@ -386,6 +445,7 @@ class Nes(object): self.variables[var_name]['data'] = aux_data self.hours_start = 0 self.hours_end = 0 + return None def daily_statistic(self, op, type_op='calendar'): @@ -403,6 +463,7 @@ class Nes(object): - "alltsteps": Calculate a single time statistic with all the time steps. - "withoutt0": Calculate a single time statistic with all the time steps avoiding the first one. """ + if self.parallel_method == 'T': raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") time_interval = self.get_time_interval() @@ -503,6 +564,7 @@ class Nes(object): raise NotImplementedError("Statistic operation type '{0}' is not implemented.".format(type_op)) self.hours_start = 0 self.hours_end = 0 + return None # ================================================================================================================== @@ -519,6 +581,7 @@ class Nes(object): Dictionary with the 4D limits of the rank data to read. 
t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max """ + axis_limits = {'x_min': None, 'x_max': None, 'y_min': None, 'y_max': None, 'z_min': None, 'z_max': None, @@ -526,6 +589,9 @@ class Nes(object): if self.parallel_method == 'Y': y_len = self._lat['data'].shape[0] + if y_len < self.size: + + raise IndexError('More processors (size={0}) selected than Y elements (size={1})'.format(self.size, y_len)) axis_limits['y_min'] = (y_len // self.size) * self.rank if self.rank + 1 < self.size: axis_limits['y_max'] = (y_len // self.size) * (self.rank + 1) @@ -534,6 +600,8 @@ class Nes(object): axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) elif self.parallel_method == 'X': x_len = self._lon['data'].shape[-1] + if x_len < self.size: + raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format(self.size, x_len)) axis_limits['x_min'] = (x_len // self.size) * self.rank if self.rank + 1 < self.size: axis_limits['x_max'] = (x_len // self.size) * (self.rank + 1) @@ -544,15 +612,16 @@ class Nes(object): first_time_idx = self.get_time_id(self.hours_start, first=True) last_time_idx = self.get_time_id(self.hours_end, first=False) t_len = last_time_idx - first_time_idx + if t_len < self.size: + raise IndexError('More processors (size={0}) selected than T elements (size={1})'.format(self.size, t_len)) axis_limits['t_min'] = ((t_len // self.size) * self.rank) + first_time_idx if self.rank + 1 < self.size: axis_limits['t_max'] = ((t_len // self.size) * (self.rank + 1)) + first_time_idx - else: raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( - meth=self.parallel_method, accept=['Y'])) + meth=self.parallel_method, accept=['X', 'Y', 'T'])) - # Verical levels selection: + # Vertical levels selection: axis_limits['z_min'] = self.first_level if self.last_level == -1 or self.last_level is None: self.last_level = None @@ -561,6 +630,7 @@ class Nes(object): else: self.last_level += 1 axis_limits['z_max'] = self.last_level + return axis_limits def get_time_id(self, hours, first=True): @@ -580,23 +650,28 @@ class Nes(object): int Possition of the time array """ + from datetime import timedelta + if first: idx = self._time.index(self._time[0] + timedelta(hours=hours)) else: idx = self._time.index(self._time[-1] - timedelta(hours=hours)) + 1 + return idx def open(self): """ Open the NetCDF """ + if self.is_xarray: self.dataset = self.__open_dataset() self.netcdf = None else: self.dataset = None self.netcdf = self.__open_netcdf4() + return None def __open_dataset(self): @@ -608,6 +683,7 @@ class Nes(object): dataset : xr.Dataset Opened dataset """ + if self.master: warnings.filterwarnings('ignore') # Disabling warnings while reading MONARCH original file dataset = open_dataset(self.__ini_path, decode_coords='all') @@ -616,6 +692,7 @@ class Nes(object): dataset = None dataset = self.comm.bcast(dataset, root=0) self.dataset = dataset + return dataset def __open_netcdf4(self, mode='r'): @@ -632,23 +709,95 @@ class Nes(object): netcdf : Dataset Opened dataset """ + if self.size == 1: netcdf = Dataset(self.__ini_path, format="NETCDF4", mode=mode, parallel=False) else: netcdf = Dataset(self.__ini_path, format="NETCDF4", mode=mode, parallel=True, comm=self.comm, info=MPI.Info()) self.netcdf = netcdf + return netcdf def close(self): """ Close the NetCDF with netcdf4-python """ + if self.netcdf is not None: self.netcdf.close() self.netcdf = None + return None + def __get_dates_from_months(self, time, units, calendar): + 
""" + Calculates the number of days since the first date + in the 'time' list and store in new list: + This is useful when the units are 'months since', + which cannot be transformed to dates using num2date + + Parameter + --------- + time: list + Original time + units: str + CF compliant time units + calendar: str + Original calendar + + Returns + ------- + time: list + CF compliant time + """ + + start_date_str = time.units.split('since')[1].lstrip() + start_date = datetime.datetime(int(start_date_str[0:4]), + int(start_date_str[5:7]), + int(start_date_str[8:10])) + new_time = [] + + for current_date in time: + + # Transform current_date into number of days since base date + current_date = num2date(current_date, self.__parse_time_unit(units), calendar=calendar) + + # Calculate number of days between base date and the other dates + n_days = (current_date - start_date).days + + # Store in list + new_time.append(n_days) + + return new_time + + def __parse_time(self, time): + """ + Parses the time to be CF compliant + + Parameter + --------- + time: str + Original time + + Returns + ------- + time : str + CF compliant time + """ + + units = time.units + if not hasattr(time, 'calendar'): + calendar = 'standard' + else: + calendar = time.calendar + + if 'months since' in time.units: + units = 'days since ' + time.units.split('since')[1].lstrip() + time = self.__get_dates_from_months(time, units, calendar) + + return time, units, calendar + @staticmethod def __parse_time_unit(t_units): """ @@ -662,14 +811,16 @@ class Nes(object): Returns ------- t_units : str - CF compliant time units. + CF compliant time units """ + if 'h @' in t_units: t_units = 'hour since {0}-{1}-{2} {3}:{4}:{5} UTC'.format( t_units[4:8], t_units[8:10], t_units[10:12], t_units[13:15], t_units[15:17], t_units[17:-4]) + return t_units - def __get_time(self, create_nes=False): + def __get_time(self, create_nes=False, **kwargs): """ Get the NetCDF time values @@ -678,22 +829,28 @@ class Nes(object): time : list List of times (datetime) of the NetCDF data """ + if self.is_xarray: time = self.variables['time'] else: if self.master: if create_nes: - units = "days since 1996-12-31 00:00:00" - calendar = "gregorian" - time = num2date([0.], units=units, calendar=calendar) + if 'time' in kwargs: + time = kwargs['time'] + else: + units = 'days since 1996-12-31 00:00:00' + calendar = 'standard' + time = num2date([0.], units=units, calendar=calendar) else: nc_var = self.netcdf.variables['time'] - time = num2date(nc_var[:], self.__parse_time_unit(nc_var.units), calendar=nc_var.calendar) + nc_var, units, calendar = self.__parse_time(nc_var) + time = num2date(nc_var[:], self.__parse_time_unit(units), calendar=calendar) time = [aux.replace(second=0, microsecond=0) for aux in time] else: time = None time = self.comm.bcast(time, root=0) self.free_vars('time') + return time def __get_time_bnds(self, create_nes=False): @@ -705,6 +862,7 @@ class Nes(object): time : list List of time bounds (datetime) of the NetCDF data """ + if self.is_xarray: time_bnds = self.variables['time_bnds'] else: @@ -742,19 +900,25 @@ class Nes(object): nc_var : dict Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. 
""" + if isinstance(possible_names, str): possible_names = [possible_names] - dimension_name = set(possible_names).intersection(set(self.variables.keys())).pop() - - if self.is_xarray: - nc_var = self.dataset[dimension_name] - else: - nc_var = self.variables[dimension_name].copy() - nc_var['data'] = self.netcdf.variables[dimension_name][:] - if nc_var['units'] in ['unitless', '-']: - nc_var['units'] = '' - - self.free_vars(dimension_name) + + try: + dimension_name = set(possible_names).intersection(set(self.variables.keys())).pop() + if self.is_xarray: + nc_var = self.dataset[dimension_name] + else: + nc_var = self.variables[dimension_name].copy() + nc_var['data'] = self.netcdf.variables[dimension_name][:] + if hasattr(nc_var, 'units'): + if nc_var['units'] in ['unitless', '-']: + nc_var['units'] = '' + self.free_vars(dimension_name) + except KeyError: + nc_var = {'data': np.array([0]), + 'units': '' + } return nc_var @@ -773,10 +937,12 @@ class Nes(object): values : dict Dictionary with the portion of data corresponding to the rank """ + values = deepcopy(coordinate_info) if isinstance(coordinate_info, list): values = {'data': deepcopy(coordinate_info)} coordinate_len = len(values['data'].shape) + if coordinate_axis == 'Y': if coordinate_len == 1: values['data'] = values['data'][self.read_axis_limits['y_min']:self.read_axis_limits['y_max']] @@ -799,9 +965,10 @@ class Nes(object): else: raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( dim=values['data'].shape)) + return values - def __get_lazy_variables(self): + def _get_lazy_variables(self): """ Get all the variables information. @@ -816,6 +983,7 @@ class Nes(object): 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, ...} """ + if self.is_xarray: variables = self.dataset.variables else: @@ -825,6 +993,7 @@ class Nes(object): variables[var_name] = {} variables[var_name]['data'] = None variables[var_name]['dimensions'] = var_info.dimensions + for attrname in var_info.ncattrs(): # Avoiding some attributes if attrname not in ['missing_value', '_FillValue']: @@ -838,7 +1007,7 @@ class Nes(object): return variables - def __read_variable(self, var_name): + def _read_variable(self, var_name): """ Read the corresponding variable data according to the current rank. @@ -852,8 +1021,11 @@ class Nes(object): data: np.array Portion of the variable data corresponding to the rank. """ + nc_var = self.netcdf.variables[var_name] var_dims = nc_var.dimensions + + # Read data in 4 dimensions if len(var_dims) < 2: data = nc_var[:] elif len(var_dims) == 2: @@ -879,12 +1051,12 @@ class Nes(object): else: raise NotImplementedError('Error with {0}. 
Only can be read netCDF with 4 dimensions or less'.format( var_name)) - # Missing to nan try: data[data.mask == True] = np.nan - except (AttributeError, MaskError): + except (AttributeError, MaskError, ValueError): pass + return data def load(self, var_list=None): @@ -898,6 +1070,7 @@ class Nes(object): var_list : list, str List (or single string) of the variables to be loaded """ + if self.netcdf is None: self.__open_dataset() close = True @@ -911,13 +1084,14 @@ class Nes(object): if self.print_info: print("Rank {0:03d}: Loading {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(var_list))) if self.variables[var_name]['data'] is None: - self.variables[var_name]['data'] = self.__read_variable(var_name) + self.variables[var_name]['data'] = self._read_variable(var_name) if self.print_info: print("Rank {0:03d}: Loaded {1} var ({2})".format( self.rank, var_name, self.variables[var_name]['data'].shape)) if close: self.close() + return None def __get_global_attributes(self, create_nes=False): @@ -929,6 +1103,7 @@ class Nes(object): gl_attrs : dict Dictionary with the netCDF global attributes """ + gl_attrs = {} if self.is_xarray: gl_attrs = self.dataset.attrs @@ -936,6 +1111,7 @@ class Nes(object): if not create_nes: for attrname in self.netcdf.ncattrs(): gl_attrs[attrname] = getattr(self.netcdf, attrname) + return gl_attrs # ================================================================================================================== @@ -952,6 +1128,7 @@ class Nes(object): Dictionary with the 4D limits of the rank data to write. t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max """ + axis_limits = {'x_min': None, 'x_max': None, 'y_min': None, 'y_max': None, 'z_min': None, 'z_max': None, @@ -972,10 +1149,10 @@ class Nes(object): axis_limits['t_min'] = ((t_len // self.size) * self.rank) if self.rank + 1 < self.size: axis_limits['t_max'] = (t_len // self.size) * (self.rank + 1) - else: raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( meth=self.parallel_method, accept=['X', 'Y', 'T'])) + return axis_limits def _create_dimensions(self, netcdf): @@ -987,22 +1164,26 @@ class Nes(object): netcdf : Dataset netcdf4-python opened Dataset """ + netcdf.createDimension('time', None) - netcdf.createDimension('lev', len(self.lev['data'])) if self._time_bnds is not None: netcdf.createDimension('time_nv', 2) + netcdf.createDimension('lev', len(self.lev['data'])) + netcdf.createDimension('lon', len(self._lon['data'])) + netcdf.createDimension('lat', len(self._lat['data'])) return None def _create_dimension_variables(self, netcdf): """ - Create the 'lev', 'time', 'lat' and 'lon' variables. + Create the 'lev' and 'time' variables. 
Parameters ---------- netcdf : Dataset netcdf4-python opened Dataset """ + # TIMES time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) time_var.units = 'hours since {0}'.format( @@ -1016,7 +1197,7 @@ class Nes(object): time_var.set_collective(True) time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True): self.get_time_id(self.hours_end, first=False)], - time_var.units, calendar='standard') + time_var.units, time_var.calendar) # TIME BOUNDS if self._time_bnds is not None: @@ -1056,7 +1237,7 @@ class Nes(object): return None - def __create_variables(self, netcdf, chunking=False): + def _create_variables(self, netcdf, chunking=False): """ Create the netCDF file variables @@ -1067,6 +1248,7 @@ class Nes(object): chunking : bool Indicates if you want to chunk the output netCDF """ + if self.variables is not None: for i, (var_name, var_dict) in enumerate(self.variables.items()): if var_dict['data'] is not None: @@ -1095,7 +1277,6 @@ class Nes(object): for att_name, att_value in var_dict.items(): if att_name == 'data': - try: var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], self.write_axis_limits['z_min']:self.write_axis_limits['z_max'], @@ -1141,6 +1322,12 @@ class Nes(object): """ return None + def _create_metadata(self, netcdf): + """ + Must be implemented on inner class + """ + return None + def _set_crs(self, netcdf): """ Must be implemented on inner class @@ -1175,6 +1362,7 @@ class Nes(object): chunking: bool Indicates if you want to chunk the output netCDF """ + # Open NetCDF if self.print_info: print("Rank {0:03d}: Creating {1}".format(self.rank, path)) @@ -1192,10 +1380,10 @@ class Nes(object): print("Rank {0:03d}: Dimensions done".format(self.rank)) # Create variables - self.__create_variables(netcdf, chunking=chunking) + self._create_variables(netcdf, chunking=chunking) # Create metadata - self._set_crs(netcdf) + self._create_metadata(netcdf) # Close NetCDF if self.global_attrs is not None: @@ -1203,7 +1391,6 @@ class Nes(object): netcdf.setncattr(att_name, att_value) netcdf.setncattr('Conventions', 'CF-1.7') - self.comm.Barrier() netcdf.close() return None @@ -1225,6 +1412,7 @@ class Nes(object): chunking : bool Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False """ + old_info = self.print_info self.print_info = info @@ -1234,12 +1422,13 @@ class Nes(object): else: # if serial: if serial and self.size > 1: - data = self.__gather_data() + data = self._gather_data() if self.master: new_nc = self.copy(copy_vars=False) new_nc.set_communicator(MPI.COMM_SELF) new_nc.variables = data new_nc.__to_netcdf_py(path) + else: self.__to_netcdf_py(path, chunking=chunking) @@ -1247,7 +1436,7 @@ class Nes(object): return None - def __gather_data(self): + def _gather_data(self): """ Gather all the variable data into the MPI rank 0 to perform a serial write. @@ -1256,6 +1445,7 @@ class Nes(object): data_list: dict Variables dictionary with all the data from all the ranks. 
""" + data_list = deepcopy(self.variables) for var_name in data_list.keys(): try: diff --git a/nes/nc_projections/latlon_nes.py b/nes/nc_projections/latlon_nes.py index 1fda96ea3615dbcebe614859e7dd779e3b9b5fed..8a865ccfe265f216657b729a2b75eb696bbaa29a 100644 --- a/nes/nc_projections/latlon_nes.py +++ b/nes/nc_projections/latlon_nes.py @@ -1,7 +1,8 @@ #!/usr/bin/env python -from nes.nc_projections.default_nes import Nes import numpy as np +from nes.nc_projections.default_nes import Nes + class LatLonNes(Nes): """ @@ -38,12 +39,13 @@ class LatLonNes(Nes): (Not working) Indicates if you want to use xarray as default parallel_method : str Indicates the parallelization method that you want. Default over Y axis - accepted values: ['Y', 'T'] + accepted values: ['X', 'Y', 'T'] avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(LatLonNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, @@ -51,30 +53,19 @@ class LatLonNes(Nes): **kwargs) if create_nes: - self._lat, self._lon = self._create_centroids(**kwargs) + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + # Set axis limits for parallel writing + self.write_axis_limits = self.set_write_axis_limits() + self._var_dim = ('lat', 'lon') self._lat_dim = ('lat',) self._lon_dim = ('lon',) self.free_vars('crs') - def _create_dimensions(self, netcdf): - """ - Create the 'lat', 'lon' dimensions and the super dimensions ('lev', 'time'). - - Parameters - ---------- - netcdf : Dataset - NetCDF object. - """ - super(LatLonNes, self)._create_dimensions(netcdf) - - netcdf.createDimension('lon', len(self._lon['data'])) - netcdf.createDimension('lat', len(self._lat['data'])) - - return None - def _create_centroids(self, **kwargs): """ Calculate center latitudes and longitudes from grid details. @@ -84,6 +75,7 @@ class LatLonNes(Nes): netcdf : Dataset NetCDF object. """ + # Calculate center latitudes lat_c_orig = kwargs['lat_orig'] + (kwargs['inc_lat'] / 2) self.center_lats = np.linspace( @@ -115,6 +107,7 @@ class LatLonNes(Nes): var : Variable netCDF4-python variable object. """ + var.grid_mapping = 'crs' return None @@ -136,3 +129,18 @@ class LatLonNes(Nes): mapping.inverse_flattening = 0 return None + + def _create_metadata(self, netcdf): + """ + Create metadata variables + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # Set crs + self._set_crs(netcdf) + + return None \ No newline at end of file diff --git a/nes/nc_projections/points_nes.py b/nes/nc_projections/points_nes.py index 316d0db67a03661982518134a3b9559c5e172e74..98252c4a62d053024dc9f8f6684db33eed8aa0a7 100644 --- a/nes/nc_projections/points_nes.py +++ b/nes/nc_projections/points_nes.py @@ -1,7 +1,15 @@ #!/usr/bin/env python +import sys +import warnings +import numpy as np +import pandas as pd +from copy import deepcopy +from netCDF4 import Dataset, date2num, stringtochar +from numpy.ma.core import MaskError from nes.nc_projections.default_nes import Nes + class PointsNes(Nes): """ @@ -17,7 +25,7 @@ class PointsNes(Nes): Tuple with the name of the dimensions of the Longitude values. ('lon',) for a regular latitude-longitude projection. 
""" - def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, **kwargs): """ @@ -37,12 +45,13 @@ class PointsNes(Nes): (Not working) Indicates if you want to use xarray as default parallel_method : str Indicates the parallelization method that you want. Default over Y axis - accepted values: ['Y', 'T'] + accepted values: ['X'] avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(PointsNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, @@ -50,28 +59,436 @@ class PointsNes(Nes): **kwargs) if create_nes: - self._lat, self._lon = self._create_centroids(**kwargs) + # Complete dimensions + self._station = {'data': np.arange(len(self._lon['data']))} + + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'X') + self.lon = self._get_coordinate_values(self._lon, 'X') + self.station = deepcopy(self._station) + else: + self._station = self._get_coordinate_dimension(['station']) + self.station = self._get_coordinate_values(self._station, 'X') + self.strlen = self._get_strlen() - self._var_dim = ('lat', 'lon') - self._lat_dim = ('lat',) - self._lon_dim = ('lon',) + # Set axis limits for parallel writing + self.write_axis_limits = self.set_write_axis_limits() + + self._var_dim = ('station',) + self._lat_dim = ('station',) + self._lon_dim = ('station',) def _create_dimensions(self, netcdf): """ - Create the 'lat', 'lon' dimensions and the super dimensions ('lev', 'time'). + Create the 'lev', 'time_nv', 'station' dimensions. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + netcdf.createDimension('time', None) + if self._time_bnds is not None: + netcdf.createDimension('time_nv', 2) + + # The number of longitudes is equal to the number of stations + netcdf.createDimension('station', len(self._lon['data'])) + + if hasattr(self, 'strlen'): + if self.strlen is not None: + netcdf.createDimension('strlen', self.strlen) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'time', 'time_bnds' and 'station' variables. Parameters ---------- netcdf : Dataset NetCDF object. 
""" - super(PointsNes, self)._create_dimensions(netcdf) - netcdf.createDimension('lon', len(self._lon['data'])) - netcdf.createDimension('lat', len(self._lat['data'])) + # TIMES + time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + time_var.units = 'hours since {0}'.format( + self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S")) + time_var.standard_name = "time" + time_var.calendar = 'standard' + time_var.long_name = "time" + if self._time_bnds is not None: + time_var.bounds = 'time_bnds' + if self.size > 1: + time_var.set_collective(True) + time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True): + self.get_time_id(self.hours_end, first=False)], + time_var.units, time_var.calendar) + + # TIME BOUNDS + if self._time_bnds is not None: + time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, + complevel=self.zip_lvl) + if self.size > 1: + time_bnds_var.set_collective(True) + time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') + + # STATIONS + stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + stations.units = "" + stations.axis = "X" + stations.long_name = "" + stations.standard_name = "station" + if self.size > 1: + stations.set_collective(True) + stations[:] = self._station['data'] return None + def _get_coordinate_dimension(self, possible_names): + """ + Read the coordinate dimension data. + + This will read the complete data of the coordinate + + Parameters + ---------- + possible_names: list, str + List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']) + + Returns + ------- + nc_var : dict + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + """ + + nc_var = super(PointsNes, self)._get_coordinate_dimension(possible_names) + + if isinstance(possible_names, str): + possible_names = [possible_names] + + if 'station' in possible_names: + nc_var['data'] = np.arange(len(self._lon['data'])) + + return nc_var + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. Accepted values: ['X'] + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank + """ + + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + + return values + + def _get_lazy_variables_not_used(self): + """ + Get all the variables information. + + Returns + ------- + variables : dict + Dictionary with the variable name as key and another dictionary as value. 
+ The value dictionary will have the 'data' key with None as value and all the variable attributes as the + other keys. + e.g. + {'var_name_1': {'data': None, 'attr_1': value_1_1, 'attr_2': value_1_2, ...}, + 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, + ...} + """ + + if self.is_xarray: + variables = self.dataset.variables + else: + if self.master: + variables = {} + for var_name, var_info in self.netcdf.variables.items(): + variables[var_name] = {} + variables[var_name]['data'] = None + # Remove strlen as a dimension + if 'strlen' in var_info.dimensions: + variables[var_name]['dimensions'] = tuple([dim for dim in var_info.dimensions + if dim != 'strlen']) + else: + variables[var_name]['dimensions'] = var_info.dimensions + + for attrname in var_info.ncattrs(): + # Avoiding some attributes + if attrname not in ['missing_value', '_FillValue']: + value = getattr(var_info, attrname) + if value in ['unitless', '-']: + value = '' + variables[var_name][attrname] = value + else: + variables = None + variables = self.comm.bcast(variables, root=0) + + return variables + + def _get_strlen(self): + """ + Read the string length dimension of some variables. + + Returns + ------- + strlen: int + String length. + """ + + if 'strlen' in self.netcdf.dimensions: + strlen = self.netcdf.dimensions['strlen'].size + else: + strlen = None + + return strlen + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read. + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank. + """ + + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + """ + # Remove strlen (maximum number of characters that a string can have) from dimensions and join characters + if 'strlen' in nc_var.dimensions: + nc_var = np.array([''.join(i) for i in np.char.decode(nc_var[:].data)]) + var_dims = tuple([', '.join(dim for dim in var_dims if dim != 'strlen')]) + """ + + # Read data in 1 or 2 dimensions + if len(var_dims) < 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif len(var_dims) == 2: + if 'strlen' in nc_var.dimensions: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], :] + else: + data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError('Error with {0}. Only netCDF variables with 2 dimensions or less can be read'.format( + var_name)) + + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables. + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + chunking : bool + Indicates if you want to chunk the output netCDF + """ + + if self.variables is not None: + for i, (var_name, var_dict) in enumerate(self.variables.items()): + if var_dict['data'] is not None: + + # Set dimensions + if 'dimensions' in var_dict.keys(): + var_dims = var_dict['dimensions'] + else: + if len(var_dict['data'].shape) == 1: + # For data that depends only on station (e.g. station_code) + var_dims = self._var_dim + else: + # For data that is dependent on time and station (e.g.
PM10) + var_dims = ('time',) + self._var_dim + + # Add strlen as a dimension if needed + if var_dict['dtype'] == str: + var_dims += ('strlen',) + + # Ensure data is of type numpy array (to create NES) + if not isinstance(var_dict['data'], (np.ndarray, np.generic)): + try: + var_dict['data'] = var_dict['data'].to_numpy() + except AttributeError: + raise AttributeError("Data for variable {0} must be a numpy array (np.ndarray or np.generic).".format(var_name)) + + # Set dtype + if 'dtype' in var_dict.keys(): + var_dtype = var_dict['dtype'] + if var_dtype != var_dict['data'].dtype: + msg = 'WARNING!!! ' + msg += 'Different data types for variable {0}. '.format(var_name) + msg += 'Input dtype={0}, data dtype={1}.'.format(var_dtype, + var_dict['data'].dtype) + warnings.warn(msg) + try: + var_dict['data'] = var_dict['data'].astype(var_dtype) + except Exception: # TODO: Detect the concrete exception + raise TypeError('It was not possible to cast the data of {0} to the input dtype.'.format(var_name)) + else: + var_dtype = var_dict['data'].dtype + + # Transform objects into strings (e.g. for ESDAC iwahashi landform in GHOST) + if var_dtype == np.dtype(object): + var_dtype = np.dtype(str) + + if self.print_info: + print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + + try: + if not chunking: + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + else: + if self.master: + chunk_size = var_dict['data'].shape + # TODO: Change chunk size (add strlen) as tuple + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl, + chunksizes=chunk_size) + + if self.print_info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.print_info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + if len(var_dict['data'].shape) == 1: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + elif len(var_dict['data'].shape) == 2: + if 'strlen' in var_dict['dimensions']: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + else: + try: + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes.
out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + if self.print_info: + print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + elif att_name not in ['chunk_size', 'var_dims', 'dimensions', 'dtype']: + var.setncattr(att_name, att_value) + self._set_var_crs(var) + if self.print_info: + print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + except Exception as e: + print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) + # print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name), + # file=sys.stderr) + raise e + else: + msg = 'WARNING!!! ' + msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + warnings.warn(msg) + + def _gather_data(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. + """ + + data_list = deepcopy(self.variables) + for var_name, var_info in data_list.items(): + try: + # noinspection PyArgumentList + data_aux = self.comm.gather(data_list[var_name]['data'], root=0) + if self.rank == 0: + shp_len = len(data_list[var_name]['data'].shape) + if self.parallel_method == 'X': + if shp_len == 1: + # concatenate over first axis (station) when dims are (station) + axis = 0 + elif shp_len == 2: + if 'strlen' in var_info['dimensions']: + # concatenate over first axis (station) when dims are (station, strlen) + axis = 0 + else: + # concatenate over second axis (station) when dims are (time, station) + axis = 1 + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X'])) + data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) + except Exception as e: + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list + def _create_centroids(self, **kwargs): """ Calculate center latitudes and longitudes from points. @@ -81,10 +498,74 @@ class PointsNes(Nes): netcdf : Dataset NetCDF object. """ + # Calculate center latitudes self.center_lats = kwargs['lat'] # Calculate center longitudes self.center_lons = kwargs['lon'] - return {'data': self.center_lats}, {'data': self.center_lons} \ No newline at end of file + return {'data': self.center_lats}, {'data': self.center_lons} + + def _create_metadata(self, netcdf): + """ + Create the metadata variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object.
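+ + Notes + ----- + A hedged sketch of the resulting layout (file name illustrative): both coordinates come out as 1D per-station variables, + e.g. Dataset('points.nc').variables['lat'].dimensions == ('station',), since self._lat_dim is ('station',) for this projection.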
+ """ + + # LATITUDES + lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if self.size > 1: + lats.set_collective(True) + lats[:] = self._lat['data'] + + # LONGITUDES + lons = netcdf.createVariable('lon', np.float64, self._lon_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if self.size > 1: + lons.set_collective(True) + lons[:] = self._lon['data'] + + return None + + def set_read_axis_limits(self): + """ + Calculate the 4D reading axis limits + + Returns + ------- + dict + Dictionary with the 2D limits of the rank data to read. + t_min, t_max, x_min and x_max + """ + + axis_limits = {'x_min': None, 'x_max': None, + 't_min': None, 't_max': None} + + if self.parallel_method == 'X': + x_len = self._lon['data'].shape[-1] + if x_len < self.size: + raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format(self.size, x_len)) + axis_limits['x_min'] = (x_len // self.size) * self.rank + if self.rank + 1 < self.size: + axis_limits['x_max'] = (x_len // self.size) * (self.rank + 1) + # Spin up + axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) + axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + else: + raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X'])) + + return axis_limits diff --git a/nes/nc_projections/points_nes_ghost.py b/nes/nc_projections/points_nes_ghost.py new file mode 100644 index 0000000000000000000000000000000000000000..e8aad07f9d2af92cb52e6be89f65c103d61d4cf9 --- /dev/null +++ b/nes/nc_projections/points_nes_ghost.py @@ -0,0 +1,342 @@ +#!/usr/bin/env python + +import sys +import warnings +import numpy as np +from numpy.ma.core import MaskError +from copy import deepcopy +from nes.nc_projections.default_nes import Nes +from nes.nc_projections.points_nes import PointsNes + + +class PointsNesGHOST(PointsNes): + """ + + Attributes + ---------- + _var_dim : tuple + Tuple with the name of the Y and X dimensions for the variables. + ('lat', 'lon') for a regular latitude-longitude projection. + _lat_dim : tuple + Tuple with the name of the dimensions of the Latitude values. + ('lat',) for a regular latitude-longitude projection. + _lon_dim : tuple + Tuple with the name of the dimensions of the Longitude values. + ('lon',) for a regular latitude-longitude projection. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + **kwargs): + """ + Initialize the PointsNes class + + Parameters + ---------- + comm: MPI.COMM + Path to the CSV file that contains all the information. + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['Y', 'T'] + avoid_first_hours : int + Number of hours to remove from first time steps. 
+ avoid_last_hours : int + Number of hours to remove from last time steps. + """ + + super(PointsNesGHOST, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes, + **kwargs) + + self._flag = self._get_coordinate_dimension(['flag']) + self.flag = self._get_coordinate_values(self._flag, 'X') + + self._qa = self._get_coordinate_dimension(['qa']) + self.qa = self._get_coordinate_values(self._qa, 'X') + + def _create_dimensions(self, netcdf): + """ + Create the 'N_flag_codes' and 'N_qa_codes' dimensions and the super dimensions ('time', 'station'). + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + super(PointsNesGHOST, self)._create_dimensions(netcdf) + + netcdf.createDimension('N_flag_codes', self._flag['data'].shape[2]) + netcdf.createDimension('N_qa_codes', self._qa['data'].shape[2]) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'flag' and 'qa' variables and the super variables ('time', 'time_bnds', 'station'). + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + super(PointsNesGHOST, self)._create_dimension_variables(netcdf) + + # N FLAG CODES + flag = netcdf.createVariable('flag', np.int64, ('station', 'time', 'N_flag_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + flag.units = "" + flag.axis = "" + flag.long_name = "" + flag.standard_name = "flag" + if self.size > 1: + flag.set_collective(True) + flag[:] = self._flag['data'] + + # N QA CODES + qa = netcdf.createVariable('qa', np.int64, ('station', 'time', 'N_qa_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + qa.units = "" + qa.axis = "" + qa.long_name = "" + qa.standard_name = "qa" + if self.size > 1: + qa.set_collective(True) + qa[:] = self._qa['data'] + + self.free_vars(('flag', 'qa')) + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion. + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. Accepted values: ['X'] + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank. + """ + + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif coordinate_len == 3: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], :] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + + return values + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read. + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank.
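+ + Notes + ----- + GHOST files are dimensioned (station, time[, N_flag_codes / N_qa_codes]) rather than (time, station), so the slicing below puts the station limits first; a hedged sketch for a 2D variable (limit names shortened): data = nc_var[x_min:x_max, t_min:t_max].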
+ """ + + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + # Remove strlen (maximum number of characters that a string can have) from dimensions and join characters + if 'strlen' in nc_var.dimensions: + nc_var = np.array([''.join(i) for i in np.char.decode(nc_var[:].data)]) + var_dims = tuple([', '.join(dim for dim in var_dims if dim != 'strlen')]) + + # Read data in 1 or 2 dimensions + # TODO: Ask Dene why x, t instead of t, x + if len(var_dims) < 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif len(var_dims) == 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif len(var_dims) == 3: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + :] + else: + raise NotImplementedError('Error with {0}. Only can be read netCDF with 3 dimensions or less'.format( + var_name)) + + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + chunking : bool + Indicates if you want to chunk the output netCDF + """ + + if self.variables is not None: + for i, (var_name, var_dict) in enumerate(self.variables.items()): + if var_dict['data'] is not None: + + # Define dimensions depending on the type of variable + if len(var_dict['data'].shape) == 1: + # Metadata + var_dims = self._var_dim + elif len(var_dict['data'].shape) == 2: + # Different from metadata (e.g. concentrations of pm10) + var_dims = self._var_dim + ('time',) + else: + # Flags and qa variables + if var_name == 'flag': + var_dims = self._var_dim + ('time', 'N_flag_codes',) + elif var_name == 'qa': + var_dims = self._var_dim + ('time', 'N_qa_codes',) + + # ESDAC iwahashi landform and other vars are given as objects, transform to strings + if var_dict['data'].dtype == np.dtype(object): + var_dtype = np.dtype(str) + else: + var_dtype = var_dict['data'].dtype + + if self.print_info: + print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) + + try: + if not chunking: + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + else: + if self.master: + chunk_size = var_dict['data'].shape + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl, chunksizes=chunk_size) + + if self.print_info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.print_info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + if len(var_dict['data'].shape) == 1: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. 
out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + elif len(var_dict['data'].shape) == 2: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape, + att_value.shape)) + elif len(var_dict['data'].shape) == 3: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], + self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + :].shape, + att_value.shape)) + + if self.print_info: + print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: + var.setncattr(att_name, att_value) + self._set_var_crs(var) + if self.print_info: + print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + except Exception as e: + print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) + # print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name), + # file=sys.stderr) + raise e + else: + msg = 'WARNING!!! ' + msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + warnings.warn(msg) + + def _gather_data(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. + """ + + data_list = deepcopy(self.variables) + for var_name in data_list.keys(): + try: + # noinspection PyArgumentList + data_aux = self.comm.gather(data_list[var_name]['data'], root=0) + if self.rank == 0: + if self.parallel_method == 'X': + axis = 0 + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented.
Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X'])) + data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) + except Exception as e: + print("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error has occurred while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list \ No newline at end of file diff --git a/nes/nc_projections/rotated_nes.py b/nes/nc_projections/rotated_nes.py index fa5b921d5723dc24c8f1a3f6b723ba3b91e24e97..3a14b485a370b8bc25fcd97e35432b8efe8707ec 100644 --- a/nes/nc_projections/rotated_nes.py +++ b/nes/nc_projections/rotated_nes.py @@ -1,8 +1,10 @@ #!/usr/bin/env python -from nes.nc_projections.default_nes import Nes -from cfunits import Units import numpy as np +import math +from cfunits import Units +from nes.nc_projections.default_nes import Nes + class RotatedNes(Nes): """ @@ -50,12 +52,13 @@ class RotatedNes(Nes): (Not working) Indicates if you want to use xarray as default parallel_method : str Indicates the parallelization method that you want. Default over Y axis - accepted values: ['Y', 'T'] + accepted values: ['X', 'Y', 'T'] avoid_first_hours : int Number of hours to remove from first time steps. avoid_last_hours : int Number of hours to remove from last time steps. """ + super(RotatedNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method, @@ -64,17 +67,24 @@ class RotatedNes(Nes): **kwargs) if create_nes: - self._rlat, self._rlon = self._create_centroids(**kwargs) - + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') else: + # Complete dimensions self._rlat = self._get_coordinate_dimension('rlat') self._rlon = self._get_coordinate_dimension('rlon') - self.rlat = self._get_coordinate_values(self._rlat, 'Y') - self.rlon = self._get_coordinate_values(self._rlon, 'X') + # Dimensions screening + self.rlat = self._get_coordinate_values(self._rlat, 'Y') + self.rlon = self._get_coordinate_values(self._rlon, 'X') + # Get projection details self.projection_data = self.get_projection_data(create_nes, **kwargs) + # Set axis limits for parallel writing + self.write_axis_limits = self.set_write_axis_limits() + self._var_dim = ('rlat', 'rlon') self._lat_dim = ('rlat', 'rlon') self._lon_dim = ('rlat', 'rlon') @@ -88,6 +98,7 @@ class RotatedNes(Nes): projection : dict Dictionary with the projection data """ + if create_nes: projection = {'data': None, 'dimensions': (), @@ -111,6 +122,7 @@ class RotatedNes(Nes): netcdf : Dataset NetCDF object.
""" + super(RotatedNes, self)._create_dimensions(netcdf) netcdf.createDimension('rlon', len(self._rlon['data'])) @@ -130,6 +142,7 @@ class RotatedNes(Nes): super(RotatedNes, self)._create_dimension_variables(netcdf) + # ROTATED LATITUDES rlat = netcdf.createVariable('rlat', self._rlat['data'].dtype, ('rlat',)) rlat.long_name = "latitude in rotated pole grid" rlat.units = Units("degrees", formatted=True).units @@ -138,7 +151,7 @@ class RotatedNes(Nes): rlat.set_collective(True) rlat[:] = self._rlat['data'] - # Rotated Longitude + # ROTATED LONGITUDES rlon = netcdf.createVariable('rlon', self._rlon['data'].dtype, ('rlon',)) rlon.long_name = "longitude in rotated pole grid" rlon.units = Units("degrees", formatted=True).units @@ -149,6 +162,75 @@ class RotatedNes(Nes): return None + def _create_rotated_coordinates(self, **kwargs): + """ + Calculate rotated latitudes and longitudes from grid details. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # Calculate rotated latitudes + self.n_lat = int((abs(kwargs['south_boundary']) / kwargs['inc_rlat']) * 2 + 1) + self.rotated_lats = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] + + (kwargs['inc_rlat'] * (self.n_lat - 1)), self.n_lat) + # Calculate rotated longitudes + self.n_lon = int((abs(kwargs['west_boundary']) / kwargs['inc_rlon']) * 2 + 1) + self.rotated_lons = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] + + (kwargs['inc_rlon'] * (self.n_lon - 1)), self.n_lon) + + return {'data': self.rotated_lats}, {'data': self.rotated_lons} + + def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180, **kwargs): + """ + Calculate the unrotated coordinates using the rotated ones. + + :param lon_deg: Rotated longitude coordinate. + :type lon_deg: numpy.array + + :param lat_deg: Rotated latitude coordinate. + :type lat_deg: numpy.array + + :param lon_min: Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360) + :type lon_min: float + + :return: Unrotated coordinates. Longitudes, Latitudes + :rtype: tuple(numpy.array, numpy.array) + """ + + degrees_to_radians = math.pi / 180. + + tph0 = kwargs['centre_lat'] * degrees_to_radians + tlm = lon_deg * degrees_to_radians + tph = lat_deg * degrees_to_radians + tlm0d = -180 + kwargs['centre_lon'] + ctph0 = np.cos(tph0) + stph0 = np.sin(tph0) + + stlm = np.sin(tlm) + ctlm = np.cos(tlm) + stph = np.sin(tph) + ctph = np.cos(tph) + + # Latitudes + sph = (ctph0 * stph) + (stph0 * ctph * ctlm) + sph[sph > 1.] = 1. + sph[sph < -1.] = -1. + aph = np.arcsin(sph) + aphd = aph / degrees_to_radians + + # Longitudes + anum = ctph * stlm + denom = (ctlm * ctph - stph0 * sph) / ctph0 + relm = np.arctan2(anum, denom) - math.pi + almd = relm / degrees_to_radians + tlm0d + almd[almd > (lon_min + 360)] -= 360 + almd[almd < lon_min] += 360 + + return almd, aphd + def _create_centroids(self, **kwargs): """ Calculate center latitudes and longitudes from grid details. @@ -158,13 +240,14 @@ class RotatedNes(Nes): netcdf : Dataset NetCDF object. 
""" - # Calculate center latitudes - self.center_lats = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] + - (kwargs['inc_rlat'] * (kwargs['n_lat'] - 1)), kwargs['n_lat']) - # Calculate center longitudes - self.center_lons = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] + - (kwargs['inc_rlon'] * (kwargs['n_lon'] - 1)), kwargs['n_lon']) + # Complete dimensions + self._rlat, self._rlon = self._create_rotated_coordinates(**kwargs) + + # Calculate center latitudes and longitudes (1D to 2D) + self.center_lons, self.center_lats = self.rotated2latlon(np.array([self.rotated_lons] * len(self.rotated_lats)), + np.array([self.rotated_lats] * len(self.rotated_lons)).T, + **kwargs) return {'data': self.center_lats}, {'data': self.center_lons} diff --git a/tests/basic_nes_tests.py b/tests/1-nes_tests_by_size.py similarity index 100% rename from tests/basic_nes_tests.py rename to tests/1-nes_tests_by_size.py diff --git a/tests/2-nes_tests_by_projection.py b/tests/2-nes_tests_by_projection.py new file mode 100644 index 0000000000000000000000000000000000000000..c23c9bbab30eef06ff4e1a7959e559f330eb1861 --- /dev/null +++ b/tests/2-nes_tests_by_projection.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python +import sys +import timeit +import pandas as pd +from mpi4py import MPI +from nes import * + +paths = {'regular_file': {'path': '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc', + 'projection': 'regular', + 'variables': ['O3'], + 'parallel_methods': ['X', 'Y', 'T']}, + 'rotated_file': {'path': '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc', + 'projection': 'rotated', + 'variables': ['O3_all'], + 'parallel_methods': ['X', 'Y', 'T']}, + 'points_file': {'path': '/esarchive/obs/eea/eionet/hourly/pm10/pm10_202107.nc', + 'projection': 'points', + 'variables': [], + 'parallel_methods': ['X']}, + 'points_ghost_file': {'path': '/gpfs/projects/bsc32/AC_cache/obs/ghost/EANET/1.4/daily/sconcso4/sconcso4_201911.nc', + 'projection': 'points_ghost', + 'variables': [], + 'parallel_methods': ['X']}} + +results = [] + +comm = MPI.COMM_WORLD +rank = comm.Get_rank() +size = comm.Get_size() + +for name, dict in paths.items(): + + path = dict['path'] + projection = dict['projection'] + variables = dict['variables'] + parallel_methods = dict['parallel_methods'] + + for parallel_method in parallel_methods: + + if rank == 0: + print('TEST TO USE {0} GRID IN {1} FOR {2} USING {3} NODES'.format(projection.upper(), + parallel_method, + path, + size)) + sys.stdout.flush() + + try: + + # Read + start_time = timeit.default_timer() + nessy_1 = open_netcdf(path=path, comm=comm, info=True, parallel_method=parallel_method) + open_time = timeit.default_timer() - start_time + + # Select variables and load + start_time = timeit.default_timer() + if len(variables) > 0: + nessy_1.keep_vars(variables) + nessy_1.load() + load_time = timeit.default_timer() - start_time + comm.Barrier() + + # Write in serial + if rank == 0: + print('WRITE IN SERIAL') + sys.stdout.flush() + start_time = timeit.default_timer() + nessy_1.to_netcdf('{0}_{1}_file_{2}_serial.nc'.format(size, projection, parallel_method), + info=True, serial=True) + serial_time = timeit.default_timer() - start_time + comm.Barrier() + + # Write in parallel + if rank == 0: + print('WRITE IN PARALLEL') + sys.stdout.flush() + start_time = timeit.default_timer() + nessy_1.to_netcdf('{0}_{1}_file_{2}_parallel.nc'.format(size, projection, parallel_method), + 
info=True) + parallel_time = timeit.default_timer() - start_time + comm.Barrier() + + # Write in chunks + if rank == 0: + print('WRITE IN CHUNKS') + sys.stdout.flush() + start_time = timeit.default_timer() + nessy_1.to_netcdf('{0}_{1}_file_{2}_chunking.nc'.format(size, projection, parallel_method), + info=True, chunking=True) + chunking_time = timeit.default_timer() - start_time + comm.Barrier() + + # Close everything + del nessy_1 + + if rank == 0: + print('Test was successful for {0} projection in {1}'.format(projection, parallel_method)) + sys.stdout.flush() + + # End timer and save results + results.append({'Projection': projection, + 'Method': parallel_method, + 'Open': '{min:02d}:{sec:02.3f}'.format(min=int(open_time // 60), + sec=open_time - (int(open_time // 60) * 60)), + 'Load': '{min:02d}:{sec:02.3f}'.format(min=int(load_time // 60), + sec=load_time - (int(load_time // 60) * 60)), + 'Serial': '{min:02d}:{sec:02.3f}'.format(min=int(serial_time // 60), + sec=serial_time - (int(serial_time // 60) * 60)), + 'Chunking': '{min:02d}:{sec:02.3f}'.format(min=int(chunking_time // 60), + sec=chunking_time - (int(chunking_time // 60) * 60)), + 'Parallel': '{min:02d}:{sec:02.3f}'.format(min=int(parallel_time // 60), + sec=parallel_time - (int(parallel_time // 60) * 60)) + }) + + comm.Barrier() + + except Exception as e: + print(e) + + sys.stdout.flush() + +comm.Barrier() + +if rank == 0: + table = pd.DataFrame(results) + print('RESULTS TABLE') + print(table) + table.to_csv('{0}_results.csv'.format(size)) + sys.stdout.flush() diff --git a/tests/scalability_test_nord3v2.bash b/tests/scalability_test_nord3v2.bash new file mode 100644 index 0000000000000000000000000000000000000000..57758ebf130547995dab8246b0fcd9d412c1f0c9 --- /dev/null +++ b/tests/scalability_test_nord3v2.bash @@ -0,0 +1,18 @@ +#!/bin/bash + +EXPORTPATH="/esarchive/scratch/avilanova/software/NES" +SRCPATH="/esarchive/scratch/avilanova/software/NES/tests" +EXE="2-nes_tests_by_projection.py" + +module purge +module load Python/3.7.4-GCCcore-8.3.0 +module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4 +module load cfunits/1.8-foss-2019b-Python-3.7.4 +module load xarray/0.17.0-foss-2019b-Python-3.7.4 +module load pandas/1.2.4-foss-2019b-Python-3.7.4 +module load mpi4py/3.0.3-foss-2019b-Python-3.7.4 + +for nprocs in 1 2 4 8 +do + JOB_ID=`sbatch --ntasks=${nprocs} --exclusive --job-name=nes_${nprocs} --output=./log_nord3v2_NES_${nprocs}_%J.out --error=./log_nord3v2_NES_${nprocs}_%J.err -D . 
--time=02:00:00 --wrap="export PYTHONPATH=${EXPORTPATH}:${PYTHONPATH}; cd ${SRCPATH}; mpirun --mca mpi_warn_on_fork 0 -np ${nprocs} python ${SRCPATH}/${EXE}"` +done \ No newline at end of file diff --git a/tests/test_bash_nord3v2.cmd b/tests/test_bash_nord3v2.cmd index 42c01f7bdf17d09b86301f73289e18eafc3133ee..3b570f96ccafe041944f901219d1004af823a073 100644 --- a/tests/test_bash_nord3v2.cmd +++ b/tests/test_bash_nord3v2.cmd @@ -3,8 +3,8 @@ ####SBATCH --qos=debug #SBATCH -A bsc32 #SBATCH --cpus-per-task=1 -#SBATCH -n 2 -#SBATCH -t 00:30:00 +#SBATCH -n 4 +#SBATCH -t 00:10:00 #SBATCH -J test_nes #SBATCH --output=log_nord3v2_NES_%j.out #SBATCH --error=log_nord3v2_NES_%j.err @@ -18,8 +18,10 @@ module load Python/3.7.4-GCCcore-8.3.0 module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4 module load cfunits/1.8-foss-2019b-Python-3.7.4 module load xarray/0.17.0-foss-2019b-Python-3.7.4 +module load pandas/1.2.4-foss-2019b-Python-3.7.4 +module load mpi4py/3.0.3-foss-2019b-Python-3.7.4 -export PYTHONPATH=/gpfs/scratch/bsc32/bsc32538/NES_tests/NES:${PYTHONPATH} -cd /gpfs/scratch/bsc32/bsc32538/NES_tests/NES/tests +export PYTHONPATH=/esarchive/scratch/avilanova/software/NES:${PYTHONPATH} +cd /esarchive/scratch/avilanova/software/NES/tests -mpirun --mca mpi_warn_on_fork 0 -np 2 python basic_nes_tests.py +mpirun --mca mpi_warn_on_fork 0 -np 4 python 2-nes_tests_by_projection.py
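For reference, a hedged sketch of inspecting the timing table written by tests/2-nes_tests_by_projection.py above (the file name follows its '{size}_results.csv' pattern, so a 4-process run produces 4_results.csv; the column names match the results.append call): import pandas as pd; table = pd.read_csv('4_results.csv', index_col=0); print(table[['Projection', 'Method', 'Serial', 'Parallel']])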