diff --git a/.codacy.yml b/.codacy.yml
new file mode 100755
index 0000000000000000000000000000000000000000..0bc0d39b0f2175fbc21f61847dace53b65460c48
--- /dev/null
+++ b/.codacy.yml
@@ -0,0 +1,21 @@
+# codacy configuration file
+
+---
+
+engines:
+  coverage:
+    enabled: true
+  metrics:
+    enabled: true
+  duplication:
+    enabled: true
+  prospector:
+    enabled: true
+  pylint:
+    enabled: true
+    python_version: 3
+
+exclude_paths: [
+  'doc/**',
+  'data/**',
+]
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..e5d46f5e2567228ab637f935c9d2dcdc3087f550
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,9 @@
+.idea
+logs
+tests/basic_nes_tests_alba.py
+tests/test_bash_nord3v2-alba.cmd
+notebooks/.ipynb_checkpoints
+.ipynb_checkpoints
+nes/__pycache__
+nes/nc_projections/__pycache__
+jupyter_notebooks
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100755
index 0000000000000000000000000000000000000000..fa94e79884bd390fd4dbbeedaa7305a15bf35438
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,34 @@
+before_script:
+  - export GIT_SSL_NO_VERIFY=1
+  - export PATH="$HOME/miniconda2/bin:$PATH"
+
+stages:
+  - prepare
+  - test
+  - report
+  - clean
+
+cache:
+  paths:
+    - test/report
+
+prepare:
+  stage: prepare
+  script:
+    - conda update conda
+
+test_python3:
+  stage: test
+  script:
+    - git submodule sync --recursive
+    - git submodule update --init --recursive
+    - conda env update -f environment.yml -n nes python=3.7
+    - source activate nes
+    - python run_test.py
+
+clean:
+  stage: clean
+  script:
+    - conda clean --all --yes
+
+
diff --git a/.prospector.yml b/.prospector.yml
new file mode 100755
index 0000000000000000000000000000000000000000..b9c6fa952133de951c9005edb1b2f01d9bcb851b
--- /dev/null
+++ b/.prospector.yml
@@ -0,0 +1,36 @@
+# prospector configuration file
+
+---
+
+output-format: grouped
+
+strictness: veryhigh
+doc-warnings: true
+test-warnings: true
+member-warnings: false
+
+pyroma:
+  run: true
+
+pydocstyle:
+  run: true
+
+pep8:
+  disable: [
+    E501,  # Line length, already controlled by pylint
+  ]
+
+pep257:
+  run: true
+  # see http://pep257.readthedocs.io/en/latest/error_codes.html
+  disable: [
+    # For short descriptions it makes sense not to end with a period:
+    D400,  # First line should end with a period
+    # Disabled because they are not part of the official PEP 257 convention:
+    D203,  # 1 blank line required before class docstring
+    D212,  # Multi-line docstring summary should start at the first line
+    D213,  # Multi-line docstring summary should start at the second line
+    D404,  # First word of the docstring should not be "This"
+    D107,  # We use NumPy style, so the constructor is documented in the class docstring
+    D105,  # Docstrings in magic methods should not be required: we all know what they are for
+  ]
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000000000000000000000000000000000..16777801e873500ffb73768bfadaac2700d660ff
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,42 @@
+# NES CHANGELOG
+
+### 0.9.0
+* Release date: 2022/08/12
+* Changes and new features:
+    * First beta release
+    * Open:
+        * NetCDF:
+            * Regular Latitude-Longitude
+            * Rotated Lat-Lon
+            * Lambert Conformal Conic
+            * Mercator
+            * Points
+        * Points
+        * Points in GHOST format
+    * Parallelization:
+        * Balanced / Unbalanced
+        * By time axis
+        * By Y axis
+        * By X axis
+    * Create:
+        * NetCDF:
+            * Regular Latitude-Longitude
+            * Rotated Lat-Lon
+            * Lambert Conformal Conic
+            * Mercator
+            * Points
+    * Write:
+        * NetCDF
+        * Grib2
+    * Interpolation:
+        * Vertical interpolation
+        * Horizontal interpolation
+            * Nearest Neighbours
+    * Statistics:
+        * Daily_mean
+        * Daily_max
+        * Daily_min
+        * Last time step
+    * Methods:
+        * Concatenate (variables of the same period in different files)
+    
\ No newline at end of file
diff --git a/Jupyter_notebooks/1-introduction.ipynb b/Jupyter_notebooks/1-introduction.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..615860d3bed82415dcc5a0269940034654a10415
--- /dev/null
+++ b/Jupyter_notebooks/1-introduction.ipynb
@@ -0,0 +1,501 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Introduction to NES - NetCDF for Earth Science"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from nes import *"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Open NetCDF"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 175 ms, sys: 159 ms, total: 335 ms\n",
+      "Wall time: 15.7 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "cams_file = \"/gpfs/scratch/bsc32/bsc32538/a4mg/nmmb-monarch/ARCHIVE/000/2022050312/MONARCH_d01_2022050312.nc\"\n",
+    "%time nessy = open_netcdf(path=cams_file, info=True)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "nessy"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Time\n",
+    "NES.time : list of time steps (datetime)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[datetime.datetime(2022, 5, 3, 12, 0),\n",
+       " datetime.datetime(2022, 5, 3, 13, 0),\n",
+       " datetime.datetime(2022, 5, 3, 14, 0),\n",
+       " datetime.datetime(2022, 5, 3, 15, 0),\n",
+       " datetime.datetime(2022, 5, 3, 16, 0),\n",
+       " datetime.datetime(2022, 5, 3, 17, 0),\n",
+       " datetime.datetime(2022, 5, 3, 18, 0),\n",
+       " datetime.datetime(2022, 5, 3, 19, 0),\n",
+       " datetime.datetime(2022, 5, 3, 20, 0),\n",
+       " datetime.datetime(2022, 5, 3, 21, 0),\n",
+       " datetime.datetime(2022, 5, 3, 22, 0),\n",
+       " datetime.datetime(2022, 5, 3, 23, 0),\n",
+       " datetime.datetime(2022, 5, 4, 0, 0),\n",
+       " datetime.datetime(2022, 5, 4, 1, 0),\n",
+       " datetime.datetime(2022, 5, 4, 2, 0),\n",
+       " datetime.datetime(2022, 5, 4, 3, 0),\n",
+       " datetime.datetime(2022, 5, 4, 4, 0),\n",
+       " datetime.datetime(2022, 5, 4, 5, 0),\n",
+       " datetime.datetime(2022, 5, 4, 6, 0),\n",
+       " datetime.datetime(2022, 5, 4, 7, 0),\n",
+       " datetime.datetime(2022, 5, 4, 8, 0),\n",
+       " datetime.datetime(2022, 5, 4, 9, 0),\n",
+       " datetime.datetime(2022, 5, 4, 10, 0),\n",
+       " datetime.datetime(2022, 5, 4, 11, 0),\n",
+       " datetime.datetime(2022, 5, 4, 12, 0),\n",
+       " datetime.datetime(2022, 5, 4, 13, 0),\n",
+       " datetime.datetime(2022, 5, 4, 14, 0),\n",
+       " datetime.datetime(2022, 5, 4, 15, 0),\n",
+       " datetime.datetime(2022, 5, 4, 16, 0),\n",
+       " datetime.datetime(2022, 5, 4, 17, 0),\n",
+       " datetime.datetime(2022, 5, 4, 18, 0),\n",
+       " datetime.datetime(2022, 5, 4, 19, 0),\n",
+       " datetime.datetime(2022, 5, 4, 20, 0),\n",
+       " datetime.datetime(2022, 5, 4, 21, 0),\n",
+       " datetime.datetime(2022, 5, 4, 22, 0),\n",
+       " datetime.datetime(2022, 5, 4, 23, 0),\n",
+       " datetime.datetime(2022, 5, 5, 0, 
0),\n", + " datetime.datetime(2022, 5, 5, 1, 0),\n", + " datetime.datetime(2022, 5, 5, 2, 0),\n", + " datetime.datetime(2022, 5, 5, 3, 0),\n", + " datetime.datetime(2022, 5, 5, 4, 0),\n", + " datetime.datetime(2022, 5, 5, 5, 0),\n", + " datetime.datetime(2022, 5, 5, 6, 0),\n", + " datetime.datetime(2022, 5, 5, 7, 0),\n", + " datetime.datetime(2022, 5, 5, 8, 0),\n", + " datetime.datetime(2022, 5, 5, 9, 0),\n", + " datetime.datetime(2022, 5, 5, 10, 0),\n", + " datetime.datetime(2022, 5, 5, 11, 0),\n", + " datetime.datetime(2022, 5, 5, 12, 0),\n", + " datetime.datetime(2022, 5, 5, 13, 0),\n", + " datetime.datetime(2022, 5, 5, 14, 0),\n", + " datetime.datetime(2022, 5, 5, 15, 0),\n", + " datetime.datetime(2022, 5, 5, 16, 0),\n", + " datetime.datetime(2022, 5, 5, 17, 0),\n", + " datetime.datetime(2022, 5, 5, 18, 0),\n", + " datetime.datetime(2022, 5, 5, 19, 0),\n", + " datetime.datetime(2022, 5, 5, 20, 0),\n", + " datetime.datetime(2022, 5, 5, 21, 0),\n", + " datetime.datetime(2022, 5, 5, 22, 0),\n", + " datetime.datetime(2022, 5, 5, 23, 0),\n", + " datetime.datetime(2022, 5, 6, 0, 0),\n", + " datetime.datetime(2022, 5, 6, 1, 0),\n", + " datetime.datetime(2022, 5, 6, 2, 0),\n", + " datetime.datetime(2022, 5, 6, 3, 0),\n", + " datetime.datetime(2022, 5, 6, 4, 0),\n", + " datetime.datetime(2022, 5, 6, 5, 0),\n", + " datetime.datetime(2022, 5, 6, 6, 0),\n", + " datetime.datetime(2022, 5, 6, 7, 0),\n", + " datetime.datetime(2022, 5, 6, 8, 0),\n", + " datetime.datetime(2022, 5, 6, 9, 0),\n", + " datetime.datetime(2022, 5, 6, 10, 0),\n", + " datetime.datetime(2022, 5, 6, 11, 0),\n", + " datetime.datetime(2022, 5, 6, 12, 0),\n", + " datetime.datetime(2022, 5, 6, 13, 0),\n", + " datetime.datetime(2022, 5, 6, 14, 0),\n", + " datetime.datetime(2022, 5, 6, 15, 0),\n", + " datetime.datetime(2022, 5, 6, 16, 0),\n", + " datetime.datetime(2022, 5, 6, 17, 0),\n", + " datetime.datetime(2022, 5, 6, 18, 0),\n", + " datetime.datetime(2022, 5, 6, 19, 0),\n", + " datetime.datetime(2022, 5, 6, 20, 0),\n", + " datetime.datetime(2022, 5, 6, 21, 0),\n", + " datetime.datetime(2022, 5, 6, 22, 0),\n", + " datetime.datetime(2022, 5, 6, 23, 0),\n", + " datetime.datetime(2022, 5, 7, 0, 0),\n", + " datetime.datetime(2022, 5, 7, 1, 0),\n", + " datetime.datetime(2022, 5, 7, 2, 0),\n", + " datetime.datetime(2022, 5, 7, 3, 0),\n", + " datetime.datetime(2022, 5, 7, 4, 0),\n", + " datetime.datetime(2022, 5, 7, 5, 0),\n", + " datetime.datetime(2022, 5, 7, 6, 0),\n", + " datetime.datetime(2022, 5, 7, 7, 0),\n", + " datetime.datetime(2022, 5, 7, 8, 0),\n", + " datetime.datetime(2022, 5, 7, 9, 0),\n", + " datetime.datetime(2022, 5, 7, 10, 0),\n", + " datetime.datetime(2022, 5, 7, 11, 0),\n", + " datetime.datetime(2022, 5, 7, 12, 0),\n", + " datetime.datetime(2022, 5, 7, 13, 0),\n", + " datetime.datetime(2022, 5, 7, 14, 0),\n", + " datetime.datetime(2022, 5, 7, 15, 0),\n", + " datetime.datetime(2022, 5, 7, 16, 0),\n", + " datetime.datetime(2022, 5, 7, 17, 0),\n", + " datetime.datetime(2022, 5, 7, 18, 0),\n", + " datetime.datetime(2022, 5, 7, 19, 0),\n", + " datetime.datetime(2022, 5, 7, 20, 0),\n", + " datetime.datetime(2022, 5, 7, 21, 0),\n", + " datetime.datetime(2022, 5, 7, 22, 0),\n", + " datetime.datetime(2022, 5, 7, 23, 0),\n", + " datetime.datetime(2022, 5, 8, 0, 0)]" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.time" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Level, Latitude, Longitude" + ] + }, + { + "cell_type": "code", + 
"execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,\n", + " 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],\n", + " mask=False,\n", + " fill_value=999999,\n", + " dtype=int32),\n", + " 'dimensions': ('lm',),\n", + " 'units': '',\n", + " 'long_name': 'layer id'}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[16.371021, 16.43293 , 16.494629, ..., 16.494629, 16.43293 ,\n", + " 16.371021],\n", + " [16.503883, 16.565914, 16.627739, ..., 16.627739, 16.565918,\n", + " 16.503883],\n", + " [16.636723, 16.69888 , 16.760828, ..., 16.760828, 16.698881,\n", + " 16.636723],\n", + " ...,\n", + " [58.41168 , 58.525536, 58.63936 , ..., 58.63936 , 58.525547,\n", + " 58.41168 ],\n", + " [58.49049 , 58.604454, 58.718372, ..., 58.718372, 58.604454,\n", + " 58.49049 ],\n", + " [58.56883 , 58.6829 , 58.796925, ..., 58.796925, 58.682903,\n", + " 58.56883 ]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'long_name': 'latitude',\n", + " 'units': 'degrees_north',\n", + " 'standard_name': 'latitude',\n", + " 'coordinates': 'lon lat'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.lat" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Variables\n", + "\n", + "- List of variables in lazy mode: No data" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['lmp', 'IM', 'JM', 'LM', 'IHRST', 'I_PAR_STA', 'J_PAR_STA', 'NPHS', 'NCLOD', 'NHEAT', 'NPREC', 'NRDLW', 'NRDSW', 'NSRFC', 'AVGMAXLEN', 'MDRMINout', 'MDRMAXout', 'MDIMINout', 'MDIMAXout', 'IDAT', 'DXH', 'SG1', 'SG2', 'DSG1', 'DSG2', 'SGML1', 'SGML2', 'SLDPTH', 'ISLTYP', 'IVGTYP', 'NCFRCV', 'NCFRST', 'FIS', 'GLAT', 'GLON', 'PD', 'VLAT', 'VLON', 'ACPREC', 'CUPREC', 'MIXHT', 'PBLH', 'RLWTOA', 'RSWIN', 'U10', 'USTAR', 'V10', 'RMOL', 'T2', 'relative_humidity_2m', 'T', 'U', 'V', 'SH2O', 'SMC', 'STC', 'AERO_ACPREC', 'AERO_CUPREC', 'AERO_DEPDRY', 'AERO_OPT_R', 'DRE_SW_TOA', 'DRE_SW_SFC', 'DRE_LW_TOA', 'DRE_LW_SFC', 'ENG_SW_SFC', 'ADRYDEP', 'WETDEP', 'PH_NO2', 'HSUM', 'POLR', 'aerosol_optical_depth_dim', 'aerosol_optical_depth', 'satellite_AOD_dim', 'satellite_AOD', 'aerosol_loading_dim', 'aerosol_loading', 'clear_sky_AOD_dim', 'clear_sky_AOD', 'layer_thickness', 'mid_layer_pressure', 'interface_pressure', 'relative_humidity', 'mid_layer_height', 'mid_layer_height_agl', 'air_density', 'dry_pm10_mass', 'dry_pm2p5_mass', 'QC', 'QR', 'QS', 'QG', 'aero_dust_001', 'aero_dust_002', 'aero_dust_003', 'aero_dust_004', 'aero_dust_005', 'aero_dust_006', 'aero_dust_007', 'aero_dust_008', 'aero_ssa_001', 'aero_ssa_002', 'aero_ssa_003', 'aero_ssa_004', 'aero_ssa_005', 'aero_ssa_006', 'aero_ssa_007', 'aero_ssa_008', 'aero_om_001', 'aero_om_002', 'aero_om_003', 'aero_om_004', 'aero_om_005', 'aero_om_006', 'aero_bc_001', 'aero_bc_002', 'aero_so4_001', 'aero_no3_001', 'aero_no3_002', 'aero_no3_003', 'aero_nh4_001', 'aero_unsp_001', 'aero_unsp_002', 'aero_unsp_003', 'aero_unsp_004', 'aero_unsp_005', 'aero_pol_001', 'aero_pol_002', 'aero_pol_003', 'aero_pol_004', 'aero_pol_005', 'aero_pol_006', 
'aero_pol_007', 'aero_pol_008', 'aero_pol_009', 'aero_pol_010', 'NO2', 'NO', 'O3', 'NO3', 'N2O5', 'HNO3', 'HONO', 'PNA', 'H2O2', 'NTR', 'ROOH', 'FORM', 'ALD2', 'ALDX', 'PAR', 'CO', 'MEPX', 'MEOH', 'FACD', 'PAN', 'PACD', 'AACD', 'PANX', 'OLE', 'ETH', 'IOLE', 'TOL', 'CRES', 'OPEN', 'MGLY', 'XYL', 'ISOP', 'ISPD', 'TERP', 'SO2', 'SULF', 'ETOH', 'ETHA', 'CL2', 'HOCL', 'FMCL', 'HCL', 'BENZENE', 'SESQ', 'NH3', 'DMS', 'SOAP_I', 'SOAP_T', 'SOAP_F', 'SOAP_A', 'O', 'O1D', 'OH', 'HO2', 'XO2', 'XO2N', 'MEO2', 'HCO3', 'C2O3', 'CXO3', 'ROR', 'TO2', 'TOLRO2', 'CRO', 'XYLRO2', 'ISOPRXN', 'TRPRXN', 'SULRXN', 'CL', 'CLO', 'TOLNRXN', 'TOLHRXN', 'XYLNRXN', 'XYLHRXN', 'BENZRO2', 'BNZNRXN', 'BNZHRXN', 'SESQRXN', 'aerosol_extinction_dim', 'aerosol_extinction_DUST_1', 'aerosol_extinction_DUST_2', 'aerosol_extinction_DUST_3', 'aerosol_extinction_DUST_4', 'aerosol_extinction_DUST_5', 'aerosol_extinction_DUST_6', 'aerosol_extinction_DUST_7', 'aerosol_extinction_DUST_8', 'aerosol_extinction_SALT_total', 'aerosol_extinction_OM_total', 'aerosol_extinction_BC_total', 'aerosol_extinction_SO4_total', 'aerosol_extinction_NO3_total', 'aerosol_extinction_NH4_total', 'aerosol_extinction_UNSPC_1', 'aerosol_extinction_UNSPC_2', 'aerosol_extinction_UNSPC_3', 'aerosol_extinction_UNSPC_4', 'aerosol_extinction_UNSPC_5', 'aerosol_extinction_POLLEN_total'])"
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "nessy.variables.keys()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'O3': {'data': None,\n",
+       "  'dimensions': ('time', 'lm', 'rlat', 'rlon'),\n",
+       "  'long_name': 'TRACERS_054',\n",
+       "  'units': 'unknown',\n",
+       "  'standard_name': 'TRACERS_054',\n",
+       "  'coordinates': 'lon lat',\n",
+       "  'grid_mapping': 'rotated_pole'}}"
+      ]
+     },
+     "execution_count": 8,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# Selecting only one variable and discarding the rest.\n",
+    "nessy.keep_vars('O3')\n",
+    "nessy.variables"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Rank 000: Loading O3 var (1/1)\n",
+      "Rank 000: Loaded O3 var ((109, 24, 361, 467))\n",
+      "CPU times: user 1.21 s, sys: 7.1 s, total: 8.32 s\n",
+      "Wall time: 42 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Loading variable data from the NetCDF file\n",
+    "%time nessy.load()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(109, 24, 361, 467)\n",
+      "('time', 'lm', 'rlat', 'rlon')\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(nessy.variables['O3']['data'].shape)\n",
+    "print(nessy.variables['O3']['dimensions'])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 723 ms, sys: 719 ms, total: 1.44 s\n",
+      "Wall time: 14.3 s\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Writing NetCDF\n",
+    "%time nessy.to_netcdf('o3_test.nc')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Statistics"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "CPU times: user 403 ms, sys: 206 ms, total: 609 ms\n",
+      "Wall time: 611 ms\n"
+     ]
+    }
+   ],
+   "source": [
+    "%time 
nessy.daily_statistic(op=\"mean\")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(5, 24, 361, 467)\n", + "('time', 'lm', 'rlat', 'rlon')\n" + ] + } + ], + "source": [ + "print(nessy.variables['O3']['data'].shape)\n", + "print(nessy.variables['O3']['dimensions'])" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 49.2 ms, sys: 30.3 ms, total: 79.4 ms\n", + "Wall time: 837 ms\n" + ] + } + ], + "source": [ + "%time nessy.to_netcdf('o3_daily_mean_test.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metadata 'cell_methods': time: mean (interval: 1hr)\n", + "Time: [datetime.datetime(2022, 5, 3, 0, 0), datetime.datetime(2022, 5, 4, 0, 0), datetime.datetime(2022, 5, 5, 0, 0), datetime.datetime(2022, 5, 6, 0, 0), datetime.datetime(2022, 5, 7, 0, 0)]\n", + "Time bounds: 5\n", + "[datetime.datetime(2022, 5, 3, 12, 0), datetime.datetime(2022, 5, 3, 23, 0)]\n" + ] + } + ], + "source": [ + "print(\"Metadata 'cell_methods':\", nessy.variables['O3']['cell_methods'])\n", + "\n", + "print(\"Time:\", nessy.time)\n", + "print(\"Time bounds:\", len(nessy.time_bnds))\n", + "\n", + "print(nessy.time_bnds[0])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.1-regular_grids.ipynb b/Jupyter_notebooks/1.1-regular_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..c6733d6d81818c4b45234704f8b4fd7b1a539eb5 --- /dev/null +++ b/Jupyter_notebooks/1.1-regular_grids.ipynb @@ -0,0 +1,2358 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write regular grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from netCDF4 import Dataset\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "nc_path_1 = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# ERROR when files have lat_bnds, lon_bnds \n", + "#nc_path_1 = '/esarchive/exp/ecearth/a2vx/original_files/cmorfiles-fixed/CMIP/EC-Earth-Consortium/EC-Earth3-AerChem/historical/r4i1p1f1/Amon/ch4/gn/v20200609/ch4_Amon_EC-Earth3-AerChem_historical_r4i1p1f1_gn_185001-185012.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. 
Read dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'IM' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'JM' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'LM' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'IHRST' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'I_PAR_STA' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'J_PAR_STA' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'NPHS' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'NCLOD' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'NHEAT' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'NPREC' has multiple fill values {-999999, -32767}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'NRDLW' has multiple fill values {-999999, 
-32767}, decoding all values to NaN.\n",
+      "  decode_timedelta=decode_timedelta,\n",
+      "[... the same SerializationWarning is raised for every remaining variable in the file, each decoded to NaN ...]\n",
+ 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'SMOISCOR' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'relative_humidity_2m' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'W' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'W_TOT' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'OMGALF' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'O3' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'CLDFRA' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'CW' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'EXCH_H' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'Q' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'Q2' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'RLWTT' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'RSWTT' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'PINT' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'DWDT' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'T' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'TCUCN' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'TRAIN' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'U' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'V' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'XLEN_MIX' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'F_ICE' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: 
SerializationWarning: variable 'F_RIMEF' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'F_RAIN' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'SH2O' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'SMC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'STC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'AERO_ACPREC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'AERO_CUPREC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'AERO_DEPDRY' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'AERO_OPT_R' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'DRE_SW_TOA' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'DRE_SW_SFC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'DRE_LW_TOA' has multiple fill values {-999999.0, -32767.0}, decoding all values 
to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'DRE_LW_SFC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'ENG_SW_SFC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'EMISS_AERO' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'ADRYDEP' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'WETDEP' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'PH_NO2' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'AEROSSA' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_optical_depth' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'satellite_AOD' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_loading' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'clear_sky_AOD' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'layer_thickness' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'mid_layer_pressure' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'interface_pressure' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'relative_humidity' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'mid_layer_height' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'mid_layer_height_agl' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'air_density' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'dry_pm10_mass' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'dry_pm2p5_mass' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QC' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QR' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QS' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'QG' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_001' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_002' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_003' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_004' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_005' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_006' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_007' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_dust_008' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_001' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_002' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_003' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_004' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_005' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_006' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_007' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_008' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_009' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_010' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_011' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_012' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_013' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_014' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_015' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_016' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_017' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_018' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_019' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_020' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_021' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_022' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_023' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_024' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_025' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_026' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_027' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_028' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_029' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_030' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_031' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_032' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_033' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_034' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_035' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_036' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_037' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_038' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_039' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_040' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_041' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_042' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_043' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_044' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_045' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_046' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_047' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_048' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_049' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_050' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_051' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_052' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_053' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_054' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_055' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_056' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_057' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_058' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_059' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_060' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_061' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_062' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_063' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_064' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_065' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_066' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_067' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_068' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_069' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_070' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_071' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_072' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_073' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_074' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_075' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_076' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_077' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_078' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + 
"/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_079' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aero_flex_080' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'MPRATES_001' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_1' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_2' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_3' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_4' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_5' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_6' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_7' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " decode_timedelta=decode_timedelta,\n", + "/gpfs/projects/bsc32/software/suselinux/11/software/xarray/0.19.0-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/xarray/conventions.py:520: SerializationWarning: variable 'aerosol_extinction_DUST_8' has multiple fill values {-999999.0, -32767.0}, decoding all values to NaN.\n", + " 
decode_timedelta=decode_timedelta,\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                    (time: 9, lm: 48, lmp: 49, lon: 257, lat: 181, idat: 3, soil_lm: 4, num_aero: 88, num_aero_r: 3, num_aero_1: 89, num_engy: 7, num_gas_total: 1, aerosol_optical_depth_dim: 8, satellite_AOD_dim: 2, aerosol_loading_dim: 8, clear_sky_AOD_dim: 8, aerosol_extinction_dim: 8)\n",
+       "Coordinates:\n",
+       "  * time                       (time) datetime64[ns] 2008-12-31 ... 2009-01-01\n",
+       "  * lm                         (lm) int32 0 1 2 3 4 5 6 ... 41 42 43 44 45 46 47\n",
+       "  * lmp                        (lmp) int32 0 1 2 3 4 5 6 ... 43 44 45 46 47 48\n",
+       "  * lon                        (lon) float32 -180.0 -178.6 ... 178.6 180.0\n",
+       "  * lat                        (lat) float32 -90.0 -89.0 -88.0 ... 89.0 90.0\n",
+       "  * aerosol_optical_depth_dim  (aerosol_optical_depth_dim) |S100 b'DUST_1    ...\n",
+       "  * satellite_AOD_dim          (satellite_AOD_dim) |S100 b'MODIS TERRA 550 nm...\n",
+       "  * aerosol_loading_dim        (aerosol_loading_dim) |S100 b'DUST_1          ...\n",
+       "  * clear_sky_AOD_dim          (clear_sky_AOD_dim) |S100 b'DUST_1            ...\n",
+       "  * aerosol_extinction_dim     (aerosol_extinction_dim) |S100 b'DUST_1       ...\n",
+       "Dimensions without coordinates: idat, soil_lm, num_aero, num_aero_r, num_aero_1, num_engy, num_gas_total\n",
+       "Data variables: (12/302)\n",
+       "    IM                         float64 257.0\n",
+       "    JM                         float64 181.0\n",
+       "    LM                         float64 48.0\n",
+       "    IHRST                      float64 0.0\n",
+       "    I_PAR_STA                  float64 1.0\n",
+       "    J_PAR_STA                  float64 1.0\n",
+       "    ...                         ...\n",
+       "    aerosol_extinction_DUST_3  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_4  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_5  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_6  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_7  (time, lm, lat, lon) float32 ...\n",
+       "    aerosol_extinction_DUST_8  (time, lm, lat, lon) float32 ...\n",
+       "Attributes:\n",
+       "    Domain:       Global\n",
+       "    Conventions:  None\n",
+       "    history:      MONARCHv1.0 netcdf file.\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 9, lm: 48, lmp: 49, lon: 257, lat: 181, idat: 3, soil_lm: 4, num_aero: 88, num_aero_r: 3, num_aero_1: 89, num_engy: 7, num_gas_total: 1, aerosol_optical_depth_dim: 8, satellite_AOD_dim: 2, aerosol_loading_dim: 8, clear_sky_AOD_dim: 8, aerosol_extinction_dim: 8)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2008-12-31 ... 2009-01-01\n", + " * lm (lm) int32 0 1 2 3 4 5 6 ... 41 42 43 44 45 46 47\n", + " * lmp (lmp) int32 0 1 2 3 4 5 6 ... 43 44 45 46 47 48\n", + " * lon (lon) float32 -180.0 -178.6 ... 178.6 180.0\n", + " * lat (lat) float32 -90.0 -89.0 -88.0 ... 89.0 90.0\n", + " * aerosol_optical_depth_dim (aerosol_optical_depth_dim) |S100 b'DUST_1 ...\n", + " * satellite_AOD_dim (satellite_AOD_dim) |S100 b'MODIS TERRA 550 nm...\n", + " * aerosol_loading_dim (aerosol_loading_dim) |S100 b'DUST_1 ...\n", + " * clear_sky_AOD_dim (clear_sky_AOD_dim) |S100 b'DUST_1 ...\n", + " * aerosol_extinction_dim (aerosol_extinction_dim) |S100 b'DUST_1 ...\n", + "Dimensions without coordinates: idat, soil_lm, num_aero, num_aero_r, num_aero_1, num_engy, num_gas_total\n", + "Data variables: (12/302)\n", + " IM float64 ...\n", + " JM float64 ...\n", + " LM float64 ...\n", + " IHRST float64 ...\n", + " I_PAR_STA float64 ...\n", + " J_PAR_STA float64 ...\n", + " ... ...\n", + " aerosol_extinction_DUST_3 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_4 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_5 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_6 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_7 (time, lm, lat, lon) float32 ...\n", + " aerosol_extinction_DUST_8 (time, lm, lat, lon) float32 ...\n", + "Attributes:\n", + " Domain: Global\n", + " Conventions: None\n", + " history: MONARCHv1.0 netcdf file.\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True)\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2008, 12, 31, 0, 0),\n", + " datetime.datetime(2008, 12, 31, 3, 0),\n", + " datetime.datetime(2008, 12, 31, 6, 0),\n", + " datetime.datetime(2008, 12, 31, 9, 0),\n", + " datetime.datetime(2008, 12, 31, 12, 0),\n", + " datetime.datetime(2008, 12, 31, 15, 0),\n", + " datetime.datetime(2008, 12, 31, 18, 0),\n", + " datetime.datetime(2008, 12, 31, 21, 0),\n", + " datetime.datetime(2009, 1, 1, 0, 0)]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,\n", + " 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,\n", + " 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,\n", + " 42, 43, 44, 45, 46, 47],\n", + " mask=False,\n", + " fill_value=999999,\n", + " dtype=int32),\n", + " 
'dimensions': ('lm',),\n", + " 'units': '',\n", + " 'long_name': 'layer id'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-90., -89., -88., -87., -86., -85., -84., -83., -82.,\n", + " -81., -80., -79., -78., -77., -76., -75., -74., -73.,\n", + " -72., -71., -70., -69., -68., -67., -66., -65., -64.,\n", + " -63., -62., -61., -60., -59., -58., -57., -56., -55.,\n", + " -54., -53., -52., -51., -50., -49., -48., -47., -46.,\n", + " -45., -44., -43., -42., -41., -40., -39., -38., -37.,\n", + " -36., -35., -34., -33., -32., -31., -30., -29., -28.,\n", + " -27., -26., -25., -24., -23., -22., -21., -20., -19.,\n", + " -18., -17., -16., -15., -14., -13., -12., -11., -10.,\n", + " -9., -8., -7., -6., -5., -4., -3., -2., -1.,\n", + " 0., 1., 2., 3., 4., 5., 6., 7., 8.,\n", + " 9., 10., 11., 12., 13., 14., 15., 16., 17.,\n", + " 18., 19., 20., 21., 22., 23., 24., 25., 26.,\n", + " 27., 28., 29., 30., 31., 32., 33., 34., 35.,\n", + " 36., 37., 38., 39., 40., 41., 42., 43., 44.,\n", + " 45., 46., 47., 48., 49., 50., 51., 52., 53.,\n", + " 54., 55., 56., 57., 58., 59., 60., 61., 62.,\n", + " 63., 64., 65., 66., 67., 68., 69., 70., 71.,\n", + " 72., 73., 74., 75., 76., 77., 78., 79., 80.,\n", + " 81., 82., 83., 84., 85., 86., 87., 88., 89.,\n", + " 90.],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lat',),\n", + " 'long_name': 'latitude',\n", + " 'units': 'degrees_north',\n", + " 'standard_name': 'grid_latitude'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-180. , -178.59375, -177.1875 , -175.78125,\n", + " -174.375 , -172.96875, -171.5625 , -170.15625,\n", + " -168.75 , -167.34375, -165.9375 , -164.53125,\n", + " -163.125 , -161.71875, -160.3125 , -158.90625,\n", + " -157.5 , -156.09375, -154.6875 , -153.28125,\n", + " -151.875 , -150.46875, -149.0625 , -147.65625,\n", + " -146.25 , -144.84375, -143.4375 , -142.03125,\n", + " -140.625 , -139.21875, -137.8125 , -136.40625,\n", + " -135. , -133.59375, -132.1875 , -130.78125,\n", + " -129.375 , -127.96875, -126.5625 , -125.15625,\n", + " -123.75 , -122.34375, -120.9375 , -119.53125,\n", + " -118.125 , -116.71875, -115.3125 , -113.90625,\n", + " -112.5 , -111.09375, -109.6875 , -108.28125,\n", + " -106.875 , -105.46875, -104.0625 , -102.65625,\n", + " -101.25 , -99.84375, -98.4375 , -97.03125,\n", + " -95.625 , -94.21875, -92.8125 , -91.40625,\n", + " -90. , -88.59375, -87.1875 , -85.78125,\n", + " -84.375 , -82.96875, -81.5625 , -80.15625,\n", + " -78.75 , -77.34375, -75.9375 , -74.53125,\n", + " -73.125 , -71.71875, -70.3125 , -68.90625,\n", + " -67.5 , -66.09375, -64.6875 , -63.28125,\n", + " -61.875 , -60.46875, -59.0625 , -57.65625,\n", + " -56.25 , -54.84375, -53.4375 , -52.03125,\n", + " -50.625 , -49.21875, -47.8125 , -46.40625,\n", + " -45. 
, -43.59375, -42.1875 , -40.78125,\n", + " -39.375 , -37.96875, -36.5625 , -35.15625,\n", + " -33.75 , -32.34375, -30.9375 , -29.53125,\n", + " -28.125 , -26.71875, -25.3125 , -23.90625,\n", + " -22.5 , -21.09375, -19.6875 , -18.28125,\n", + " -16.875 , -15.46875, -14.0625 , -12.65625,\n", + " -11.25 , -9.84375, -8.4375 , -7.03125,\n", + " -5.625 , -4.21875, -2.8125 , -1.40625,\n", + " 0. , 1.40625, 2.8125 , 4.21875,\n", + " 5.625 , 7.03125, 8.4375 , 9.84375,\n", + " 11.25 , 12.65625, 14.0625 , 15.46875,\n", + " 16.875 , 18.28125, 19.6875 , 21.09375,\n", + " 22.5 , 23.90625, 25.3125 , 26.71875,\n", + " 28.125 , 29.53125, 30.9375 , 32.34375,\n", + " 33.75 , 35.15625, 36.5625 , 37.96875,\n", + " 39.375 , 40.78125, 42.1875 , 43.59375,\n", + " 45. , 46.40625, 47.8125 , 49.21875,\n", + " 50.625 , 52.03125, 53.4375 , 54.84375,\n", + " 56.25 , 57.65625, 59.0625 , 60.46875,\n", + " 61.875 , 63.28125, 64.6875 , 66.09375,\n", + " 67.5 , 68.90625, 70.3125 , 71.71875,\n", + " 73.125 , 74.53125, 75.9375 , 77.34375,\n", + " 78.75 , 80.15625, 81.5625 , 82.96875,\n", + " 84.375 , 85.78125, 87.1875 , 88.59375,\n", + " 90. , 91.40625, 92.8125 , 94.21875,\n", + " 95.625 , 97.03125, 98.4375 , 99.84375,\n", + " 101.25 , 102.65625, 104.0625 , 105.46875,\n", + " 106.875 , 108.28125, 109.6875 , 111.09375,\n", + " 112.5 , 113.90625, 115.3125 , 116.71875,\n", + " 118.125 , 119.53125, 120.9375 , 122.34375,\n", + " 123.75 , 125.15625, 126.5625 , 127.96875,\n", + " 129.375 , 130.78125, 132.1875 , 133.59375,\n", + " 135. , 136.40625, 137.8125 , 139.21875,\n", + " 140.625 , 142.03125, 143.4375 , 144.84375,\n", + " 146.25 , 147.65625, 149.0625 , 150.46875,\n", + " 151.875 , 153.28125, 154.6875 , 156.09375,\n", + " 157.5 , 158.90625, 160.3125 , 161.71875,\n", + " 163.125 , 164.53125, 165.9375 , 167.34375,\n", + " 168.75 , 170.15625, 171.5625 , 172.96875,\n", + " 174.375 , 175.78125, 177.1875 , 178.59375,\n", + " 180. 
],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('lon',),\n", + " 'long_name': 'longitude',\n", + " 'units': 'degrees_east',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "nessy_1.keep_vars('O3')" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3 var (1/1)\n", + "Rank 000: Loaded O3 var ((9, 48, 181, 257))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'O3': {'data': masked_array(\n", + " data=[[[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]],\n", + " \n", + " \n", + " [[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 
0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]],\n", + " \n", + " \n", + " [[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]],\n", + " \n", + " \n", + " ...,\n", + " \n", + " \n", + " [[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " 
[0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]],\n", + " \n", + " \n", + " [[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]],\n", + " \n", + " \n", + " [[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " ...,\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]],\n", + " \n", + " [[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'lm', 'lat', 'lon'),\n", + " 'long_name': 'O3',\n", + " 'units': 'unknown',\n", + " 'standard_name': 'O3'}}" + ] + }, + 
"execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Write dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating regular_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing O3 var (1/1)\n", + "Rank 000: Var O3 created (1/1)\n", + "Rank 000: Filling O3)\n", + "Rank 000: Var O3 data (1/1)\n", + "Rank 000: Var O3 completed (1/1)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('regular_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": true + }, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('regular_file_1.nc', info=True)\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:  (time: 9, lev: 48, lat: 181, lon: 257)\n",
+       "Coordinates:\n",
+       "  * time     (time) datetime64[ns] 2008-12-31 2008-12-31T03:00:00 ... 2009-01-01\n",
+       "  * lev      (lev) float64 0.0 1.0 2.0 3.0 4.0 5.0 ... 43.0 44.0 45.0 46.0 47.0\n",
+       "  * lat      (lat) float64 -90.0 -89.0 -88.0 -87.0 -86.0 ... 87.0 88.0 89.0 90.0\n",
+       "  * lon      (lon) float64 -180.0 -178.6 -177.2 -175.8 ... 177.2 178.6 180.0\n",
+       "Data variables:\n",
+       "    O3       (time, lev, lat, lon) float32 ...\n",
+       "    crs      |S1 b''\n",
+       "Attributes:\n",
+       "    Domain:       Global\n",
+       "    Conventions:  CF-1.7\n",
+       "    history:      MONARCHv1.0 netcdf file.\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 9, lev: 48, lat: 181, lon: 257)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2008-12-31 2008-12-31T03:00:00 ... 2009-01-01\n", + " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 5.0 ... 43.0 44.0 45.0 46.0 47.0\n", + " * lat (lat) float64 -90.0 -89.0 -88.0 -87.0 -86.0 ... 87.0 88.0 89.0 90.0\n", + " * lon (lon) float64 -180.0 -178.6 -177.2 -175.8 ... 177.2 178.6 180.0\n", + "Data variables:\n", + " O3 (time, lev, lat, lon) float32 ...\n", + " crs |S1 ...\n", + "Attributes:\n", + " Domain: Global\n", + " Conventions: CF-1.7\n", + " history: MONARCHv1.0 netcdf file.\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('regular_file_1.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.2-rotated_grids.ipynb b/Jupyter_notebooks/1.2-rotated_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4cff1df6df2c0bf97ae9c90b55077140afc990b6 --- /dev/null +++ b/Jupyter_notebooks/1.2-rotated_grids.ipynb @@ -0,0 +1,1276 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write rotated grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "nc_path_1 = '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 1, nv: 2, lev: 24, rlat: 271, rlon: 351)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2021-08-03\n",
+       "  * lev           (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n",
+       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
+       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
+       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
+       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
+       "Dimensions without coordinates: nv\n",
+       "Data variables:\n",
+       "    time_bnds     (time, nv) datetime64[ns] 2021-08-03 2021-08-07\n",
+       "    O3_all        (time, lev, rlat, rlon) float32 ...\n",
+       "    rotated_pole  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, nv: 2, lev: 24, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-08-03\n", + " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Dimensions without coordinates: nv\n", + "Data variables:\n", + " time_bnds (time, nv) datetime64[ns] ...\n", + " O3_all (time, lev, rlat, rlon) float32 ...\n", + " rotated_pole |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True)\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2021, 8, 3, 0, 0)]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.,\n", + " 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21.,\n", + " 22., 23.],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('lev',),\n", + " 'units': '',\n", + " 'positive': 'up'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[16.35033798, 16.43292999, 16.51514626, ..., 16.51514626,\n", + " 16.43292999, 16.35033798],\n", + " [16.52742577, 16.61023903, 16.69267654, ..., 16.69267654,\n", + " 16.61024284, 16.52742577],\n", + " [16.70447159, 16.78750801, 16.87016678, ..., 16.87016678,\n", + " 16.78750992, 16.70447159],\n", + " ...,\n", + " [58.32094955, 58.47268295, 58.62430954, ..., 58.62430954,\n", + " 58.47268295, 58.32094955],\n", + " [58.42628479, 58.57820129, 58.73002625, ..., 58.73002625,\n", + " 58.57820129, 58.42628479],\n", + " [58.53079224, 58.68289948, 58.83491898, ..., 58.83491898,\n", + " 58.68290329, 58.53079224]],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", + " 'standard_name': 'latitude'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-22.18126488, -22.01667213, -21.85179901, ..., 41.8517952 ,\n", + " 42.01666641, 42.18125916],\n", + " [-22.27817917, -22.11318588, 
-21.94790459, ..., 41.94789886,\n", + " 42.11317444, 42.27817154],\n", + " [-22.37526703, -22.2098732 , -22.04418945, ..., 42.04418564,\n", + " 42.2098732 , 42.37526321],\n", + " ...,\n", + " [-67.57766724, -67.39706421, -67.21534729, ..., 87.21533966,\n", + " 87.39705658, 87.57765961],\n", + " [-67.90187836, -67.72247314, -67.54193878, ..., 87.54193878,\n", + " 87.72245789, 87.90187073],\n", + " [-68.22803497, -68.04981995, -67.87051392, ..., 87.87050629,\n", + " 88.04981995, 88.22803497]],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('rlat', 'rlon'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3_all var (1/1)\n", + "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'O3_all': {'data': masked_array(\n", + " data=[[[[2.82636070e-09, 2.83436141e-09, 2.82522827e-09, ...,\n", + " 2.96334601e-09, 2.94810221e-09, 2.91839553e-09],\n", + " [2.80930834e-09, 2.57182142e-09, 2.55521360e-09, ...,\n", + " 2.56395216e-09, 2.55890820e-09, 2.89073032e-09],\n", + " [2.79031842e-09, 2.53415999e-09, 2.50317100e-09, ...,\n", + " 2.56737764e-09, 2.58685584e-09, 2.85498758e-09],\n", + " ...,\n", + " [4.54469973e-09, 2.31674457e-09, 2.22753971e-09, ...,\n", + " 3.90127353e-09, 3.89643118e-09, 3.95452204e-09],\n", + " [4.54129534e-09, 3.39469808e-09, 2.30205255e-09, ...,\n", + " 3.88824706e-09, 3.88372090e-09, 3.95252631e-09],\n", + " [4.55012028e-09, 4.54941684e-09, 4.55885596e-09, ...,\n", + " 3.93945099e-09, 3.94256938e-09, 3.94736510e-09]],\n", + " \n", + " [[1.61966751e-09, 1.62850033e-09, 1.62801062e-09, ...,\n", + " 1.74583636e-09, 1.74684045e-09, 1.74125825e-09],\n", + " [1.60704539e-09, 1.41438683e-09, 1.39824063e-09, ...,\n", + " 1.43241041e-09, 1.45136980e-09, 1.73744363e-09],\n", + " [1.59303792e-09, 1.41264567e-09, 1.43958856e-09, ...,\n", + " 1.43522705e-09, 1.45869528e-09, 1.72746673e-09],\n", + " ...,\n", + " [3.39471939e-09, 2.65527422e-09, 2.22850582e-09, ...,\n", + " 3.00350167e-09, 3.02176750e-09, 3.04009262e-09],\n", + " [3.42592332e-09, 2.81851942e-09, 2.28753505e-09, ...,\n", + " 2.99818836e-09, 2.99247205e-09, 3.04403525e-09],\n", + " [3.43113582e-09, 3.43824125e-09, 3.44929552e-09, ...,\n", + " 3.05421777e-09, 3.04752024e-09, 3.04445491e-09]],\n", + " \n", + " [[6.52169652e-10, 6.62677024e-10, 6.71934786e-10, ...,\n", + " 6.84429291e-10, 6.85826118e-10, 6.81504464e-10],\n", + " [6.54959087e-10, 6.65219158e-10, 6.72430500e-10, ...,\n", + " 7.02121916e-10, 6.88325397e-10, 6.78990253e-10],\n", + " [6.57915333e-10, 6.72102929e-10, 6.82566170e-10, ...,\n", + " 7.10820458e-10, 7.07094217e-10, 6.77522760e-10],\n", + " ...,\n", + " [2.26027863e-09, 2.27629537e-09, 2.22616392e-09, ...,\n", + " 1.80253423e-09, 1.80225357e-09, 1.75757697e-09],\n", + " [2.25028196e-09, 2.24872521e-09, 2.25445618e-09, ...,\n", + " 1.78916737e-09, 1.75583581e-09, 1.73717007e-09],\n", + " [2.25827335e-09, 2.26974151e-09, 2.28325270e-09, ...,\n", + " 1.80090465e-09, 1.77703174e-09, 1.75434933e-09]],\n", + " \n", + " ...,\n", + " 
\n", + " [[6.20177729e-11, 6.26959387e-11, 6.28658792e-11, ...,\n", + " 7.74274672e-11, 7.81546980e-11, 7.60479180e-11],\n", + " [6.20486787e-11, 4.91600684e-11, 4.88878833e-11, ...,\n", + " 8.30884250e-11, 8.02152303e-11, 7.64004970e-11],\n", + " [6.20976950e-11, 4.84989236e-11, 4.85273696e-11, ...,\n", + " 8.46209977e-11, 8.60716498e-11, 9.29777644e-11],\n", + " ...,\n", + " [6.15721710e-11, 5.85051035e-11, 5.68927752e-11, ...,\n", + " 7.66955388e-11, 7.87262894e-11, 8.41871295e-11],\n", + " [6.17081941e-11, 5.77536560e-11, 5.71826440e-11, ...,\n", + " 8.49015233e-11, 8.82505458e-11, 9.20043208e-11],\n", + " [6.09760506e-11, 6.03529102e-11, 6.24047411e-11, ...,\n", + " 9.69636524e-11, 9.73700426e-11, 9.67554162e-11]],\n", + " \n", + " [[6.17567178e-11, 6.23894963e-11, 6.25706292e-11, ...,\n", + " 9.04916420e-11, 8.90077803e-11, 8.43536768e-11],\n", + " [6.17901147e-11, 4.59270816e-11, 4.57923699e-11, ...,\n", + " 1.06383589e-10, 1.05693093e-10, 9.44862175e-11],\n", + " [6.18271337e-11, 4.17853495e-11, 3.94594427e-11, ...,\n", + " 1.34135009e-10, 1.37096737e-10, 1.13853482e-10],\n", + " ...,\n", + " [5.87425456e-11, 5.60845814e-11, 5.33429169e-11, ...,\n", + " 6.52061183e-11, 6.64711411e-11, 7.06842501e-11],\n", + " [5.92315016e-11, 5.72428251e-11, 5.51245403e-11, ...,\n", + " 7.10893150e-11, 7.38196310e-11, 7.53354532e-11],\n", + " [5.72967125e-11, 5.87497967e-11, 6.08200851e-11, ...,\n", + " 7.97847274e-11, 8.28124236e-11, 7.89215707e-11]],\n", + " \n", + " [[6.15217946e-11, 6.21571961e-11, 6.23377391e-11, ...,\n", + " 1.08401239e-10, 1.07494236e-10, 1.08711720e-10],\n", + " [6.15563989e-11, 4.56989759e-11, 4.46428450e-11, ...,\n", + " 1.30999808e-10, 1.26581134e-10, 1.39005307e-10],\n", + " [6.15933693e-11, 3.98656906e-11, 3.75483949e-11, ...,\n", + " 1.37105632e-10, 1.48587462e-10, 1.83946344e-10],\n", + " ...,\n", + " [4.68582569e-11, 4.44464673e-11, 4.43960736e-11, ...,\n", + " 5.86025117e-11, 5.84869791e-11, 6.32652056e-11],\n", + " [4.99817097e-11, 4.49490271e-11, 4.43218864e-11, ...,\n", + " 6.19639479e-11, 6.07859180e-11, 6.55651922e-11],\n", + " [4.98553143e-11, 4.61104453e-11, 4.96835975e-11, ...,\n", + " 6.42673414e-11, 6.38328765e-11, 6.38894007e-11]]]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'lev', 'rlat', 'rlon'),\n", + " 'units': 'kg/m3',\n", + " 'long_name': 'TRACERS_044',\n", + " 'coordinates': 'lat lon',\n", + " 'cell_methods': 'time: maximum (interval: 1hr)',\n", + " 'grid_mapping': 'rotated_pole'}}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
Write dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating rotated_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing O3_all var (1/1)\n", + "Rank 000: Var O3_all created (1/1)\n", + "Rank 000: Filling O3_all)\n", + "Rank 000: Var O3_all data (1/1)\n", + "Rank 000: Var O3_all completed (1/1)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('rotated_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": true + }, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('rotated_file_1.nc', info=True)\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 1, time_nv: 2, lev: 24, rlat: 271, rlon: 351)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2021-08-03\n",
+       "  * lev           (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n",
+       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
+       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
+       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
+       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
+       "Dimensions without coordinates: time_nv\n",
+       "Data variables:\n",
+       "    time_bnds     (time, time_nv) datetime64[ns] 2021-08-03 2021-08-07\n",
+       "    O3_all        (time, lev, rlat, rlon) float32 ...\n",
+       "    rotated_pole  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7\n",
+       "    comment:      Generated on marenostrum4
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, time_nv: 2, lev: 24, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-08-03\n", + " * lev (lev) float64 0.0 1.0 2.0 3.0 4.0 ... 19.0 20.0 21.0 22.0 23.0\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Dimensions without coordinates: time_nv\n", + "Data variables:\n", + " time_bnds (time, time_nv) datetime64[ns] ...\n", + " O3_all (time, lev, rlat, rlon) float32 ...\n", + " rotated_pole |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7\n", + " comment: Generated on marenostrum4" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('rotated_file_1.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.3-points_grids.ipynb b/Jupyter_notebooks/1.3-points_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..966ab17efc2bc372b374a42703695727d852c786 --- /dev/null +++ b/Jupyter_notebooks/1.3-points_grids.ipynb @@ -0,0 +1,4624 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write observational datasets" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# nc_path_1 = '/esarchive/obs/eea/eionet/hourly/pm10/pm10_202107.nc' # EIONET\n", + "nc_path_1 = '/esarchive/obs/nilu/ebas/daily/pm10/pm10_201507.nc' # EBAS" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read and write - Non-GHOST type" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                       (station: 84, time: 31)\n",
+       "Coordinates:\n",
+       "  * time                          (time) datetime64[ns] 2015-07-01 ... 2015-0...\n",
+       "Dimensions without coordinates: station\n",
+       "Data variables: (12/19)\n",
+       "    station_start_date            (station) |S75 b'1980-01-01' ... b'nan'\n",
+       "    station_zone                  (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    street_type                   (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    country_code                  (station) |S75 b'CH' b'CH' ... b'NL' b'IT'\n",
+       "    ccaa                          (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_name                  (station) |S75 b'payerne' ... b'lamezia terme'\n",
+       "    ...                            ...\n",
+       "    station_code                  (station) |S75 b'CH0002R' ... b'IT0016R'\n",
+       "    longitude                     (station) float32 6.944 8.905 ... 6.277 16.23\n",
+       "    station_end_date              (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_rural_back            (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    latitude                      (station) float32 46.81 47.48 ... 53.33 38.88\n",
+       "    station_ozone_classification  (station) |S75 b'rural' b'rural' ... b'nan'
" + ], + "text/plain": [ + "\n", + "Dimensions: (station: 84, time: 31)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2015-07-01 ... 2015-0...\n", + "Dimensions without coordinates: station\n", + "Data variables: (12/19)\n", + " station_start_date (station) |S75 ...\n", + " station_zone (station) |S75 ...\n", + " street_type (station) |S75 ...\n", + " country_code (station) |S75 ...\n", + " ccaa (station) |S75 ...\n", + " station_name (station) |S75 ...\n", + " ... ...\n", + " station_code (station) |S75 ...\n", + " longitude (station) float32 ...\n", + " station_end_date (station) |S75 ...\n", + " station_rural_back (station) |S75 ...\n", + " latitude (station) float32 ...\n", + " station_ozone_classification (station) |S75 ..." + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True, parallel_method='X')\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2015, 7, 1, 0, 0),\n", + " datetime.datetime(2015, 7, 1, 1, 0),\n", + " datetime.datetime(2015, 7, 1, 2, 0),\n", + " datetime.datetime(2015, 7, 1, 3, 0),\n", + " datetime.datetime(2015, 7, 1, 4, 0),\n", + " datetime.datetime(2015, 7, 1, 5, 0),\n", + " datetime.datetime(2015, 7, 1, 6, 0),\n", + " datetime.datetime(2015, 7, 1, 7, 0),\n", + " datetime.datetime(2015, 7, 1, 8, 0),\n", + " datetime.datetime(2015, 7, 1, 9, 0),\n", + " datetime.datetime(2015, 7, 1, 10, 0),\n", + " datetime.datetime(2015, 7, 1, 11, 0),\n", + " datetime.datetime(2015, 7, 1, 12, 0),\n", + " datetime.datetime(2015, 7, 1, 13, 0),\n", + " datetime.datetime(2015, 7, 1, 14, 0),\n", + " datetime.datetime(2015, 7, 1, 15, 0),\n", + " datetime.datetime(2015, 7, 1, 16, 0),\n", + " datetime.datetime(2015, 7, 1, 17, 0),\n", + " datetime.datetime(2015, 7, 1, 18, 0),\n", + " datetime.datetime(2015, 7, 1, 19, 0),\n", + " datetime.datetime(2015, 7, 1, 20, 0),\n", + " datetime.datetime(2015, 7, 1, 21, 0),\n", + " datetime.datetime(2015, 7, 1, 22, 0),\n", + " datetime.datetime(2015, 7, 1, 23, 0),\n", + " datetime.datetime(2015, 7, 2, 0, 0),\n", + " datetime.datetime(2015, 7, 2, 1, 0),\n", + " datetime.datetime(2015, 7, 2, 2, 0),\n", + " datetime.datetime(2015, 7, 2, 3, 0),\n", + " datetime.datetime(2015, 7, 2, 4, 0),\n", + " datetime.datetime(2015, 7, 2, 5, 0),\n", + " datetime.datetime(2015, 7, 2, 6, 0)]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", + " 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,\n", + " 34, 
35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,\n", + " 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,\n", + " 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83]),\n", + " 'units': ''}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.station" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[46.813057, 47.479767, 47.04947 , 47.06741 , 35.039165,\n", + " 56.354443, 56.711666, 55.694107, 59.494446, 58.37611 ,\n", + " 39.546944, 42.720554, 39.87528 , 37.23722 , 43.439167,\n", + " 41.274166, 42.319168, 38.47278 , 39.08278 , 41.238888,\n", + " 41.39389 , 42.634724, 37.051945, 51.57108 , 55.79216 ,\n", + " 46.966667, 51.933334, 64.1 , 45.814445, 58.433 ,\n", + " 65.833336, 62.783333, 60.366665, 51.814445, 50.73639 ,\n", + " 54.754166, 54.125275, 43.404 , 63.84889 , 56.028057,\n", + " 58.80111 , 57.393612, 45.56139 , 48.942223, 49.04222 ,\n", + " 78.90667 , 52.802223, 52.083332, 53.662117, 27.28999 ,\n", + " 51.54111 , 47.83861 , 40.384445, 47.766666, 51.974445,\n", + " 49.573395, 54.433334, 56.161945, 59.779167, 40.3358 ,\n", + " 49.733334, 54.9 , 52.11667 , 43.15 , 35.316666,\n", + " 55.37611 , 47.914722, 53.166668, 42.1 , 60.53002 ,\n", + " 51.53014 , 52.86861 , 55.375 , 54.925556, 50.65 ,\n", + " 49.066666, 31.34274 , 68. , 46.5475 , 52.3 ,\n", + " 46.677776, 52.178074, 53.33389 , 38.8763 ],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station',),\n", + " 'units': 'degrees_north',\n", + " 'long_name': 'latitude',\n", + " 'standard_name': 'latitude',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[ 6.944469, 8.904696, 6.979203, 8.463339,\n", + " 33.05806 , 9.603059, 11.533614, 12.088669,\n", + " 25.930569, 21.845013, -4.350556, -8.923611,\n", + " 4.316389, -3.534167, -4.85 , -3.1425 ,\n", + " 3.315833, -6.923611, -1.101111, -5.8975 ,\n", + " 0.734722, -7.704722, -6.555278, -1.325286,\n", + " -3.2429 , 19.55 , -10.233334, -21.016945,\n", + " 8.637225, 8.269 , 13.916667, 8.883333,\n", + " 11.066667, 21.97139 , 15.739722, 17.534445,\n", + " 22.038055, 21.947 , 15.335577, 13.149458,\n", + " 17.381958, 11.91418 , 14.862789, 19.592232,\n", + " 22.259731, 11.888333, 10.759444, 6.566667,\n", + " 17.934017, 33.749886, 5.853611, 14.441389,\n", + " 44.260582, 16.766666, 4.923611, 15.080278,\n", + " 12.733333, 21.173056, 21.377222, 18.1245 ,\n", + " 16.05 , 37.8 , 5.2 , 19.133333,\n", + " 25.666668, 21.030556, 7.908611, 13.033333,\n", + " 12.633333, 27.66754 , 12.93386 , -6.924722,\n", + " -7.342778, 8.309722, 10.766667, 13.6 ,\n", + " 27.217775, 24.237223, 7.985 , 4.5 ,\n", + " 12.972222, -6.364524, 6.277222, 16.2322 ],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('station',),\n", + " 'units': 'degrees_east',\n", + " 'long_name': 'longitude',\n", + " 'standard_name': 'longitude',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading station_start_date var (1/17)\n", + "Rank 000: Loaded station_start_date var ((84,))\n", + "Rank 000: Loading station_zone var (2/17)\n", + "Rank 000: Loaded station_zone var ((84,))\n", + "Rank 000: Loading street_type var (3/17)\n", + "Rank 000: Loaded street_type var ((84,))\n", + "Rank 000: Loading country_code var (4/17)\n", + "Rank 000: Loaded country_code var ((84,))\n", + "Rank 000: Loading ccaa var (5/17)\n", + "Rank 000: Loaded ccaa var ((84,))\n", + "Rank 000: Loading station_name var (6/17)\n", + "Rank 000: Loaded station_name var ((84,))\n", + "Rank 000: Loading station_area var (7/17)\n", + "Rank 000: Loaded station_area var ((84,))\n", + "Rank 000: Loading city var (8/17)\n", + "Rank 000: Loaded city var ((84,))\n", + "Rank 000: Loading pm10 var (9/17)\n", + "Rank 000: Loaded pm10 var ((31, 84))\n", + "Rank 000: Loading station_emep var (10/17)\n", + "Rank 000: Loaded station_emep var ((84,))\n", + "Rank 000: Loading station_type var (11/17)\n", + "Rank 000: Loaded station_type var ((84,))\n", + "Rank 000: Loading country var (12/17)\n", + "Rank 000: Loaded country var ((84,))\n", + "Rank 000: Loading altitude var (13/17)\n", + "Rank 000: Loaded altitude var ((84,))\n", + "Rank 000: Loading station_code var (14/17)\n", + "Rank 000: Loaded station_code var ((84,))\n", + "Rank 000: Loading station_end_date var (15/17)\n", + "Rank 000: Loaded station_end_date var ((84,))\n", + "Rank 000: Loading station_rural_back var (16/17)\n", + "Rank 000: Loaded station_rural_back var ((84,))\n", + "Rank 000: Loading station_ozone_classification var (17/17)\n", + "Rank 000: Loaded station_ozone_classification var ((84,))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'station_start_date': {'data': array(['1980-01-01', '1980-01-01', '1991-05-01', '1991-05-01',\n", + " '1996-10-01', '1978-10-01', '1987-01-01', '2010-07-01',\n", + " '1985-01-06', '1994-05-01', '1987-11-01', '1992-01-01',\n", + " '1992-06-01', '1995-11-01', '1998-09-01', '1998-01-01',\n", + " '1997-07-01', '1999-03-03', '1997-06-11', '2000-08-01',\n", + " '2000-11-01', '2001-01-31', '2007-10-03', '1976-06-22',\n", + " '2006-01-01', '1980-01-01', '2001-01-01', '1900-01-01',\n", + " '1985-11-01', '2009-01-01', '1971-12-01', '1978-02-01',\n", + " '1997-01-01', '1967-09-15', '1981-01-01', '1995-01-01',\n", + " '1984-12-01', '2011-01-01', '1979-01-09', '1984-01-09',\n", + " '1990-01-01', '2002-01-01', '1996-05-01', '1997-01-01',\n", + " '1997-01-01', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan',\n", + " 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan',\n", + " 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan',\n", + " 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan', 'nan',\n", + " 'nan', 'nan', 'nan', 'nan', 'nan'], dtype='" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('points_file_1.nc', info=True, parallel_method='X')\n", + "nessy_2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                       (time: 31, station: 84)\n",
+       "Coordinates:\n",
+       "  * time                          (time) datetime64[ns] 2015-07-01 ... 2015-0...\n",
+       "  * station                       (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n",
+       "Data variables: (12/19)\n",
+       "    station_start_date            (station) |S75 b'1980-01-01' ... b'nan'\n",
+       "    station_zone                  (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    street_type                   (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    country_code                  (station) |S75 b'CH' b'CH' ... b'NL' b'IT'\n",
+       "    ccaa                          (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_name                  (station) |S75 b'payerne' ... b'lamezia terme'\n",
+       "    ...                            ...\n",
+       "    station_code                  (station) |S75 b'CH0002R' ... b'IT0016R'\n",
+       "    station_end_date              (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_rural_back            (station) |S75 b'nan' b'nan' ... b'nan' b'nan'\n",
+       "    station_ozone_classification  (station) |S75 b'rural' b'rural' ... b'nan'\n",
+       "    lat                           (station) float64 46.81 47.48 ... 53.33 38.88\n",
+       "    lon                           (station) float64 6.944 8.905 ... 6.277 16.23\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 31, station: 84)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2015-07-01 ... 2015-0...\n", + " * station (station) float64 0.0 1.0 2.0 ... 82.0 83.0\n", + "Data variables: (12/19)\n", + " station_start_date (station) |S75 ...\n", + " station_zone (station) |S75 ...\n", + " street_type (station) |S75 ...\n", + " country_code (station) |S75 ...\n", + " ccaa (station) |S75 ...\n", + " station_name (station) |S75 ...\n", + " ... ...\n", + " station_code (station) |S75 ...\n", + " station_end_date (station) |S75 ...\n", + " station_rural_back (station) |S75 ...\n", + " station_ozone_classification (station) |S75 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_file_1.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Read and write - GHOST type" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.4/hourly/sconcno2/sconcno2_202004.nc' #EBAS\n", + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/AERONET_v3/1.3.3/hourly/od1020aero/od1020aero_201907.nc' # AERONET\n", + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/CANADA_NAPS/1.4/daily/sconcno2/sconcno2_202011.nc' # CANADA NAPS\n", + "# nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/CHILE_SINCA/1.4/monthly/pm10/pm10_201905.nc' # CHILE SINCA\n", + "nc_path_2 = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EANET/1.4/daily/sconcso4/sconcso4_201911.nc' # EANET" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                                                           (station: 3, time: 30, N_flag_codes: 190, N_qa_codes: 77)\n",
+       "Coordinates:\n",
+       "  * time                                                              (time) datetime64[ns] ...\n",
+       "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n",
+       "Data variables: (12/177)\n",
+       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NMVOC_emissions                       (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NOx_emissions                         (station) float32 ...\n",
+       "    ...                                                                ...\n",
+       "    street_type                                                       (station) object ...\n",
+       "    street_width                                                      (station) float32 ...\n",
+       "    terrain                                                           (station) object ...\n",
+       "    vertical_datum                                                    (station) object ...\n",
+       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
+       "    sconcso4_prefiltered_defaultqa                                    (station, time) float32 ...\n",
+       "Attributes:\n",
+       "    title:          Surface sulphate data in the EANET network in 2019-11.\n",
+       "    institution:    Barcelona Supercomputing Center\n",
+       "    source:         Surface observations\n",
+       "    creator_name:   Dene R. Bowdalo\n",
+       "    creator_email:  dene.bowdalo@bsc.es\n",
+       "    version:        1.4
" + ], + "text/plain": [ + "\n", + "Dimensions: (station: 3, time: 30, N_flag_codes: 190, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n", + "Data variables: (12/177)\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NMVOC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NOx_emissions (station) float32 ...\n", + " ... ...\n", + " street_type (station) object ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + " sconcso4_prefiltered_defaultqa (station, time) float32 ...\n", + "Attributes:\n", + " title: Surface sulphate data in the EANET network in 2019-11.\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " version: 1.4" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1 = open_netcdf(path=nc_path_2, info=True, parallel_method='X')\n", + "nessy_ghost_1" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2019, 11, 1, 0, 0),\n", + " datetime.datetime(2019, 11, 2, 0, 0),\n", + " datetime.datetime(2019, 11, 3, 0, 0),\n", + " datetime.datetime(2019, 11, 4, 0, 0),\n", + " datetime.datetime(2019, 11, 5, 0, 0),\n", + " datetime.datetime(2019, 11, 6, 0, 0),\n", + " datetime.datetime(2019, 11, 7, 0, 0),\n", + " datetime.datetime(2019, 11, 8, 0, 0),\n", + " datetime.datetime(2019, 11, 9, 0, 0),\n", + " datetime.datetime(2019, 11, 10, 0, 0),\n", + " datetime.datetime(2019, 11, 11, 0, 0),\n", + " datetime.datetime(2019, 11, 12, 0, 0),\n", + " datetime.datetime(2019, 11, 13, 0, 0),\n", + " datetime.datetime(2019, 11, 14, 0, 0),\n", + " datetime.datetime(2019, 11, 15, 0, 0),\n", + " datetime.datetime(2019, 11, 16, 0, 0),\n", + " datetime.datetime(2019, 11, 17, 0, 0),\n", + " datetime.datetime(2019, 11, 18, 0, 0),\n", + " datetime.datetime(2019, 11, 19, 0, 0),\n", + " datetime.datetime(2019, 11, 20, 0, 0),\n", + " datetime.datetime(2019, 11, 21, 0, 0),\n", + " datetime.datetime(2019, 11, 22, 0, 0),\n", + " datetime.datetime(2019, 11, 23, 0, 0),\n", + " datetime.datetime(2019, 11, 24, 0, 0),\n", + " datetime.datetime(2019, 11, 25, 0, 0),\n", + " datetime.datetime(2019, 11, 26, 0, 0),\n", + " datetime.datetime(2019, 11, 27, 0, 0),\n", + " datetime.datetime(2019, 11, 28, 0, 0),\n", + " datetime.datetime(2019, 11, 29, 0, 0),\n", + " datetime.datetime(2019, 11, 30, 0, 0)]" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.time" + ] + }, + { + "cell_type": "code", + 
"execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0, 1, 2]), 'units': ''}" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.station" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[37.70888889, 33.29222222, 35.6025 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[126.27388889, 126.16194444, 127.18138889],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading ASTER_v3_altitude var (1/173)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (2/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (3/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (4/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (6/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (7/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (8/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (9/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/173)\n", + "Rank 000: Loaded 
EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/173)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((3,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (12/173)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_Meybeck_landform_classification var (13/173)\n", + "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (14/173)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (15/173)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (16/173)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((3,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (17/173)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((3,))\n", + "Rank 000: Loading ETOPO1_altitude var (18/173)\n", + "Rank 000: Loaded ETOPO1_altitude var ((3,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (19/173)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((3,))\n", + "Rank 000: Loading GHOST_version var (20/173)\n", + "Rank 000: Loaded GHOST_version var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (21/173)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (22/173)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (23/173)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (24/173)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (25/173)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (26/173)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (27/173)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (28/173)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (29/173)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (30/173)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((3,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (31/173)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((3,))\n", + "Rank 000: Loading GHSL_population_density var (32/173)\n", + "Rank 000: Loaded GHSL_population_density var ((3,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (33/173)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (34/173)\n", + 
"Rank 000: Loaded GPW_average_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (35/173)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (36/173)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((3,))\n", + "Rank 000: Loading GPW_max_population_density_5km var (37/173)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((3,))\n", + "Rank 000: Loading GPW_population_density var (38/173)\n", + "Rank 000: Loaded GPW_population_density var ((3,))\n", + "Rank 000: Loading GSFC_coastline_proximity var (39/173)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((3,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (40/173)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (41/173)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (42/173)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((3,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (43/173)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (44/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (45/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (46/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (49/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (50/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((3,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/173)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/173)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/173)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/173)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/173)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((3,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/173)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((3,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (58/173)\n", + "Rank 000: Loaded 
OMI_level3_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (59/173)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (60/173)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/173)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var ((3,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (62/173)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (63/173)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((3,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (64/173)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((3,))\n", + "Rank 000: Loading WMO_region var (65/173)\n", + "Rank 000: Loaded WMO_region var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (66/173)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((3,))\n", + "Rank 000: Loading WWF_TEOW_biome var (67/173)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((3,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (68/173)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((3,))\n", + "Rank 000: Loading administrative_country_division_1 var (69/173)\n", + "Rank 000: Loaded administrative_country_division_1 var ((3,))\n", + "Rank 000: Loading administrative_country_division_2 var (70/173)\n", + "Rank 000: Loaded administrative_country_division_2 var ((3,))\n", + "Rank 000: Loading altitude var (71/173)\n", + "Rank 000: Loaded altitude var ((3,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (72/173)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading annual_native_representativity_percent var (73/173)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading area_classification var (74/173)\n", + "Rank 000: Loaded area_classification var ((3,))\n", + "Rank 000: Loading associated_networks var (75/173)\n", + "Rank 000: Loaded associated_networks var ((3,))\n", + "Rank 000: Loading city var (76/173)\n", + "Rank 000: Loaded city var ((3,))\n", + "Rank 000: Loading climatology var (77/173)\n", + "Rank 000: Loaded climatology var ((3,))\n", + "Rank 000: Loading contact_email_address var (78/173)\n", + "Rank 000: Loaded contact_email_address var ((3,))\n", + "Rank 000: Loading contact_institution var (79/173)\n", + "Rank 000: Loaded contact_institution var ((3,))\n", + "Rank 000: Loading contact_name var (80/173)\n", + "Rank 000: Loaded contact_name var ((3,))\n", + "Rank 000: Loading country var (81/173)\n", + "Rank 000: Loaded country var ((3,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (82/173)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading daily_native_representativity_percent var (83/173)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading daily_passing_vehicles var (84/173)\n", + "Rank 000: Loaded daily_passing_vehicles var ((3,))\n", + "Rank 000: Loading data_level var (85/173)\n", + "Rank 000: Loaded data_level var ((3,))\n", + "Rank 000: Loading data_licence var (86/173)\n", + "Rank 
000: Loaded data_licence var ((3,))\n", + "Rank 000: Loading day_night_code var (87/173)\n", + "Rank 000: Loaded day_night_code var ((3, 30))\n", + "Rank 000: Loading daytime_traffic_speed var (88/173)\n", + "Rank 000: Loaded daytime_traffic_speed var ((3,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (89/173)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading distance_to_building var (90/173)\n", + "Rank 000: Loaded distance_to_building var ((3,))\n", + "Rank 000: Loading distance_to_junction var (91/173)\n", + "Rank 000: Loaded distance_to_junction var ((3,))\n", + "Rank 000: Loading distance_to_kerb var (92/173)\n", + "Rank 000: Loaded distance_to_kerb var ((3,))\n", + "Rank 000: Loading distance_to_source var (93/173)\n", + "Rank 000: Loaded distance_to_source var ((3,))\n", + "Rank 000: Loading ellipsoid var (94/173)\n", + "Rank 000: Loaded ellipsoid var ((3,))\n", + "Rank 000: Loading horizontal_datum var (95/173)\n", + "Rank 000: Loaded horizontal_datum var ((3,))\n", + "Rank 000: Loading land_use var (96/173)\n", + "Rank 000: Loaded land_use var ((3,))\n", + "Rank 000: Loading main_emission_source var (97/173)\n", + "Rank 000: Loaded main_emission_source var ((3,))\n", + "Rank 000: Loading measurement_altitude var (98/173)\n", + "Rank 000: Loaded measurement_altitude var ((3,))\n", + "Rank 000: Loading measurement_methodology var (99/173)\n", + "Rank 000: Loaded measurement_methodology var ((3,))\n", + "Rank 000: Loading measurement_scale var (100/173)\n", + "Rank 000: Loaded measurement_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (101/173)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (102/173)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (103/173)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (104/173)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (105/173)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (106/173)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (107/173)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_span_drift var (108/173)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (109/173)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (110/173)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (111/173)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_documented_zonal_drift var (112/173)\n", + 
"Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_further_details var (113/173)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (114/173)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((3,))\n", + "Rank 000: Loading measuring_instrument_manual_name var (115/173)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_name var (116/173)\n", + "Rank 000: Loaded measuring_instrument_name var ((3,))\n", + "Rank 000: Loading measuring_instrument_process_details var (117/173)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (118/173)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (119/173)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (120/173)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (121/173)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (122/173)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (123/173)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (124/173)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (125/173)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (126/173)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (127/173)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (128/173)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (129/173)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((3,))\n", + "Rank 000: Loading measuring_instrument_sampling_type var (130/173)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((3,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (131/173)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((3, 30))\n", + "Rank 000: Loading monthly_native_representativity_percent var (132/173)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((3, 30))\n", + "Rank 000: Loading network var (133/173)\n", + "Rank 000: Loaded network var ((3,))\n", + "Rank 000: Loading network_maintenance_details var (134/173)\n", + "Rank 000: Loaded network_maintenance_details var ((3,))\n", + "Rank 000: Loading network_miscellaneous_details var (135/173)\n", + "Rank 000: Loaded network_miscellaneous_details var ((3,))\n", + 
"Rank 000: Loading network_provided_volume_standard_pressure var (136/173)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((3,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (137/173)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((3,))\n", + "Rank 000: Loading network_qa_details var (138/173)\n", + "Rank 000: Loaded network_qa_details var ((3,))\n", + "Rank 000: Loading network_sampling_details var (139/173)\n", + "Rank 000: Loaded network_sampling_details var ((3,))\n", + "Rank 000: Loading network_uncertainty_details var (140/173)\n", + "Rank 000: Loaded network_uncertainty_details var ((3,))\n", + "Rank 000: Loading population var (141/173)\n", + "Rank 000: Loaded population var ((3,))\n", + "Rank 000: Loading primary_sampling_further_details var (142/173)\n", + "Rank 000: Loaded primary_sampling_further_details var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (143/173)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (144/173)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (145/173)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((3,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (146/173)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((3,))\n", + "Rank 000: Loading primary_sampling_process_details var (147/173)\n", + "Rank 000: Loaded primary_sampling_process_details var ((3,))\n", + "Rank 000: Loading primary_sampling_type var (148/173)\n", + "Rank 000: Loaded primary_sampling_type var ((3,))\n", + "Rank 000: Loading principal_investigator_email_address var (149/173)\n", + "Rank 000: Loaded principal_investigator_email_address var ((3,))\n", + "Rank 000: Loading principal_investigator_institution var (150/173)\n", + "Rank 000: Loaded principal_investigator_institution var ((3,))\n", + "Rank 000: Loading principal_investigator_name var (151/173)\n", + "Rank 000: Loaded principal_investigator_name var ((3,))\n", + "Rank 000: Loading process_warnings var (152/173)\n", + "Rank 000: Loaded process_warnings var ((3,))\n", + "Rank 000: Loading projection var (153/173)\n", + "Rank 000: Loaded projection var ((3,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (154/173)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((3, 30))\n", + "Rank 000: Loading representative_radius var (155/173)\n", + "Rank 000: Loaded representative_radius var ((3,))\n", + "Rank 000: Loading retrieval_algorithm var (156/173)\n", + "Rank 000: Loaded retrieval_algorithm var ((3,))\n", + "Rank 000: Loading sample_preparation_further_details var (157/173)\n", + "Rank 000: Loaded sample_preparation_further_details var ((3,))\n", + "Rank 000: Loading sample_preparation_process_details var (158/173)\n", + "Rank 000: Loaded sample_preparation_process_details var ((3,))\n", + "Rank 000: Loading sample_preparation_techniques var (159/173)\n", + "Rank 000: Loaded sample_preparation_techniques var ((3,))\n", + "Rank 000: Loading sample_preparation_types var (160/173)\n", + "Rank 000: Loaded sample_preparation_types var ((3,))\n", + "Rank 000: Loading sampling_height var (161/173)\n", + "Rank 000: Loaded sampling_height var ((3,))\n", + "Rank 000: Loading sconcso4 var (162/173)\n", + "Rank 000: Loaded sconcso4 
var ((3, 30))\n", + "Rank 000: Loading season_code var (163/173)\n", + "Rank 000: Loaded season_code var ((3, 30))\n", + "Rank 000: Loading station_classification var (164/173)\n", + "Rank 000: Loaded station_classification var ((3,))\n", + "Rank 000: Loading station_name var (165/173)\n", + "Rank 000: Loaded station_name var ((3,))\n", + "Rank 000: Loading station_reference var (166/173)\n", + "Rank 000: Loaded station_reference var ((3,))\n", + "Rank 000: Loading station_timezone var (167/173)\n", + "Rank 000: Loaded station_timezone var ((3,))\n", + "Rank 000: Loading street_type var (168/173)\n", + "Rank 000: Loaded street_type var ((3,))\n", + "Rank 000: Loading street_width var (169/173)\n", + "Rank 000: Loaded street_width var ((3,))\n", + "Rank 000: Loading terrain var (170/173)\n", + "Rank 000: Loaded terrain var ((3,))\n", + "Rank 000: Loading vertical_datum var (171/173)\n", + "Rank 000: Loaded vertical_datum var ((3,))\n", + "Rank 000: Loading weekday_weekend_code var (172/173)\n", + "Rank 000: Loaded weekday_weekend_code var ((3, 30))\n", + "Rank 000: Loading sconcso4_prefiltered_defaultqa var (173/173)\n", + "Rank 000: Loaded sconcso4_prefiltered_defaultqa var ((3, 30))\n" + ] + } + ], + "source": [ + "nessy_ghost_1.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_file_2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing ASTER_v3_altitude var (1/173)\n", + "Rank 000: Var ASTER_v3_altitude created (1/173)\n", + "Rank 000: Var ASTER_v3_altitude data (1/173)\n", + "Rank 000: Var ASTER_v3_altitude completed (1/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_BC_emissions var (2/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions created (2/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions data (2/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions completed (2/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_CO_emissions var (3/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions created (3/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions data (3/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions completed (3/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NH3_emissions var (4/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions created (4/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions data (4/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions completed (4/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions created (5/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions data (5/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions completed (5/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NOx_emissions var (6/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions created (6/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions data (6/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions completed (6/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_OC_emissions var (7/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions 
created (7/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions data (7/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions completed (7/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_PM10_emissions var (8/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions created (8/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions data (8/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions completed (8/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_SO2_emissions var (9/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions created (9/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions data (9/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions completed (9/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions created (10/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions data (10/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions completed (10/173)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions created (11/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions data (11/173)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions completed (11/173)\n", + "Rank 000: Writing ESDAC_Iwahashi_landform_classification var (12/173)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification created (12/173)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification data (12/173)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification completed (12/173)\n", + "Rank 000: Writing ESDAC_Meybeck_landform_classification var (13/173)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification created (13/173)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification data (13/173)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification completed (13/173)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_25km var (14/173)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km created (14/173)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km data (14/173)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km completed (14/173)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_5km var (15/173)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km created (15/173)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km data (15/173)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km completed (15/173)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_25km var (16/173)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km created (16/173)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km data (16/173)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km completed (16/173)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_5km var (17/173)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km created (17/173)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km data (17/173)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km completed (17/173)\n", + "Rank 000: 
Writing ETOPO1_altitude var (18/173)\n", + "Rank 000: Var ETOPO1_altitude created (18/173)\n", + "Rank 000: Var ETOPO1_altitude data (18/173)\n", + "Rank 000: Var ETOPO1_altitude completed (18/173)\n", + "Rank 000: Writing ETOPO1_max_altitude_difference_5km var (19/173)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km created (19/173)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km data (19/173)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km completed (19/173)\n", + "Rank 000: Writing GHOST_version var (20/173)\n", + "Rank 000: Var GHOST_version created (20/173)\n", + "Rank 000: Var GHOST_version data (20/173)\n", + "Rank 000: Var GHOST_version completed (20/173)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_25km var (21/173)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km created (21/173)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km data (21/173)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km completed (21/173)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_5km var (22/173)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km created (22/173)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km data (22/173)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km completed (22/173)\n", + "Rank 000: Writing GHSL_average_population_density_25km var (23/173)\n", + "Rank 000: Var GHSL_average_population_density_25km created (23/173)\n", + "Rank 000: Var GHSL_average_population_density_25km data (23/173)\n", + "Rank 000: Var GHSL_average_population_density_25km completed (23/173)\n", + "Rank 000: Writing GHSL_average_population_density_5km var (24/173)\n", + "Rank 000: Var GHSL_average_population_density_5km created (24/173)\n", + "Rank 000: Var GHSL_average_population_density_5km data (24/173)\n", + "Rank 000: Var GHSL_average_population_density_5km completed (24/173)\n", + "Rank 000: Writing GHSL_built_up_area_density var (25/173)\n", + "Rank 000: Var GHSL_built_up_area_density created (25/173)\n", + "Rank 000: Var GHSL_built_up_area_density data (25/173)\n", + "Rank 000: Var GHSL_built_up_area_density completed (25/173)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_25km var (26/173)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km created (26/173)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km data (26/173)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km completed (26/173)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_5km var (27/173)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km created (27/173)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km data (27/173)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km completed (27/173)\n", + "Rank 000: Writing GHSL_max_population_density_25km var (28/173)\n", + "Rank 000: Var GHSL_max_population_density_25km created (28/173)\n", + "Rank 000: Var GHSL_max_population_density_25km data (28/173)\n", + "Rank 000: Var GHSL_max_population_density_25km completed (28/173)\n", + "Rank 000: Writing GHSL_max_population_density_5km var (29/173)\n", + "Rank 000: Var GHSL_max_population_density_5km created (29/173)\n", + "Rank 000: Var GHSL_max_population_density_5km data (29/173)\n", + "Rank 000: Var GHSL_max_population_density_5km completed (29/173)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_25km var (30/173)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km created (30/173)\n", + "Rank 000: Var 
GHSL_modal_settlement_model_classification_25km data (30/173)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km completed (30/173)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_5km var (31/173)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km created (31/173)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km data (31/173)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km completed (31/173)\n", + "Rank 000: Writing GHSL_population_density var (32/173)\n", + "Rank 000: Var GHSL_population_density created (32/173)\n", + "Rank 000: Var GHSL_population_density data (32/173)\n", + "Rank 000: Var GHSL_population_density completed (32/173)\n", + "Rank 000: Writing GHSL_settlement_model_classification var (33/173)\n", + "Rank 000: Var GHSL_settlement_model_classification created (33/173)\n", + "Rank 000: Var GHSL_settlement_model_classification data (33/173)\n", + "Rank 000: Var GHSL_settlement_model_classification completed (33/173)\n", + "Rank 000: Writing GPW_average_population_density_25km var (34/173)\n", + "Rank 000: Var GPW_average_population_density_25km created (34/173)\n", + "Rank 000: Var GPW_average_population_density_25km data (34/173)\n", + "Rank 000: Var GPW_average_population_density_25km completed (34/173)\n", + "Rank 000: Writing GPW_average_population_density_5km var (35/173)\n", + "Rank 000: Var GPW_average_population_density_5km created (35/173)\n", + "Rank 000: Var GPW_average_population_density_5km data (35/173)\n", + "Rank 000: Var GPW_average_population_density_5km completed (35/173)\n", + "Rank 000: Writing GPW_max_population_density_25km var (36/173)\n", + "Rank 000: Var GPW_max_population_density_25km created (36/173)\n", + "Rank 000: Var GPW_max_population_density_25km data (36/173)\n", + "Rank 000: Var GPW_max_population_density_25km completed (36/173)\n", + "Rank 000: Writing GPW_max_population_density_5km var (37/173)\n", + "Rank 000: Var GPW_max_population_density_5km created (37/173)\n", + "Rank 000: Var GPW_max_population_density_5km data (37/173)\n", + "Rank 000: Var GPW_max_population_density_5km completed (37/173)\n", + "Rank 000: Writing GPW_population_density var (38/173)\n", + "Rank 000: Var GPW_population_density created (38/173)\n", + "Rank 000: Var GPW_population_density data (38/173)\n", + "Rank 000: Var GPW_population_density completed (38/173)\n", + "Rank 000: Writing GSFC_coastline_proximity var (39/173)\n", + "Rank 000: Var GSFC_coastline_proximity created (39/173)\n", + "Rank 000: Var GSFC_coastline_proximity data (39/173)\n", + "Rank 000: Var GSFC_coastline_proximity completed (39/173)\n", + "Rank 000: Writing Joly-Peuch_classification_code var (40/173)\n", + "Rank 000: Var Joly-Peuch_classification_code created (40/173)\n", + "Rank 000: Var Joly-Peuch_classification_code data (40/173)\n", + "Rank 000: Var Joly-Peuch_classification_code completed (40/173)\n", + "Rank 000: Writing Koppen-Geiger_classification var (41/173)\n", + "Rank 000: Var Koppen-Geiger_classification created (41/173)\n", + "Rank 000: Var Koppen-Geiger_classification data (41/173)\n", + "Rank 000: Var Koppen-Geiger_classification completed (41/173)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_25km var (42/173)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km created (42/173)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km data (42/173)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km completed (42/173)\n", + "Rank 000: Writing 
Koppen-Geiger_modal_classification_5km var (43/173)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km created (43/173)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km data (43/173)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km completed (43/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_IGBP_land_use var (44/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use created (44/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use data (44/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use completed (44/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_LAI var (45/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI created (45/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI data (45/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI completed (45/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_UMD_land_use var (46/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use created (46/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use data (46/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use completed (46/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km created (47/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km data (47/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km completed (47/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km created (48/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km data (48/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km completed (48/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_25km var (49/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km created (49/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km data (49/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km completed (49/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_5km var (50/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km created (50/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km data (50/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km completed (50/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km data (51/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km completed (51/173)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km created (52/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km data (52/173)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km completed (52/173)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km created (53/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km data (53/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km completed (53/173)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km created (54/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km data (54/173)\n", + "Rank 000: Var 
NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km completed (54/173)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km created (55/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km data (55/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km completed (55/173)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km created (56/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km data (56/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km completed (56/173)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights created (57/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights data (57/173)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights completed (57/173)\n", + "Rank 000: Writing OMI_level3_column_annual_average_NO2 var (58/173)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 created (58/173)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 data (58/173)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 completed (58/173)\n", + "Rank 000: Writing OMI_level3_column_cloud_screened_annual_average_NO2 var (59/173)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 created (59/173)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 data (59/173)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 completed (59/173)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_annual_average_NO2 var (60/173)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 created (60/173)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 data (60/173)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 completed (60/173)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/173)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 created (61/173)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 data (61/173)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 completed (61/173)\n", + "Rank 000: Writing UMBC_anthrome_classification var (62/173)\n", + "Rank 000: Var UMBC_anthrome_classification created (62/173)\n", + "Rank 000: Var UMBC_anthrome_classification data (62/173)\n", + "Rank 000: Var UMBC_anthrome_classification completed (62/173)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_25km var (63/173)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km created (63/173)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km data (63/173)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km completed (63/173)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_5km var (64/173)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km created (64/173)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km data (64/173)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km completed (64/173)\n", + "Rank 000: Writing WMO_region var (65/173)\n", + "Rank 000: Var WMO_region created (65/173)\n", + "Rank 000: Var WMO_region data (65/173)\n", + "Rank 000: Var 
WMO_region completed (65/173)\n", + "Rank 000: Writing WWF_TEOW_biogeographical_realm var (66/173)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm created (66/173)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm data (66/173)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm completed (66/173)\n", + "Rank 000: Writing WWF_TEOW_biome var (67/173)\n", + "Rank 000: Var WWF_TEOW_biome created (67/173)\n", + "Rank 000: Var WWF_TEOW_biome data (67/173)\n", + "Rank 000: Var WWF_TEOW_biome completed (67/173)\n", + "Rank 000: Writing WWF_TEOW_terrestrial_ecoregion var (68/173)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion created (68/173)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion data (68/173)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion completed (68/173)\n", + "Rank 000: Writing administrative_country_division_1 var (69/173)\n", + "Rank 000: Var administrative_country_division_1 created (69/173)\n", + "Rank 000: Var administrative_country_division_1 data (69/173)\n", + "Rank 000: Var administrative_country_division_1 completed (69/173)\n", + "Rank 000: Writing administrative_country_division_2 var (70/173)\n", + "Rank 000: Var administrative_country_division_2 created (70/173)\n", + "Rank 000: Var administrative_country_division_2 data (70/173)\n", + "Rank 000: Var administrative_country_division_2 completed (70/173)\n", + "Rank 000: Writing altitude var (71/173)\n", + "Rank 000: Var altitude created (71/173)\n", + "Rank 000: Var altitude data (71/173)\n", + "Rank 000: Var altitude completed (71/173)\n", + "Rank 000: Writing annual_native_max_gap_percent var (72/173)\n", + "Rank 000: Var annual_native_max_gap_percent created (72/173)\n", + "Rank 000: Var annual_native_max_gap_percent data (72/173)\n", + "Rank 000: Var annual_native_max_gap_percent completed (72/173)\n", + "Rank 000: Writing annual_native_representativity_percent var (73/173)\n", + "Rank 000: Var annual_native_representativity_percent created (73/173)\n", + "Rank 000: Var annual_native_representativity_percent data (73/173)\n", + "Rank 000: Var annual_native_representativity_percent completed (73/173)\n", + "Rank 000: Writing area_classification var (74/173)\n", + "Rank 000: Var area_classification created (74/173)\n", + "Rank 000: Var area_classification data (74/173)\n", + "Rank 000: Var area_classification completed (74/173)\n", + "Rank 000: Writing associated_networks var (75/173)\n", + "Rank 000: Var associated_networks created (75/173)\n", + "Rank 000: Var associated_networks data (75/173)\n", + "Rank 000: Var associated_networks completed (75/173)\n", + "Rank 000: Writing city var (76/173)\n", + "Rank 000: Var city created (76/173)\n", + "Rank 000: Var city data (76/173)\n", + "Rank 000: Var city completed (76/173)\n", + "Rank 000: Writing climatology var (77/173)\n", + "Rank 000: Var climatology created (77/173)\n", + "Rank 000: Var climatology data (77/173)\n", + "Rank 000: Var climatology completed (77/173)\n", + "Rank 000: Writing contact_email_address var (78/173)\n", + "Rank 000: Var contact_email_address created (78/173)\n", + "Rank 000: Var contact_email_address data (78/173)\n", + "Rank 000: Var contact_email_address completed (78/173)\n", + "Rank 000: Writing contact_institution var (79/173)\n", + "Rank 000: Var contact_institution created (79/173)\n", + "Rank 000: Var contact_institution data (79/173)\n", + "Rank 000: Var contact_institution completed (79/173)\n", + "Rank 000: Writing contact_name var (80/173)\n", + "Rank 000: Var contact_name created (80/173)\n", + "Rank 000: Var 
contact_name data (80/173)\n", + "Rank 000: Var contact_name completed (80/173)\n", + "Rank 000: Writing country var (81/173)\n", + "Rank 000: Var country created (81/173)\n", + "Rank 000: Var country data (81/173)\n", + "Rank 000: Var country completed (81/173)\n", + "Rank 000: Writing daily_native_max_gap_percent var (82/173)\n", + "Rank 000: Var daily_native_max_gap_percent created (82/173)\n", + "Rank 000: Var daily_native_max_gap_percent data (82/173)\n", + "Rank 000: Var daily_native_max_gap_percent completed (82/173)\n", + "Rank 000: Writing daily_native_representativity_percent var (83/173)\n", + "Rank 000: Var daily_native_representativity_percent created (83/173)\n", + "Rank 000: Var daily_native_representativity_percent data (83/173)\n", + "Rank 000: Var daily_native_representativity_percent completed (83/173)\n", + "Rank 000: Writing daily_passing_vehicles var (84/173)\n", + "Rank 000: Var daily_passing_vehicles created (84/173)\n", + "Rank 000: Var daily_passing_vehicles data (84/173)\n", + "Rank 000: Var daily_passing_vehicles completed (84/173)\n", + "Rank 000: Writing data_level var (85/173)\n", + "Rank 000: Var data_level created (85/173)\n", + "Rank 000: Var data_level data (85/173)\n", + "Rank 000: Var data_level completed (85/173)\n", + "Rank 000: Writing data_licence var (86/173)\n", + "Rank 000: Var data_licence created (86/173)\n", + "Rank 000: Var data_licence data (86/173)\n", + "Rank 000: Var data_licence completed (86/173)\n", + "Rank 000: Writing day_night_code var (87/173)\n", + "Rank 000: Var day_night_code created (87/173)\n", + "Rank 000: Var day_night_code data (87/173)\n", + "Rank 000: Var day_night_code completed (87/173)\n", + "Rank 000: Writing daytime_traffic_speed var (88/173)\n", + "Rank 000: Var daytime_traffic_speed created (88/173)\n", + "Rank 000: Var daytime_traffic_speed data (88/173)\n", + "Rank 000: Var daytime_traffic_speed completed (88/173)\n", + "Rank 000: Writing derived_uncertainty_per_measurement var (89/173)\n", + "Rank 000: Var derived_uncertainty_per_measurement created (89/173)\n", + "Rank 000: Var derived_uncertainty_per_measurement data (89/173)\n", + "Rank 000: Var derived_uncertainty_per_measurement completed (89/173)\n", + "Rank 000: Writing distance_to_building var (90/173)\n", + "Rank 000: Var distance_to_building created (90/173)\n", + "Rank 000: Var distance_to_building data (90/173)\n", + "Rank 000: Var distance_to_building completed (90/173)\n", + "Rank 000: Writing distance_to_junction var (91/173)\n", + "Rank 000: Var distance_to_junction created (91/173)\n", + "Rank 000: Var distance_to_junction data (91/173)\n", + "Rank 000: Var distance_to_junction completed (91/173)\n", + "Rank 000: Writing distance_to_kerb var (92/173)\n", + "Rank 000: Var distance_to_kerb created (92/173)\n", + "Rank 000: Var distance_to_kerb data (92/173)\n", + "Rank 000: Var distance_to_kerb completed (92/173)\n", + "Rank 000: Writing distance_to_source var (93/173)\n", + "Rank 000: Var distance_to_source created (93/173)\n", + "Rank 000: Var distance_to_source data (93/173)\n", + "Rank 000: Var distance_to_source completed (93/173)\n", + "Rank 000: Writing ellipsoid var (94/173)\n", + "Rank 000: Var ellipsoid created (94/173)\n", + "Rank 000: Var ellipsoid data (94/173)\n", + "Rank 000: Var ellipsoid completed (94/173)\n", + "Rank 000: Writing horizontal_datum var (95/173)\n", + "Rank 000: Var horizontal_datum created (95/173)\n", + "Rank 000: Var horizontal_datum data (95/173)\n", + "Rank 000: Var horizontal_datum completed (95/173)\n", + 
"Rank 000: Writing land_use var (96/173)\n", + "Rank 000: Var land_use created (96/173)\n", + "Rank 000: Var land_use data (96/173)\n", + "Rank 000: Var land_use completed (96/173)\n", + "Rank 000: Writing main_emission_source var (97/173)\n", + "Rank 000: Var main_emission_source created (97/173)\n", + "Rank 000: Var main_emission_source data (97/173)\n", + "Rank 000: Var main_emission_source completed (97/173)\n", + "Rank 000: Writing measurement_altitude var (98/173)\n", + "Rank 000: Var measurement_altitude created (98/173)\n", + "Rank 000: Var measurement_altitude data (98/173)\n", + "Rank 000: Var measurement_altitude completed (98/173)\n", + "Rank 000: Writing measurement_methodology var (99/173)\n", + "Rank 000: Var measurement_methodology created (99/173)\n", + "Rank 000: Var measurement_methodology data (99/173)\n", + "Rank 000: Var measurement_methodology completed (99/173)\n", + "Rank 000: Writing measurement_scale var (100/173)\n", + "Rank 000: Var measurement_scale created (100/173)\n", + "Rank 000: Var measurement_scale data (100/173)\n", + "Rank 000: Var measurement_scale completed (100/173)\n", + "Rank 000: Writing measuring_instrument_calibration_scale var (101/173)\n", + "Rank 000: Var measuring_instrument_calibration_scale created (101/173)\n", + "Rank 000: Var measuring_instrument_calibration_scale data (101/173)\n", + "Rank 000: Var measuring_instrument_calibration_scale completed (101/173)\n", + "Rank 000: Writing measuring_instrument_documented_absorption_cross_section var (102/173)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section created (102/173)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section data (102/173)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section completed (102/173)\n", + "Rank 000: Writing measuring_instrument_documented_accuracy var (103/173)\n", + "Rank 000: Var measuring_instrument_documented_accuracy created (103/173)\n", + "Rank 000: Var measuring_instrument_documented_accuracy data (103/173)\n", + "Rank 000: Var measuring_instrument_documented_accuracy completed (103/173)\n", + "Rank 000: Writing measuring_instrument_documented_flow_rate var (104/173)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate created (104/173)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate data (104/173)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate completed (104/173)\n", + "Rank 000: Writing measuring_instrument_documented_lower_limit_of_detection var (105/173)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection created (105/173)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection data (105/173)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection completed (105/173)\n", + "Rank 000: Writing measuring_instrument_documented_measurement_resolution var (106/173)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution created (106/173)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution data (106/173)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution completed (106/173)\n", + "Rank 000: Writing measuring_instrument_documented_precision var (107/173)\n", + "Rank 000: Var measuring_instrument_documented_precision created (107/173)\n", + "Rank 000: Var measuring_instrument_documented_precision data (107/173)\n", + "Rank 000: Var measuring_instrument_documented_precision completed (107/173)\n", + "Rank 
000: Writing measuring_instrument_documented_span_drift var (108/173)\n", + "Rank 000: Var measuring_instrument_documented_span_drift created (108/173)\n", + "Rank 000: Var measuring_instrument_documented_span_drift data (108/173)\n", + "Rank 000: Var measuring_instrument_documented_span_drift completed (108/173)\n", + "Rank 000: Writing measuring_instrument_documented_uncertainty var (109/173)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty created (109/173)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty data (109/173)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty completed (109/173)\n", + "Rank 000: Writing measuring_instrument_documented_upper_limit_of_detection var (110/173)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection created (110/173)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection data (110/173)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection completed (110/173)\n", + "Rank 000: Writing measuring_instrument_documented_zero_drift var (111/173)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift created (111/173)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift data (111/173)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift completed (111/173)\n", + "Rank 000: Writing measuring_instrument_documented_zonal_drift var (112/173)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift created (112/173)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift data (112/173)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift completed (112/173)\n", + "Rank 000: Writing measuring_instrument_further_details var (113/173)\n", + "Rank 000: Var measuring_instrument_further_details created (113/173)\n", + "Rank 000: Var measuring_instrument_further_details data (113/173)\n", + "Rank 000: Var measuring_instrument_further_details completed (113/173)\n", + "Rank 000: Writing measuring_instrument_inlet_information var (114/173)\n", + "Rank 000: Var measuring_instrument_inlet_information created (114/173)\n", + "Rank 000: Var measuring_instrument_inlet_information data (114/173)\n", + "Rank 000: Var measuring_instrument_inlet_information completed (114/173)\n", + "Rank 000: Writing measuring_instrument_manual_name var (115/173)\n", + "Rank 000: Var measuring_instrument_manual_name created (115/173)\n", + "Rank 000: Var measuring_instrument_manual_name data (115/173)\n", + "Rank 000: Var measuring_instrument_manual_name completed (115/173)\n", + "Rank 000: Writing measuring_instrument_name var (116/173)\n", + "Rank 000: Var measuring_instrument_name created (116/173)\n", + "Rank 000: Var measuring_instrument_name data (116/173)\n", + "Rank 000: Var measuring_instrument_name completed (116/173)\n", + "Rank 000: Writing measuring_instrument_process_details var (117/173)\n", + "Rank 000: Var measuring_instrument_process_details created (117/173)\n", + "Rank 000: Var measuring_instrument_process_details data (117/173)\n", + "Rank 000: Var measuring_instrument_process_details completed (117/173)\n", + "Rank 000: Writing measuring_instrument_reported_absorption_cross_section var (118/173)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section created (118/173)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section data (118/173)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section completed (118/173)\n", + "Rank 000: Writing 
measuring_instrument_reported_accuracy var (119/173)\n", + "Rank 000: Var measuring_instrument_reported_accuracy created (119/173)\n", + "Rank 000: Var measuring_instrument_reported_accuracy data (119/173)\n", + "Rank 000: Var measuring_instrument_reported_accuracy completed (119/173)\n", + "Rank 000: Writing measuring_instrument_reported_flow_rate var (120/173)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate created (120/173)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate data (120/173)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate completed (120/173)\n", + "Rank 000: Writing measuring_instrument_reported_lower_limit_of_detection var (121/173)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection created (121/173)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection data (121/173)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection completed (121/173)\n", + "Rank 000: Writing measuring_instrument_reported_measurement_resolution var (122/173)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution created (122/173)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution data (122/173)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution completed (122/173)\n", + "Rank 000: Writing measuring_instrument_reported_precision var (123/173)\n", + "Rank 000: Var measuring_instrument_reported_precision created (123/173)\n", + "Rank 000: Var measuring_instrument_reported_precision data (123/173)\n", + "Rank 000: Var measuring_instrument_reported_precision completed (123/173)\n", + "Rank 000: Writing measuring_instrument_reported_span_drift var (124/173)\n", + "Rank 000: Var measuring_instrument_reported_span_drift created (124/173)\n", + "Rank 000: Var measuring_instrument_reported_span_drift data (124/173)\n", + "Rank 000: Var measuring_instrument_reported_span_drift completed (124/173)\n", + "Rank 000: Writing measuring_instrument_reported_uncertainty var (125/173)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty created (125/173)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty data (125/173)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty completed (125/173)\n", + "Rank 000: Writing measuring_instrument_reported_units var (126/173)\n", + "Rank 000: Var measuring_instrument_reported_units created (126/173)\n", + "Rank 000: Var measuring_instrument_reported_units data (126/173)\n", + "Rank 000: Var measuring_instrument_reported_units completed (126/173)\n", + "Rank 000: Writing measuring_instrument_reported_upper_limit_of_detection var (127/173)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection created (127/173)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection data (127/173)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection completed (127/173)\n", + "Rank 000: Writing measuring_instrument_reported_zero_drift var (128/173)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift created (128/173)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift data (128/173)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift completed (128/173)\n", + "Rank 000: Writing measuring_instrument_reported_zonal_drift var (129/173)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift created (129/173)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift data (129/173)\n", + "Rank 000: Var 
measuring_instrument_reported_zonal_drift completed (129/173)\n", + "Rank 000: Writing measuring_instrument_sampling_type var (130/173)\n", + "Rank 000: Var measuring_instrument_sampling_type created (130/173)\n", + "Rank 000: Var measuring_instrument_sampling_type data (130/173)\n", + "Rank 000: Var measuring_instrument_sampling_type completed (130/173)\n", + "Rank 000: Writing monthly_native_max_gap_percent var (131/173)\n", + "Rank 000: Var monthly_native_max_gap_percent created (131/173)\n", + "Rank 000: Var monthly_native_max_gap_percent data (131/173)\n", + "Rank 000: Var monthly_native_max_gap_percent completed (131/173)\n", + "Rank 000: Writing monthly_native_representativity_percent var (132/173)\n", + "Rank 000: Var monthly_native_representativity_percent created (132/173)\n", + "Rank 000: Var monthly_native_representativity_percent data (132/173)\n", + "Rank 000: Var monthly_native_representativity_percent completed (132/173)\n", + "Rank 000: Writing network var (133/173)\n", + "Rank 000: Var network created (133/173)\n", + "Rank 000: Var network data (133/173)\n", + "Rank 000: Var network completed (133/173)\n", + "Rank 000: Writing network_maintenance_details var (134/173)\n", + "Rank 000: Var network_maintenance_details created (134/173)\n", + "Rank 000: Var network_maintenance_details data (134/173)\n", + "Rank 000: Var network_maintenance_details completed (134/173)\n", + "Rank 000: Writing network_miscellaneous_details var (135/173)\n", + "Rank 000: Var network_miscellaneous_details created (135/173)\n", + "Rank 000: Var network_miscellaneous_details data (135/173)\n", + "Rank 000: Var network_miscellaneous_details completed (135/173)\n", + "Rank 000: Writing network_provided_volume_standard_pressure var (136/173)\n", + "Rank 000: Var network_provided_volume_standard_pressure created (136/173)\n", + "Rank 000: Var network_provided_volume_standard_pressure data (136/173)\n", + "Rank 000: Var network_provided_volume_standard_pressure completed (136/173)\n", + "Rank 000: Writing network_provided_volume_standard_temperature var (137/173)\n", + "Rank 000: Var network_provided_volume_standard_temperature created (137/173)\n", + "Rank 000: Var network_provided_volume_standard_temperature data (137/173)\n", + "Rank 000: Var network_provided_volume_standard_temperature completed (137/173)\n", + "Rank 000: Writing network_qa_details var (138/173)\n", + "Rank 000: Var network_qa_details created (138/173)\n", + "Rank 000: Var network_qa_details data (138/173)\n", + "Rank 000: Var network_qa_details completed (138/173)\n", + "Rank 000: Writing network_sampling_details var (139/173)\n", + "Rank 000: Var network_sampling_details created (139/173)\n", + "Rank 000: Var network_sampling_details data (139/173)\n", + "Rank 000: Var network_sampling_details completed (139/173)\n", + "Rank 000: Writing network_uncertainty_details var (140/173)\n", + "Rank 000: Var network_uncertainty_details created (140/173)\n", + "Rank 000: Var network_uncertainty_details data (140/173)\n", + "Rank 000: Var network_uncertainty_details completed (140/173)\n", + "Rank 000: Writing population var (141/173)\n", + "Rank 000: Var population created (141/173)\n", + "Rank 000: Var population data (141/173)\n", + "Rank 000: Var population completed (141/173)\n", + "Rank 000: Writing primary_sampling_further_details var (142/173)\n", + "Rank 000: Var primary_sampling_further_details created (142/173)\n", + "Rank 000: Var primary_sampling_further_details data (142/173)\n", + "Rank 000: Var 
primary_sampling_further_details completed (142/173)\n", + "Rank 000: Writing primary_sampling_instrument_documented_flow_rate var (143/173)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate created (143/173)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate data (143/173)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate completed (143/173)\n", + "Rank 000: Writing primary_sampling_instrument_manual_name var (144/173)\n", + "Rank 000: Var primary_sampling_instrument_manual_name created (144/173)\n", + "Rank 000: Var primary_sampling_instrument_manual_name data (144/173)\n", + "Rank 000: Var primary_sampling_instrument_manual_name completed (144/173)\n", + "Rank 000: Writing primary_sampling_instrument_name var (145/173)\n", + "Rank 000: Var primary_sampling_instrument_name created (145/173)\n", + "Rank 000: Var primary_sampling_instrument_name data (145/173)\n", + "Rank 000: Var primary_sampling_instrument_name completed (145/173)\n", + "Rank 000: Writing primary_sampling_instrument_reported_flow_rate var (146/173)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate created (146/173)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate data (146/173)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate completed (146/173)\n", + "Rank 000: Writing primary_sampling_process_details var (147/173)\n", + "Rank 000: Var primary_sampling_process_details created (147/173)\n", + "Rank 000: Var primary_sampling_process_details data (147/173)\n", + "Rank 000: Var primary_sampling_process_details completed (147/173)\n", + "Rank 000: Writing primary_sampling_type var (148/173)\n", + "Rank 000: Var primary_sampling_type created (148/173)\n", + "Rank 000: Var primary_sampling_type data (148/173)\n", + "Rank 000: Var primary_sampling_type completed (148/173)\n", + "Rank 000: Writing principal_investigator_email_address var (149/173)\n", + "Rank 000: Var principal_investigator_email_address created (149/173)\n", + "Rank 000: Var principal_investigator_email_address data (149/173)\n", + "Rank 000: Var principal_investigator_email_address completed (149/173)\n", + "Rank 000: Writing principal_investigator_institution var (150/173)\n", + "Rank 000: Var principal_investigator_institution created (150/173)\n", + "Rank 000: Var principal_investigator_institution data (150/173)\n", + "Rank 000: Var principal_investigator_institution completed (150/173)\n", + "Rank 000: Writing principal_investigator_name var (151/173)\n", + "Rank 000: Var principal_investigator_name created (151/173)\n", + "Rank 000: Var principal_investigator_name data (151/173)\n", + "Rank 000: Var principal_investigator_name completed (151/173)\n", + "Rank 000: Writing process_warnings var (152/173)\n", + "Rank 000: Var process_warnings created (152/173)\n", + "Rank 000: Var process_warnings data (152/173)\n", + "Rank 000: Var process_warnings completed (152/173)\n", + "Rank 000: Writing projection var (153/173)\n", + "Rank 000: Var projection created (153/173)\n", + "Rank 000: Var projection data (153/173)\n", + "Rank 000: Var projection completed (153/173)\n", + "Rank 000: Writing reported_uncertainty_per_measurement var (154/173)\n", + "Rank 000: Var reported_uncertainty_per_measurement created (154/173)\n", + "Rank 000: Var reported_uncertainty_per_measurement data (154/173)\n", + "Rank 000: Var reported_uncertainty_per_measurement completed (154/173)\n", + "Rank 000: Writing representative_radius var (155/173)\n", + "Rank 000: Var 
representative_radius created (155/173)\n", + "Rank 000: Var representative_radius data (155/173)\n", + "Rank 000: Var representative_radius completed (155/173)\n", + "Rank 000: Writing retrieval_algorithm var (156/173)\n", + "Rank 000: Var retrieval_algorithm created (156/173)\n", + "Rank 000: Var retrieval_algorithm data (156/173)\n", + "Rank 000: Var retrieval_algorithm completed (156/173)\n", + "Rank 000: Writing sample_preparation_further_details var (157/173)\n", + "Rank 000: Var sample_preparation_further_details created (157/173)\n", + "Rank 000: Var sample_preparation_further_details data (157/173)\n", + "Rank 000: Var sample_preparation_further_details completed (157/173)\n", + "Rank 000: Writing sample_preparation_process_details var (158/173)\n", + "Rank 000: Var sample_preparation_process_details created (158/173)\n", + "Rank 000: Var sample_preparation_process_details data (158/173)\n", + "Rank 000: Var sample_preparation_process_details completed (158/173)\n", + "Rank 000: Writing sample_preparation_techniques var (159/173)\n", + "Rank 000: Var sample_preparation_techniques created (159/173)\n", + "Rank 000: Var sample_preparation_techniques data (159/173)\n", + "Rank 000: Var sample_preparation_techniques completed (159/173)\n", + "Rank 000: Writing sample_preparation_types var (160/173)\n", + "Rank 000: Var sample_preparation_types created (160/173)\n", + "Rank 000: Var sample_preparation_types data (160/173)\n", + "Rank 000: Var sample_preparation_types completed (160/173)\n", + "Rank 000: Writing sampling_height var (161/173)\n", + "Rank 000: Var sampling_height created (161/173)\n", + "Rank 000: Var sampling_height data (161/173)\n", + "Rank 000: Var sampling_height completed (161/173)\n", + "Rank 000: Writing sconcso4 var (162/173)\n", + "Rank 000: Var sconcso4 created (162/173)\n", + "Rank 000: Var sconcso4 data (162/173)\n", + "Rank 000: Var sconcso4 completed (162/173)\n", + "Rank 000: Writing season_code var (163/173)\n", + "Rank 000: Var season_code created (163/173)\n", + "Rank 000: Var season_code data (163/173)\n", + "Rank 000: Var season_code completed (163/173)\n", + "Rank 000: Writing station_classification var (164/173)\n", + "Rank 000: Var station_classification created (164/173)\n", + "Rank 000: Var station_classification data (164/173)\n", + "Rank 000: Var station_classification completed (164/173)\n", + "Rank 000: Writing station_name var (165/173)\n", + "Rank 000: Var station_name created (165/173)\n", + "Rank 000: Var station_name data (165/173)\n", + "Rank 000: Var station_name completed (165/173)\n", + "Rank 000: Writing station_reference var (166/173)\n", + "Rank 000: Var station_reference created (166/173)\n", + "Rank 000: Var station_reference data (166/173)\n", + "Rank 000: Var station_reference completed (166/173)\n", + "Rank 000: Writing station_timezone var (167/173)\n", + "Rank 000: Var station_timezone created (167/173)\n", + "Rank 000: Var station_timezone data (167/173)\n", + "Rank 000: Var station_timezone completed (167/173)\n", + "Rank 000: Writing street_type var (168/173)\n", + "Rank 000: Var street_type created (168/173)\n", + "Rank 000: Var street_type data (168/173)\n", + "Rank 000: Var street_type completed (168/173)\n", + "Rank 000: Writing street_width var (169/173)\n", + "Rank 000: Var street_width created (169/173)\n", + "Rank 000: Var street_width data (169/173)\n", + "Rank 000: Var street_width completed (169/173)\n", + "Rank 000: Writing terrain var (170/173)\n", + "Rank 000: Var terrain created (170/173)\n", + "Rank 000: 
Var terrain data (170/173)\n", + "Rank 000: Var terrain completed (170/173)\n", + "Rank 000: Writing vertical_datum var (171/173)\n", + "Rank 000: Var vertical_datum created (171/173)\n", + "Rank 000: Var vertical_datum data (171/173)\n", + "Rank 000: Var vertical_datum completed (171/173)\n", + "Rank 000: Writing weekday_weekend_code var (172/173)\n", + "Rank 000: Var weekday_weekend_code created (172/173)\n", + "Rank 000: Var weekday_weekend_code data (172/173)\n", + "Rank 000: Var weekday_weekend_code completed (172/173)\n", + "Rank 000: Writing sconcso4_prefiltered_defaultqa var (173/173)\n", + "Rank 000: Var sconcso4_prefiltered_defaultqa created (173/173)\n", + "Rank 000: Var sconcso4_prefiltered_defaultqa data (173/173)\n", + "Rank 000: Var sconcso4_prefiltered_defaultqa completed (173/173)\n" + ] + } + ], + "source": [ + "nessy_ghost_1.to_netcdf('points_file_2.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_ghost_2 = open_netcdf('points_file_2.nc', info=True, parallel_method='X')\n", + "nessy_ghost_2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                                                           (time: 30, station: 3, N_flag_codes: 190, N_qa_codes: 77)\n",
+       "Coordinates:\n",
+       "  * time                                                              (time) datetime64[ns] ...\n",
+       "  * station                                                           (station) float64 ...\n",
+       "Dimensions without coordinates: N_flag_codes, N_qa_codes\n",
+       "Data variables: (12/177)\n",
+       "    flag                                                              (station, time, N_flag_codes) int64 ...\n",
+       "    qa                                                                (station, time, N_qa_codes) int64 ...\n",
+       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
+       "    ...                                                                ...\n",
+       "    terrain                                                           (station) object ...\n",
+       "    vertical_datum                                                    (station) object ...\n",
+       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
+       "    sconcso4_prefiltered_defaultqa                                    (station, time) float32 ...\n",
+       "    lat                                                               (station) float64 ...\n",
+       "    lon                                                               (station) float64 ...\n",
+       "Attributes:\n",
+       "    title:          Surface sulphate data in the EANET network in 2019-11.\n",
+       "    institution:    Barcelona Supercomputing Center\n",
+       "    source:         Surface observations\n",
+       "    creator_name:   Dene R. Bowdalo\n",
+       "    creator_email:  dene.bowdalo@bsc.es\n",
+       "    version:        1.4\n",
+       "    Conventions:    CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 30, station: 3, N_flag_codes: 190, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + " * station (station) float64 ...\n", + "Dimensions without coordinates: N_flag_codes, N_qa_codes\n", + "Data variables: (12/177)\n", + " flag (station, time, N_flag_codes) int64 ...\n", + " qa (station, time, N_qa_codes) int64 ...\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " ... ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + " sconcso4_prefiltered_defaultqa (station, time) float32 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " title: Surface sulphate data in the EANET network in 2019-11.\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " version: 1.4\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_file_2.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.4-lcc_grids.ipynb b/Jupyter_notebooks/1.4-lcc_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..91f50cec892013d13f02c4d288a812ad34b0dd4d --- /dev/null +++ b/Jupyter_notebooks/1.4-lcc_grids.ipynb @@ -0,0 +1,1710 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write LCC grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "nc_path_1 = '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/pm10/pm10_2022062600.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read and write" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:            (time: 48, y: 398, x: 478, lev: 1)\n",
+       "Coordinates:\n",
+       "  * time               (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n",
+       "    lat                (y, x) float32 ...\n",
+       "    lon                (y, x) float32 ...\n",
+       "  * x                  (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n",
+       "  * y                  (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n",
+       "  * lev                (lev) float32 0.0\n",
+       "Data variables:\n",
+       "    pm10               (time, lev, y, x) float32 ...\n",
+       "    Lambert_conformal  int32 -2147483647
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 48, y: 398, x: 478, lev: 1)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n", + " lat (y, x) float32 ...\n", + " lon (y, x) float32 ...\n", + " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", + " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", + " * lev (lev) float32 0.0\n", + "Data variables:\n", + " pm10 (time, lev, y, x) float32 ...\n", + " Lambert_conformal int32 ..." + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(nc_path_1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=nc_path_1, info=True)\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2022, 6, 26, 0, 0),\n", + " datetime.datetime(2022, 6, 26, 1, 0),\n", + " datetime.datetime(2022, 6, 26, 2, 0),\n", + " datetime.datetime(2022, 6, 26, 3, 0),\n", + " datetime.datetime(2022, 6, 26, 4, 0),\n", + " datetime.datetime(2022, 6, 26, 5, 0),\n", + " datetime.datetime(2022, 6, 26, 6, 0),\n", + " datetime.datetime(2022, 6, 26, 7, 0),\n", + " datetime.datetime(2022, 6, 26, 8, 0),\n", + " datetime.datetime(2022, 6, 26, 9, 0),\n", + " datetime.datetime(2022, 6, 26, 10, 0),\n", + " datetime.datetime(2022, 6, 26, 11, 0),\n", + " datetime.datetime(2022, 6, 26, 12, 0),\n", + " datetime.datetime(2022, 6, 26, 13, 0),\n", + " datetime.datetime(2022, 6, 26, 14, 0),\n", + " datetime.datetime(2022, 6, 26, 15, 0),\n", + " datetime.datetime(2022, 6, 26, 16, 0),\n", + " datetime.datetime(2022, 6, 26, 17, 0),\n", + " datetime.datetime(2022, 6, 26, 18, 0),\n", + " datetime.datetime(2022, 6, 26, 19, 0),\n", + " datetime.datetime(2022, 6, 26, 20, 0),\n", + " datetime.datetime(2022, 6, 26, 21, 0),\n", + " datetime.datetime(2022, 6, 26, 22, 0),\n", + " datetime.datetime(2022, 6, 26, 23, 0),\n", + " datetime.datetime(2022, 6, 27, 0, 0),\n", + " datetime.datetime(2022, 6, 27, 1, 0),\n", + " datetime.datetime(2022, 6, 27, 2, 0),\n", + " datetime.datetime(2022, 6, 27, 3, 0),\n", + " datetime.datetime(2022, 6, 27, 4, 0),\n", + " datetime.datetime(2022, 6, 27, 5, 0),\n", + " datetime.datetime(2022, 6, 27, 6, 0),\n", + " datetime.datetime(2022, 6, 27, 7, 0),\n", + " datetime.datetime(2022, 6, 27, 8, 0),\n", + " datetime.datetime(2022, 6, 27, 9, 0),\n", + " datetime.datetime(2022, 6, 27, 10, 0),\n", + " datetime.datetime(2022, 6, 27, 11, 0),\n", + " datetime.datetime(2022, 6, 27, 12, 0),\n", + " datetime.datetime(2022, 6, 27, 13, 0),\n", + " datetime.datetime(2022, 6, 27, 14, 0),\n", + " datetime.datetime(2022, 6, 27, 15, 0),\n", + " datetime.datetime(2022, 6, 27, 16, 0),\n", + " datetime.datetime(2022, 6, 27, 17, 0),\n", + " datetime.datetime(2022, 6, 27, 18, 0),\n", + " datetime.datetime(2022, 6, 27, 19, 0),\n", + " datetime.datetime(2022, 6, 27, 20, 0),\n", + " datetime.datetime(2022, 6, 27, 21, 0),\n", + " datetime.datetime(2022, 6, 27, 22, 0),\n", + " datetime.datetime(2022, 6, 27, 23, 0)]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + 
"cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[0.],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32), 'dimensions': ('lev',), 'positive': 'up'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-2.12584753e+06, -2.11384761e+06, -2.10184701e+06,\n", + " -2.08984901e+06, -2.07784716e+06, -2.06584784e+06,\n", + " -2.05384791e+06, -2.04185027e+06, -2.02984883e+06,\n", + " -2.01784924e+06, -2.00584929e+06, -1.99384819e+06,\n", + " -1.98184942e+06, -1.96984999e+06, -1.95784950e+06,\n", + " -1.94584776e+06, -1.93384820e+06, -1.92184804e+06,\n", + " -1.90984965e+06, -1.89784727e+06, -1.88584974e+06,\n", + " -1.87384798e+06, -1.86184856e+06, -1.84984781e+06,\n", + " -1.83784882e+06, -1.82584878e+06, -1.81384730e+06,\n", + " -1.80184766e+06, -1.78985000e+06, -1.77784784e+06,\n", + " -1.76584709e+06, -1.75384839e+06, -1.74184822e+06,\n", + " -1.72984970e+06, -1.71784950e+06, -1.70584804e+06,\n", + " -1.69384799e+06, -1.68184961e+06, -1.66984941e+06,\n", + " -1.65784782e+06, -1.64584735e+06, -1.63384873e+06,\n", + " -1.62184786e+06, -1.60984877e+06, -1.59784781e+06,\n", + " -1.58584811e+06, -1.57384978e+06, -1.56184949e+06,\n", + " -1.54984706e+06, -1.53784949e+06, -1.52584971e+06,\n", + " -1.51384784e+06, -1.50185029e+06, -1.48984741e+06,\n", + " -1.47784881e+06, -1.46584796e+06, -1.45384829e+06,\n", + " -1.44184935e+06, -1.42984834e+06, -1.41784813e+06,\n", + " -1.40584895e+06, -1.39384732e+06, -1.38184958e+06,\n", + " -1.36984970e+06, -1.35784703e+06, -1.34584852e+06,\n", + " -1.33384739e+06, -1.32185023e+06, -1.30984708e+06,\n", + " -1.29784782e+06, -1.28584924e+06, -1.27384787e+06,\n", + " -1.26185017e+06, -1.24984974e+06, -1.23784984e+06,\n", + " -1.22584679e+06, -1.21384762e+06, -1.20184864e+06,\n", + " -1.18985017e+06, -1.17784863e+06, -1.16584730e+06,\n", + " -1.15384933e+06, -1.14184842e+06, -1.12984761e+06,\n", + " -1.11785016e+06, -1.10584945e+06, -1.09384884e+06,\n", + " -1.08184809e+06, -1.06984737e+06, -1.05784994e+06,\n", + " -1.04584897e+06, -1.03384802e+06, -1.02185016e+06,\n", + " -1.00984874e+06, -9.97847138e+05, -9.85848430e+05,\n", + " -9.73849460e+05, -9.61850193e+05, -9.49847198e+05,\n", + " -9.37847136e+05, -9.25849883e+05, -9.13848882e+05,\n", + " -9.01847317e+05, -8.89848619e+05, -8.77849203e+05,\n", + " -8.65849037e+05, -8.53848483e+05, -8.41850255e+05,\n", + " -8.29848103e+05, -8.17848443e+05, -8.05848159e+05,\n", + " -7.93850146e+05, -7.81847983e+05, -7.69848242e+05,\n", + " -7.57847649e+05, -7.45849126e+05, -7.33849816e+05,\n", + " -7.21849273e+05, -7.09847665e+05, -6.97848323e+05,\n", + " -6.85847775e+05, -6.73849354e+05, -6.61849656e+05,\n", + " -6.49848767e+05, -6.37849773e+05, -6.25849455e+05,\n", + " -6.13847832e+05, -6.01848000e+05, -5.89850037e+05,\n", + " -5.77847297e+05, -5.65849601e+05, -5.53850360e+05,\n", + " -5.41849485e+05, -5.29850392e+05, -5.17849446e+05,\n", + " -5.05850300e+05, -4.93849326e+05, -4.81849796e+05,\n", + " -4.69848463e+05, -4.57848676e+05, -4.45850309e+05,\n", + " -4.33849861e+05, -4.21847579e+05, -4.09849797e+05,\n", + " -3.97850061e+05, -3.85848258e+05, -3.73847618e+05,\n", + " -3.61848107e+05, -3.49849786e+05, -3.37849091e+05,\n", + " -3.25849544e+05, -3.13847764e+05, -3.01850147e+05,\n", + " 
-2.89850223e+05, -2.77847869e+05, -2.65849682e+05,\n", + " -2.53848995e+05, -2.41849099e+05, -2.29849935e+05,\n", + " -2.17848225e+05, -2.05850456e+05, -1.93850007e+05,\n", + " -1.81850198e+05, -1.69847633e+05, -1.57848901e+05,\n", + " -1.45847355e+05, -1.33849541e+05, -1.21848866e+05,\n", + " -1.09848557e+05, -9.78485812e+04, -8.58489110e+04,\n", + " -7.38495157e+04, -6.18503428e+04, -4.98480588e+04,\n", + " -3.78492294e+04, -2.58472156e+04, -1.38485865e+04,\n", + " -1.85000093e+03, 1.01518839e+04, 2.21504974e+04,\n", + " 3.41524846e+04, 4.61512675e+04, 5.81502025e+04,\n", + " 7.01526052e+04, 8.21519418e+04, 9.41515115e+04,\n", + " 1.06151401e+05, 1.18151593e+05, 1.30152150e+05,\n", + " 1.42149821e+05, 1.54151239e+05, 1.66149805e+05,\n", + " 1.78152206e+05, 1.90151841e+05, 2.02152103e+05,\n", + " 2.14149676e+05, 2.26151179e+05, 2.38150101e+05,\n", + " 2.50149789e+05, 2.62150163e+05, 2.74151423e+05,\n", + " 2.86150208e+05, 2.98149857e+05, 3.10150460e+05,\n", + " 3.22152060e+05, 3.34151271e+05, 3.46151519e+05,\n", + " 3.58149605e+05, 3.70152044e+05, 3.82152331e+05,\n", + " 3.94150502e+05, 4.06149863e+05, 4.18150448e+05,\n", + " 4.30152410e+05, 4.42152326e+05, 4.54150354e+05,\n", + " 4.66149752e+05, 4.78150600e+05, 4.90149630e+05,\n", + " 5.02150092e+05, 5.14152159e+05, 5.26152471e+05,\n", + " 5.38151163e+05, 5.50151477e+05, 5.62150197e+05,\n", + " 5.74150715e+05, 5.86149501e+05, 5.98150157e+05,\n", + " 6.10149476e+05, 6.22150579e+05, 6.34150250e+05,\n", + " 6.46151947e+05, 6.58152226e+05, 6.70151301e+05,\n", + " 6.82152394e+05, 6.94152233e+05, 7.06151050e+05,\n", + " 7.18151992e+05, 7.30151725e+05, 7.42150415e+05,\n", + " 7.54151335e+05, 7.66151356e+05, 7.78150447e+05,\n", + " 7.90151805e+05, 8.02152305e+05, 8.14151909e+05,\n", + " 8.26150803e+05, 8.38152179e+05, 8.50149463e+05,\n", + " 8.62152600e+05, 8.74151875e+05, 8.86150469e+05,\n", + " 8.98151720e+05, 9.10152442e+05, 9.22152590e+05,\n", + " 9.34152281e+05, 9.46151555e+05, 9.58150185e+05,\n", + " 9.70151947e+05, 9.82150008e+05, 9.94151009e+05,\n", + " 1.00615168e+06, 1.01815215e+06, 1.03015237e+06,\n", + " 1.04215255e+06, 1.05415245e+06, 1.06615219e+06,\n", + " 1.07815192e+06, 1.09015146e+06, 1.10215106e+06,\n", + " 1.11415084e+06, 1.12615055e+06, 1.13815031e+06,\n", + " 1.15015027e+06, 1.16215026e+06, 1.17415050e+06,\n", + " 1.18615117e+06, 1.19815184e+06, 1.21014970e+06,\n", + " 1.22215106e+06, 1.23414955e+06, 1.24615161e+06,\n", + " 1.25815089e+06, 1.27015072e+06, 1.28215091e+06,\n", + " 1.29415184e+06, 1.30614966e+06, 1.31815171e+06,\n", + " 1.33015072e+06, 1.34215086e+06, 1.35415143e+06,\n", + " 1.36614929e+06, 1.37815146e+06, 1.39015111e+06,\n", + " 1.40215157e+06, 1.41414946e+06, 1.42615154e+06,\n", + " 1.43815149e+06, 1.45015214e+06, 1.46215061e+06,\n", + " 1.47414997e+06, 1.48615052e+06, 1.49815230e+06,\n", + " 1.51015149e+06, 1.52215197e+06, 1.53415035e+06,\n", + " 1.54615023e+06, 1.55815110e+06, 1.57015010e+06,\n", + " 1.58215014e+06, 1.59415154e+06, 1.60615118e+06,\n", + " 1.61815225e+06, 1.63015134e+06, 1.64215177e+06,\n", + " 1.65415073e+06, 1.66615110e+06, 1.67814931e+06,\n", + " 1.69014976e+06, 1.70215157e+06, 1.71415178e+06,\n", + " 1.72615026e+06, 1.73815066e+06, 1.75015254e+06,\n", + " 1.76214949e+06, 1.77415192e+06, 1.78614949e+06,\n", + " 1.79815196e+06, 1.81015012e+06, 1.82214994e+06,\n", + " 1.83415163e+06, 1.84615225e+06, 1.85815149e+06,\n", + " 1.87014939e+06, 1.88214927e+06, 1.89415152e+06,\n", + " 1.90615253e+06, 1.91815231e+06, 1.93015091e+06,\n", + " 1.94215219e+06, 1.95415199e+06, 1.96615088e+06,\n", + " 
1.97815184e+06, 1.99015212e+06, 2.00215141e+06,\n", + " 2.01414974e+06, 2.02615043e+06, 2.03815022e+06,\n", + " 2.05014932e+06, 2.06215089e+06, 2.07415183e+06,\n", + " 2.08615200e+06, 2.09815142e+06, 2.11015031e+06,\n", + " 2.12215125e+06, 2.13415212e+06, 2.14615216e+06,\n", + " 2.15815181e+06, 2.17015090e+06, 2.18215236e+06,\n", + " 2.19415062e+06, 2.20615132e+06, 2.21815157e+06,\n", + " 2.23015162e+06, 2.24215129e+06, 2.25415041e+06,\n", + " 2.26614942e+06, 2.27815103e+06, 2.29014950e+06,\n", + " 2.30215084e+06, 2.31415199e+06, 2.32614967e+06,\n", + " 2.33815052e+06, 2.35015105e+06, 2.36215151e+06,\n", + " 2.37415171e+06, 2.38615211e+06, 2.39815209e+06,\n", + " 2.41014925e+06, 2.42214936e+06, 2.43414979e+06,\n", + " 2.44615014e+06, 2.45815065e+06, 2.47015092e+06,\n", + " 2.48215186e+06, 2.49414885e+06, 2.50615033e+06,\n", + " 2.51815099e+06, 2.53014892e+06, 2.54215075e+06,\n", + " 2.55414946e+06, 2.56615190e+06, 2.57815081e+06,\n", + " 2.59015090e+06, 2.60215059e+06, 2.61415105e+06,\n", + " 2.62615163e+06, 2.63814974e+06, 2.65015084e+06,\n", + " 2.66215003e+06, 2.67414918e+06, 2.68614955e+06,\n", + " 2.69814946e+06, 2.71015063e+06, 2.72215238e+06,\n", + " 2.73415141e+06, 2.74615082e+06, 2.75815088e+06,\n", + " 2.77015212e+06, 2.78215001e+06, 2.79414938e+06,\n", + " 2.80614927e+06, 2.81814994e+06, 2.83015143e+06,\n", + " 2.84215046e+06, 2.85415010e+06, 2.86615064e+06,\n", + " 2.87815237e+06, 2.89015096e+06, 2.90215132e+06,\n", + " 2.91415163e+06, 2.92615073e+06, 2.93815062e+06,\n", + " 2.95015108e+06, 2.96214987e+06, 2.97415232e+06,\n", + " 2.98614959e+06, 2.99815110e+06, 3.01015052e+06,\n", + " 3.02215092e+06, 3.03414984e+06, 3.04615228e+06,\n", + " 3.05814999e+06, 3.07015153e+06, 3.08215198e+06,\n", + " 3.09414943e+06, 3.10615189e+06, 3.11815252e+06,\n", + " 3.13015077e+06, 3.14215026e+06, 3.15415157e+06,\n", + " 3.16615058e+06, 3.17815090e+06, 3.19015010e+06,\n", + " 3.20214982e+06, 3.21415120e+06, 3.22615069e+06,\n", + " 3.23815162e+06, 3.25015100e+06, 3.26215215e+06,\n", + " 3.27415093e+06, 3.28615124e+06, 3.29815011e+06,\n", + " 3.31015056e+06, 3.32214902e+06, 3.33414971e+06,\n", + " 3.34615147e+06, 3.35815161e+06, 3.37014985e+06,\n", + " 3.38215073e+06, 3.39415246e+06, 3.40614937e+06,\n", + " 3.41815139e+06, 3.43015224e+06, 3.44215134e+06,\n", + " 3.45415201e+06, 3.46615129e+06, 3.47814888e+06,\n", + " 3.49015204e+06, 3.50214995e+06, 3.51415017e+06,\n", + " 3.52615242e+06, 3.53814916e+06, 3.55015129e+06,\n", + " 3.56214954e+06, 3.57415226e+06, 3.58615021e+06,\n", + " 3.59815064e+06],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('x',),\n", + " 'units': 'm',\n", + " 'long_name': 'x coordinate of projection',\n", + " 'standard_name': 'projection_x_coordinate'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.x" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-2067138.47948561, -2055138.33742867,\n", + " -2043137.54665598, -2031137.11044915,\n", + " -2019137.8807928 , -2007138.08379361,\n", + " -1995137.55201761, -1983138.30318791,\n", + " -1971139.14829968, -1959137.5926525 ,\n", + " -1947137.10543589, -1935138.53320712,\n", + " -1923137.4951208 , -1911137.45400782,\n", + " -1899138.2432851 , -1887137.51394029,\n", + " -1875138.43810423, -1863136.92956926,\n", + " -1851137.89552799, -1839137.1121049 ,\n", + " -1827137.15927377, -1815138.01203912,\n", + " -1803138.7795864 , 
-1791137.70775766,\n", + " -1779137.3704284 , -1767137.74254063,\n", + " -1755137.93518378, -1743137.06138263,\n", + " -1731137.6875659 , -1719137.19999768,\n", + " -1707136.43834552, -1695137.96319842,\n", + " -1683137.44174755, -1671137.43454641,\n", + " -1659137.056387 , -1647138.00256504,\n", + " -1635137.66921584, -1623136.89251186,\n", + " -1611137.36489549, -1599137.34432241,\n", + " -1587137.79320469, -1575137.57084194,\n", + " -1563136.9109904 , -1551137.37235623,\n", + " -1539137.94554037, -1527137.1502086 ,\n", + " -1515138.25410466, -1503138.66687657,\n", + " -1491138.4899473 , -1479136.59453581,\n", + " -1467137.47233746, -1455137.55905622,\n", + " -1443137.68244934, -1431137.81703208,\n", + " -1419138.78837709, -1407138.99088641,\n", + " -1395137.42673134, -1383137.47212092,\n", + " -1371137.40019024, -1359138.03433592,\n", + " -1347136.9224302 , -1335137.31271794,\n", + " -1323137.4816182 , -1311137.94634304,\n", + " -1299137.29022577, -1287137.1818736 ,\n", + " -1275137.59439096, -1263136.92697495,\n", + " -1251137.57275311, -1239136.12380517,\n", + " -1227137.6241066 , -1215137.51625937,\n", + " -1203137.04074528, -1191137.62692217,\n", + " -1179138.21301879, -1167137.50751754,\n", + " -1155137.89726681, -1143138.09129864,\n", + " -1131138.06316848, -1119137.05825655,\n", + " -1107137.15205236, -1095138.20305536,\n", + " -1083138.08298794, -1071137.18627622,\n", + " -1059138.64697368, -1047138.74478058,\n", + " -1035138.51485085, -1023137.92955771,\n", + " -1011137.6894263 , -999137.45744737,\n", + " -987137.51499851, -975137.83442915,\n", + " -963137.76831308, -951139.05433923,\n", + " -939137.39017683, -927137.54863286,\n", + " -915138.66481437, -903137.47725183,\n", + " -891138.44385041, -879137.99257723,\n", + " -867138.69715018, -855137.92810361,\n", + " -843137.53065969, -831137.99786638,\n", + " -819138.36299108, -807137.87013262,\n", + " -795137.32354617, -783137.83855245,\n", + " -771137.41075253, -759138.09010529,\n", + " -747137.76958547, -735137.98006167,\n", + " -723137.5495894 , -711137.6947773 ,\n", + " -699137.86979959, -687138.5622281 ,\n", + " -675137.66992397, -663138.06610574,\n", + " -651137.23497902, -639136.80506847,\n", + " -627138.30290647, -615137.65946231,\n", + " -603138.15712078, -591136.87075576,\n", + " -579137.08014637, -567137.82964577,\n", + " -555137.63384505, -543137.50696179,\n", + " -531138.0283756 , -519137.73451411,\n", + " -507137.93240193, -495137.45153707,\n", + " -483137.30641057, -471137.15203014,\n", + " -459138.19530608, -447138.75770337,\n", + " -435136.43449817, -423137.59544964,\n", + " -411136.54082615, -399138.59271965,\n", + " -387138.05393281, -375137.17535161,\n", + " -363137.35013412, -351138.04215929,\n", + " -339138.30575139, -327138.29921261,\n", + " -315137.89921544, -303137.58006488,\n", + " -291137.12571846, -279137.5132003 ,\n", + " -267137.79855472, -255137.95212261,\n", + " -243138.35433354, -231138.15527529,\n", + " -219137.41721347, -207138.06770319,\n", + " -195138.02753911, -183138.17757006,\n", + " -171137.98702931, -159138.24510045,\n", + " -147137.37488813, -135137.71176692,\n", + " -123137.99774014, -111137.06580801,\n", + " -99137.65919525, -87138.52051766,\n", + " -75137.66532798, -63138.24447606,\n", + " -51137.45613798, -39138.44989177,\n", + " -27137.19935201, -15137.26193513,\n", + " -3138.2870129 , 8862.70158754,\n", + " 20863.19613188, 32862.09109349,\n", + " 44862.1850711 , 56862.28417564,\n", + " 68862.09833787, 80862.79348443,\n", + " 92862.04079618, 104861.82177242,\n", + " 
116861.75894296, 128861.96917478,\n", + " 140861.98836512, 152861.43960268,\n", + " 164861.6613091 , 176862.10360022,\n", + " 188862.1547975 , 200862.57312437,\n", + " 212861.43960038, 224862.6824761 ,\n", + " 236862.02704646, 248862.67280663,\n", + " 260861.80232975, 272862.69968633,\n", + " 284863.03968892, 296862.27594177,\n", + " 308863.36959344, 320862.7767121 ,\n", + " 332862.79683064, 344861.59736628,\n", + " 356861.88372131, 368861.82305626,\n", + " 380861.27672775, 392861.25573027,\n", + " 404862.83978888, 416862.48790187,\n", + " 428862.26071291, 440862.59403697,\n", + " 452861.89377259, 464861.81366973,\n", + " 476862.70563957, 488861.51974816,\n", + " 500861.77140022, 512861.95044987,\n", + " 524862.40904252, 536862.85449188,\n", + " 548862.09945016, 560862.20196287,\n", + " 572862.70276656, 584862.09217358,\n", + " 596862.4276693 , 608861.6280585 ,\n", + " 620861.42797071, 632862.17940113,\n", + " 644862.3726426 , 656861.95444672,\n", + " 668862.65863139, 680862.81014377,\n", + " 692862.52099743, 704861.41511084,\n", + " 716862.19408409, 728863.02536633,\n", + " 740861.50621775, 752862.36459892,\n", + " 764861.74143799, 776862.50273099,\n", + " 788863.2968091 , 800862.6963584 ,\n", + " 812861.94605002, 824861.7205462 ,\n", + " 836861.80803198, 848862.4778583 ,\n", + " 860861.89719402, 872862.84895905,\n", + " 884862.52520112, 896861.84719109,\n", + " 908861.8945184 , 920862.37268268,\n", + " 932862.58191684, 944862.38597276,\n", + " 956862.70585726, 968862.75932956,\n", + " 980862.97978011, 992862.99006537,\n", + " 1004862.00768959, 1016862.08692122,\n", + " 1028861.63462963, 1040862.21737941,\n", + " 1052861.91891392, 1064861.98284895,\n", + " 1076862.35437825, 1088862.01009992,\n", + " 1100862.02850585, 1112862.4370279 ,\n", + " 1124863.3456949 , 1136862.34934454,\n", + " 1148862.23038066, 1160861.47692509,\n", + " 1172862.46616204, 1184862.06404513,\n", + " 1196861.91951627, 1208861.89356904,\n", + " 1220862.17889049, 1232861.99106498,\n", + " 1244862.49052547, 1256862.89251711,\n", + " 1268862.41196453, 1280861.96984232,\n", + " 1292861.10334917, 1304861.8676576 ,\n", + " 1316861.85408047, 1328862.71199087,\n", + " 1340862.92749166, 1352862.44262531,\n", + " 1364861.60505252, 1376863.04434379,\n", + " 1388861.42362458, 1400862.3692699 ,\n", + " 1412861.20141946, 1424861.92291089,\n", + " 1436862.85005413, 1448862.46695994,\n", + " 1460862.42366287, 1472861.93234365,\n", + " 1484862.55875226, 1496862.0583845 ,\n", + " 1508862.3186023 , 1520862.95731279,\n", + " 1532862.13580359, 1544862.14789759,\n", + " 1556862.20427556, 1568862.0074687 ,\n", + " 1580861.9026419 , 1592861.99921494,\n", + " 1604861.10001189, 1616862.31297015,\n", + " 1628862.98422931, 1640862.32262325,\n", + " 1652862.62179676, 1664862.12766606,\n", + " 1676862.23304256, 1688862.2324209 ,\n", + " 1700861.42018878, 1712862.49792768,\n", + " 1724861.99388639, 1736861.79393994,\n", + " 1748861.68713378, 1760862.01617442,\n", + " 1772861.8994825 , 1784862.35010491,\n", + " 1796861.98991304, 1808861.01601485,\n", + " 1820862.85914197, 1832862.09010331,\n", + " 1844861.90765863, 1856861.83581031,\n", + " 1868862.30366401, 1880861.87554469,\n", + " 1892861.61954572, 1904861.96465242,\n", + " 1916862.20298818, 1928862.26487282,\n", + " 1940862.25984512, 1952861.70845873,\n", + " 1964862.6760201 , 1976862.40781729,\n", + " 1988862.0601863 , 2000862.06152662,\n", + " 2012861.61160637, 2024861.54817743,\n", + " 2036862.29978874, 2048862.33737603,\n", + " 2060862.31546611, 2072862.34356724,\n", + " 2084862.02940992, 
2096862.21104696,\n", + " 2108862.08564468, 2120862.80904264,\n", + " 2132861.29995525, 2144863.04431031,\n", + " 2156861.7680832 , 2168862.22975281,\n", + " 2180862.0745934 , 2192862.45785697,\n", + " 2204862.98450722, 2216862.11947367,\n", + " 2228862.25144406, 2240861.84524301,\n", + " 2252861.64424617, 2264862.07524215,\n", + " 2276862.64609012, 2288862.83419518,\n", + " 2300861.95483109, 2312863.00214068,\n", + " 2324862.2811275 , 2336862.69096942,\n", + " 2348862.5961938 , 2360862.83510549,\n", + " 2372862.49844879, 2384862.20620674,\n", + " 2396862.09153175, 2408862.0470545 ,\n", + " 2420862.20439962, 2432861.21701588,\n", + " 2444862.20681598, 2456862.29183621,\n", + " 2468862.30979652, 2480862.37103315,\n", + " 2492861.55935617, 2504862.36887632,\n", + " 2516862.32624744, 2528862.6837446 ,\n", + " 2540862.62276294, 2552862.56717894,\n", + " 2564862.52648303, 2576862.09514914,\n", + " 2588861.69682198, 2600862.48360977,\n", + " 2612862.07561471, 2624862.03895504,\n", + " 2636862.06879401, 2648862.48574049,\n", + " 2660862.15334881, 2672862.22245282,\n", + " 2684862.70004968, 2696862.03251993],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('y',),\n", + " 'units': 'm',\n", + " 'long_name': 'y coordinate of projection',\n", + " 'standard_name': 'projection_y_coordinate'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.y" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[19.706436, 19.728317, 19.750084, ..., 16.264694, 16.22998 ,\n", + " 16.19516 ],\n", + " [19.805984, 19.827904, 19.849716, ..., 16.358276, 16.323502,\n", + " 16.288643],\n", + " [19.905594, 19.927544, 19.949394, ..., 16.45192 , 16.417084,\n", + " 16.382141],\n", + " ...,\n", + " [59.66961 , 59.70968 , 59.74953 , ..., 53.457195, 53.39534 ,\n", + " 53.33333 ],\n", + " [59.76223 , 59.802353, 59.842262, ..., 53.541912, 53.47999 ,\n", + " 53.417908],\n", + " [59.854744, 59.89492 , 59.934883, ..., 53.62653 , 53.56453 ,\n", + " 53.502373]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", + " 'standard_name': 'latitude'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-22.32898 , -22.223236, -22.117432, ..., 28.619202, 28.716614,\n", + " 28.813965],\n", + " [-22.352325, -22.24646 , -22.140533, ..., 28.655334, 28.752869,\n", + " 28.850311],\n", + " [-22.375702, -22.269714, -22.163696, ..., 28.69159 , 28.789185,\n", + " 28.886719],\n", + " ...,\n", + " [-39.438995, -39.25531 , -39.07132 , ..., 53.106964, 53.249176,\n", + " 53.391052],\n", + " [-39.518707, -39.334717, -39.15039 , ..., 53.210876, 53.35321 ,\n", + " 53.49518 ],\n", + " [-39.598724, -39.41443 , -39.229828, ..., 53.315125, 53.45755 ,\n", + " 53.59964 ]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude'}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": 
"execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading pm10 var (1/1)\n", + "Rank 000: Loaded pm10 var ((48, 1, 398, 478))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'pm10': {'data': masked_array(\n", + " data=[[[[2.05903081e-08, 2.10736815e-08, 2.16505036e-08, ...,\n", + " 1.57139212e-07, 1.56582516e-07, 1.58654302e-07],\n", + " [2.02237249e-08, 2.07961541e-08, 2.16525038e-08, ...,\n", + " 1.56947792e-07, 1.52752250e-07, 1.51975840e-07],\n", + " [1.92542160e-08, 1.95532017e-08, 2.09430890e-08, ...,\n", + " 1.56698391e-07, 1.52042290e-07, 1.49590434e-07],\n", + " ...,\n", + " [2.00155412e-08, 1.88844460e-08, 1.72373600e-08, ...,\n", + " 1.03697766e-10, 1.24570437e-10, 1.35568029e-10],\n", + " [1.88825418e-08, 1.78339921e-08, 1.65571699e-08, ...,\n", + " 1.29691299e-10, 1.19366197e-10, 1.26047242e-10],\n", + " [1.75334254e-08, 1.67024794e-08, 1.57620299e-08, ...,\n", + " 5.90659299e-10, 5.99817251e-10, 6.93886892e-10]]],\n", + " \n", + " \n", + " [[[2.04419166e-08, 2.10894218e-08, 2.14073363e-08, ...,\n", + " 1.50338764e-07, 1.46800986e-07, 1.45907649e-07],\n", + " [1.97089882e-08, 2.07061799e-08, 2.14063096e-08, ...,\n", + " 1.50352193e-07, 1.44729924e-07, 1.41249188e-07],\n", + " [1.88334628e-08, 1.97772980e-08, 2.10062652e-08, ...,\n", + " 1.50320858e-07, 1.45472683e-07, 1.40802484e-07],\n", + " ...,\n", + " [1.58354645e-08, 1.31688500e-08, 1.04182032e-08, ...,\n", + " 1.36057263e-10, 1.61256425e-10, 1.82640861e-10],\n", + " [1.58553863e-08, 1.35575196e-08, 1.12653220e-08, ...,\n", + " 4.73602046e-10, 5.22056454e-10, 5.99001682e-10],\n", + " [1.51028274e-08, 1.29572175e-08, 1.06422196e-08, ...,\n", + " 1.20180965e-09, 1.22420163e-09, 1.27862843e-09]]],\n", + " \n", + " \n", + " [[[2.06775415e-08, 2.09295870e-08, 2.10403801e-08, ...,\n", + " 1.43437418e-07, 1.36887849e-07, 1.33009308e-07],\n", + " [2.03318944e-08, 2.06964064e-08, 2.09895372e-08, ...,\n", + " 1.43851324e-07, 1.36785971e-07, 1.30620521e-07],\n", + " [1.96500309e-08, 1.97185361e-08, 2.00775236e-08, ...,\n", + " 1.43990789e-07, 1.38915652e-07, 1.32079862e-07],\n", + " ...,\n", + " [1.15522543e-08, 1.03884448e-08, 8.92967922e-09, ...,\n", + " 2.96768943e-10, 5.31864996e-10, 6.76325274e-10],\n", + " [1.30444580e-08, 1.24477344e-08, 1.19089290e-08, ...,\n", + " 9.38010669e-10, 1.02875852e-09, 1.09171505e-09],\n", + " [1.33038087e-08, 1.28954767e-08, 1.27622268e-08, ...,\n", + " 1.10067289e-09, 1.11675491e-09, 1.12590814e-09]]],\n", + " \n", + " \n", + " ...,\n", + " \n", + " \n", + " [[[1.99427657e-08, 2.07774260e-08, 2.12375095e-08, ...,\n", + " 1.42070576e-07, 1.23106801e-07, 1.05545318e-07],\n", + " [1.94999394e-08, 2.03327897e-08, 2.09769428e-08, ...,\n", + " 1.40088332e-07, 1.22823366e-07, 1.06322936e-07],\n", + " [1.92740401e-08, 2.01404546e-08, 2.12538360e-08, ...,\n", + " 1.37738098e-07, 1.25881471e-07, 1.11278482e-07],\n", + " ...,\n", + " [1.62301383e-09, 3.29047856e-09, 4.25983115e-09, ...,\n", + " 5.10578968e-10, 7.78555886e-10, 7.58658358e-10],\n", + " [1.61382108e-09, 3.05706660e-09, 3.85214838e-09, ...,\n", + " 1.44986870e-10, 4.45782633e-10, 6.53098131e-10],\n", + " [1.51593449e-09, 2.81125856e-09, 3.49995122e-09, ...,\n", + " 3.83349671e-11, 5.17163673e-11, 1.41100784e-10]]],\n", + " \n", + " \n", + " 
[[[2.12318145e-08, 2.15663487e-08, 2.17239737e-08, ...,\n", + " 1.33982709e-07, 1.17447051e-07, 1.01946490e-07],\n", + " [2.09607443e-08, 2.13947366e-08, 2.17022080e-08, ...,\n", + " 1.30945295e-07, 1.15195341e-07, 1.00572883e-07],\n", + " [2.06122941e-08, 2.12153246e-08, 2.20146017e-08, ...,\n", + " 1.27737934e-07, 1.15350041e-07, 1.01470562e-07],\n", + " ...,\n", + " [1.74116932e-09, 3.57977159e-09, 4.66885952e-09, ...,\n", + " 1.35524800e-10, 3.73824971e-10, 5.27946020e-10],\n", + " [1.69331171e-09, 3.35175421e-09, 4.35682335e-09, ...,\n", + " 6.59049343e-11, 8.77121500e-11, 1.72610246e-10],\n", + " [1.66064651e-09, 3.09451931e-09, 3.84808585e-09, ...,\n", + " 3.81935802e-11, 3.20906392e-11, 2.95690208e-11]]],\n", + " \n", + " \n", + " [[[2.20933263e-08, 2.23298162e-08, 2.22625207e-08, ...,\n", + " 1.25896420e-07, 1.11788722e-07, 9.83487993e-08],\n", + " [2.17691785e-08, 2.21585328e-08, 2.22608545e-08, ...,\n", + " 1.21806693e-07, 1.07571751e-07, 9.48248058e-08],\n", + " [2.10832365e-08, 2.17355911e-08, 2.23796377e-08, ...,\n", + " 1.17739212e-07, 1.04824068e-07, 9.16652283e-08],\n", + " ...,\n", + " [1.82784676e-09, 3.94770527e-09, 5.16965803e-09, ...,\n", + " 7.80837409e-11, 9.47864148e-11, 1.31354164e-10],\n", + " [1.82815707e-09, 3.68124264e-09, 4.70819206e-09, ...,\n", + " 5.68534525e-11, 4.92194792e-11, 5.04330119e-11],\n", + " [1.79193316e-09, 3.41961126e-09, 4.36335901e-09, ...,\n", + " 4.09052167e-11, 3.92018085e-11, 3.67623848e-11]]]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'lev', 'y', 'x'),\n", + " 'units': 'kgm-3',\n", + " 'coordinates': 'lat lon',\n", + " 'grid_mapping': 'Lambert_conformal'}}" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating lcc_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing pm10 var (1/1)\n", + "Rank 000: Var pm10 created (1/1)\n", + "Rank 000: Filling pm10)\n", + "Rank 000: Var pm10 data (1/1)\n", + "Rank 000: Var pm10 completed (1/1)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('lcc_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('lcc_file_1.nc', info=True)\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:            (time: 48, lev: 1, y: 398, x: 478)\n",
+       "Coordinates:\n",
+       "  * time               (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n",
+       "  * lev                (lev) float64 0.0\n",
+       "    lat                (y, x) float64 ...\n",
+       "    lon                (y, x) float64 ...\n",
+       "  * y                  (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n",
+       "  * x                  (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n",
+       "Data variables:\n",
+       "    pm10               (time, lev, y, x) float32 ...\n",
+       "    Lambert_conformal  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
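As a quick round-trip sanity check (a sketch, assuming `nessy_1` from the cells above is still in memory and `lcc_file_1.nc` was written to the working directory), the values xarray reads back should match what NES wrote:

```python
import numpy as np
import xarray as xr

# The reopened file should hold exactly the data NES wrote above.
ds = xr.open_dataset('lcc_file_1.nc')
assert ds['pm10'].shape == (48, 1, 398, 478)
assert np.allclose(ds['pm10'].values, nessy_1.variables['pm10']['data'])
ds.close()
```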
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 48, lev: 1, y: 398, x: 478)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2022-06-26 ... 2022-06-27T23:00:00\n", + " * lev (lev) float64 0.0\n", + " lat (y, x) float64 ...\n", + " lon (y, x) float64 ...\n", + " * y (y) float64 -2.067e+06 -2.055e+06 ... 2.685e+06 2.697e+06\n", + " * x (x) float64 -2.126e+06 -2.114e+06 ... 3.586e+06 3.598e+06\n", + "Data variables:\n", + " pm10 (time, lev, y, x) float32 ...\n", + " Lambert_conformal |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('lcc_file_1.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/1.5-mercator_grids.ipynb b/Jupyter_notebooks/1.5-mercator_grids.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ad8cc7fa57036fb76c1b25bebd93af76b950032f --- /dev/null +++ b/Jupyter_notebooks/1.5-mercator_grids.ipynb @@ -0,0 +1,1396 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to read and write Mercator grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from nes import *\n", + "import xarray as xr" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "nc_path_1 = '/gpfs/scratch/bsc32/bsc32538/a4mg/HERMESv3/auxiliar_files/d01/temporal_coords.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Read and write" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with xarray" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# xr.open_dataset(nc_path_1, decode_times=False).drop(['lat_bnds', 'lon_bnds']).to_netcdf('input/mercator_grid_example.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:    (time: 1, y: 236, x: 210)\n",
+       "Coordinates:\n",
+       "  * time       (time) float64 0.0\n",
+       "    lat        (y, x) float32 -43.52 -43.52 -43.52 -43.52 ... 49.6 49.6 49.6\n",
+       "    lon        (y, x) float32 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n",
+       "  * x          (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n",
+       "  * y          (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
+       "Data variables:\n",
+       "    var_aux    (time, y, x) float32 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0\n",
+       "    mercator   int32 -2147483647\n",
+       "    cell_area  (y, x) float32 1.316e+09 1.316e+09 ... 1.051e+09 1.051e+09
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, y: 236, x: 210)\n", + "Coordinates:\n", + " * time (time) float64 0.0\n", + " lat (y, x) float32 ...\n", + " lon (y, x) float32 ...\n", + " * x (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n", + " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", + "Data variables:\n", + " var_aux (time, y, x) float32 ...\n", + " mercator int32 ...\n", + " cell_area (y, x) float32 ..." + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('input/mercator_grid_example.nc', decode_times=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path='input/mercator_grid_example.nc', info=True)\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2000, 1, 1, 0, 0)]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-1.01014500e+05, -5.10145000e+04, -1.01450000e+03,\n", + " 4.89855000e+04, 9.89855000e+04, 1.48985500e+05,\n", + " 1.98985500e+05, 2.48985500e+05, 2.98985500e+05,\n", + " 3.48985500e+05, 3.98985500e+05, 4.48985500e+05,\n", + " 4.98985500e+05, 5.48985500e+05, 5.98985500e+05,\n", + " 6.48985500e+05, 6.98985500e+05, 7.48985500e+05,\n", + " 7.98985500e+05, 8.48985500e+05, 8.98985500e+05,\n", + " 9.48985500e+05, 9.98985500e+05, 1.04898550e+06,\n", + " 1.09898550e+06, 1.14898550e+06, 1.19898550e+06,\n", + " 1.24898550e+06, 1.29898550e+06, 1.34898550e+06,\n", + " 1.39898550e+06, 1.44898550e+06, 1.49898550e+06,\n", + " 1.54898550e+06, 1.59898550e+06, 1.64898550e+06,\n", + " 1.69898550e+06, 1.74898550e+06, 1.79898550e+06,\n", + " 1.84898550e+06, 1.89898550e+06, 1.94898550e+06,\n", + " 1.99898550e+06, 2.04898550e+06, 2.09898550e+06,\n", + " 2.14898550e+06, 2.19898550e+06, 2.24898550e+06,\n", + " 2.29898550e+06, 2.34898550e+06, 2.39898550e+06,\n", + " 2.44898550e+06, 2.49898550e+06, 2.54898550e+06,\n", + " 2.59898550e+06, 2.64898550e+06, 2.69898550e+06,\n", + " 2.74898550e+06, 2.79898550e+06, 2.84898550e+06,\n", + " 2.89898550e+06, 2.94898550e+06, 2.99898550e+06,\n", + " 3.04898550e+06, 3.09898550e+06, 3.14898550e+06,\n", + " 3.19898550e+06, 3.24898550e+06, 3.29898550e+06,\n", + " 3.34898550e+06, 3.39898550e+06, 3.44898550e+06,\n", + " 3.49898550e+06, 3.54898550e+06, 3.59898550e+06,\n", + " 3.64898550e+06, 3.69898550e+06, 3.74898550e+06,\n", + " 3.79898550e+06, 3.84898550e+06, 3.89898550e+06,\n", + " 3.94898550e+06, 3.99898550e+06, 4.04898550e+06,\n", + " 4.09898550e+06, 4.14898550e+06, 4.19898550e+06,\n", + " 4.24898550e+06, 4.29898550e+06, 4.34898550e+06,\n", + " 4.39898550e+06, 4.44898550e+06, 4.49898550e+06,\n", + " 
4.54898550e+06, 4.59898550e+06, 4.64898550e+06,\n", + " 4.69898550e+06, 4.74898550e+06, 4.79898550e+06,\n", + " 4.84898550e+06, 4.89898550e+06, 4.94898550e+06,\n", + " 4.99898550e+06, 5.04898550e+06, 5.09898550e+06,\n", + " 5.14898550e+06, 5.19898550e+06, 5.24898550e+06,\n", + " 5.29898550e+06, 5.34898550e+06, 5.39898550e+06,\n", + " 5.44898550e+06, 5.49898550e+06, 5.54898550e+06,\n", + " 5.59898550e+06, 5.64898550e+06, 5.69898550e+06,\n", + " 5.74898550e+06, 5.79898550e+06, 5.84898550e+06,\n", + " 5.89898550e+06, 5.94898550e+06, 5.99898550e+06,\n", + " 6.04898550e+06, 6.09898550e+06, 6.14898550e+06,\n", + " 6.19898550e+06, 6.24898550e+06, 6.29898550e+06,\n", + " 6.34898550e+06, 6.39898550e+06, 6.44898550e+06,\n", + " 6.49898550e+06, 6.54898550e+06, 6.59898550e+06,\n", + " 6.64898550e+06, 6.69898550e+06, 6.74898550e+06,\n", + " 6.79898550e+06, 6.84898550e+06, 6.89898550e+06,\n", + " 6.94898550e+06, 6.99898550e+06, 7.04898550e+06,\n", + " 7.09898550e+06, 7.14898550e+06, 7.19898550e+06,\n", + " 7.24898550e+06, 7.29898550e+06, 7.34898550e+06,\n", + " 7.39898550e+06, 7.44898550e+06, 7.49898550e+06,\n", + " 7.54898550e+06, 7.59898550e+06, 7.64898550e+06,\n", + " 7.69898550e+06, 7.74898550e+06, 7.79898550e+06,\n", + " 7.84898550e+06, 7.89898550e+06, 7.94898550e+06,\n", + " 7.99898550e+06, 8.04898550e+06, 8.09898550e+06,\n", + " 8.14898550e+06, 8.19898550e+06, 8.24898550e+06,\n", + " 8.29898550e+06, 8.34898550e+06, 8.39898550e+06,\n", + " 8.44898550e+06, 8.49898550e+06, 8.54898550e+06,\n", + " 8.59898550e+06, 8.64898550e+06, 8.69898550e+06,\n", + " 8.74898550e+06, 8.79898550e+06, 8.84898550e+06,\n", + " 8.89898550e+06, 8.94898550e+06, 8.99898550e+06,\n", + " 9.04898550e+06, 9.09898550e+06, 9.14898550e+06,\n", + " 9.19898550e+06, 9.24898550e+06, 9.29898550e+06,\n", + " 9.34898550e+06, 9.39898550e+06, 9.44898550e+06,\n", + " 9.49898550e+06, 9.54898550e+06, 9.59898550e+06,\n", + " 9.64898550e+06, 9.69898550e+06, 9.74898550e+06,\n", + " 9.79898550e+06, 9.84898550e+06, 9.89898550e+06,\n", + " 9.94898550e+06, 9.99898550e+06, 1.00489855e+07,\n", + " 1.00989855e+07, 1.01489855e+07, 1.01989855e+07,\n", + " 1.02489855e+07, 1.02989855e+07, 1.03489855e+07],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('x',),\n", + " 'units': '1000 m',\n", + " 'long_name': 'x coordinate of projection',\n", + " 'standard_name': 'projection_x_coordinate'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.x" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5382460., -5332460., -5282460., -5232460., -5182460.,\n", + " -5132460., -5082460., -5032460., -4982460., -4932460.,\n", + " -4882460., -4832460., -4782460., -4732460., -4682460.,\n", + " -4632460., -4582460., -4532460., -4482460., -4432460.,\n", + " -4382460., -4332460., -4282460., -4232460., -4182460.,\n", + " -4132460., -4082460., -4032460., -3982460., -3932460.,\n", + " -3882460., -3832460., -3782460., -3732460., -3682460.,\n", + " -3632460., -3582460., -3532460., -3482460., -3432460.,\n", + " -3382460., -3332460., -3282460., -3232460., -3182460.,\n", + " -3132460., -3082460., -3032460., -2982460., -2932460.,\n", + " -2882460., -2832460., -2782460., -2732460., -2682460.,\n", + " -2632460., -2582460., -2532460., -2482460., -2432460.,\n", + " -2382460., -2332460., -2282460., -2232460., -2182460.,\n", + " -2132460., -2082460., -2032460., -1982460., -1932460.,\n", + " 
-1882460., -1832460., -1782460., -1732460., -1682460.,\n", + " -1632460., -1582460., -1532460., -1482460., -1432460.,\n", + " -1382460., -1332460., -1282460., -1232460., -1182460.,\n", + " -1132460., -1082460., -1032460., -982460., -932460.,\n", + " -882460., -832460., -782460., -732460., -682460.,\n", + " -632460., -582460., -532460., -482460., -432460.,\n", + " -382460., -332460., -282460., -232460., -182460.,\n", + " -132460., -82460., -32460., 17540., 67540.,\n", + " 117540., 167540., 217540., 267540., 317540.,\n", + " 367540., 417540., 467540., 517540., 567540.,\n", + " 617540., 667540., 717540., 767540., 817540.,\n", + " 867540., 917540., 967540., 1017540., 1067540.,\n", + " 1117540., 1167540., 1217540., 1267540., 1317540.,\n", + " 1367540., 1417540., 1467540., 1517540., 1567540.,\n", + " 1617540., 1667540., 1717540., 1767540., 1817540.,\n", + " 1867540., 1917540., 1967540., 2017540., 2067540.,\n", + " 2117540., 2167540., 2217540., 2267540., 2317540.,\n", + " 2367540., 2417540., 2467540., 2517540., 2567540.,\n", + " 2617540., 2667540., 2717540., 2767540., 2817540.,\n", + " 2867540., 2917540., 2967540., 3017540., 3067540.,\n", + " 3117540., 3167540., 3217540., 3267540., 3317540.,\n", + " 3367540., 3417540., 3467540., 3517540., 3567540.,\n", + " 3617540., 3667540., 3717540., 3767540., 3817540.,\n", + " 3867540., 3917540., 3967540., 4017540., 4067540.,\n", + " 4117540., 4167540., 4217540., 4267540., 4317540.,\n", + " 4367540., 4417540., 4467540., 4517540., 4567540.,\n", + " 4617540., 4667540., 4717540., 4767540., 4817540.,\n", + " 4867540., 4917540., 4967540., 5017540., 5067540.,\n", + " 5117540., 5167540., 5217540., 5267540., 5317540.,\n", + " 5367540., 5417540., 5467540., 5517540., 5567540.,\n", + " 5617540., 5667540., 5717540., 5767540., 5817540.,\n", + " 5867540., 5917540., 5967540., 6017540., 6067540.,\n", + " 6117540., 6167540., 6217540., 6267540., 6317540.,\n", + " 6367540.],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('y',),\n", + " 'units': '1000 m',\n", + " 'long_name': 'y coordinate of projection',\n", + " 'standard_name': 'projection_y_coordinate'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.y" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-43.5182 , -43.5182 , -43.5182 , ..., -43.5182 , -43.5182 ,\n", + " -43.5182 ],\n", + " [-43.191082, -43.191082, -43.191082, ..., -43.191082, -43.191082,\n", + " -43.191082],\n", + " [-42.8622 , -42.8622 , -42.8622 , ..., -42.8622 , -42.8622 ,\n", + " -42.8622 ],\n", + " ...,\n", + " [ 49.016712, 49.016712, 49.016712, ..., 49.016712, 49.016712,\n", + " 49.016712],\n", + " [ 49.31089 , 49.31089 , 49.31089 , ..., 49.31089 , 49.31089 ,\n", + " 49.31089 ],\n", + " [ 49.60332 , 49.60332 , 49.60332 , ..., 49.60332 , 49.60332 ,\n", + " 49.60332 ]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'units': 'degrees_north',\n", + " 'axis': 'Y',\n", + " 'long_name': 'latitude coordinate',\n", + " 'standard_name': 'latitude',\n", + " 'bounds': 'lat_bnds'}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(\n", + " data=[[-18.9089 , -18.459013, 
-18.009129, ..., 74.217415, 74.6673 ,\n", + " 75.11718 ],\n", + " [-18.9089 , -18.459013, -18.009129, ..., 74.217415, 74.6673 ,\n", + " 75.11718 ],\n", + " [-18.9089 , -18.459013, -18.009129, ..., 74.217415, 74.6673 ,\n", + " 75.11718 ],\n", + " ...,\n", + " [-18.9089 , -18.459013, -18.009129, ..., 74.217415, 74.6673 ,\n", + " 75.11718 ],\n", + " [-18.9089 , -18.459013, -18.009129, ..., 74.217415, 74.6673 ,\n", + " 75.11718 ],\n", + " [-18.9089 , -18.459013, -18.009129, ..., 74.217415, 74.6673 ,\n", + " 75.11718 ]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'units': 'degrees_east',\n", + " 'axis': 'X',\n", + " 'long_name': 'longitude coordinate',\n", + " 'standard_name': 'longitude',\n", + " 'bounds': 'lon_bnds'}" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading var_aux var (1/2)\n", + "Rank 000: Loaded var_aux var ((1, 1, 236, 210))\n", + "Rank 000: Loading cell_area var (2/2)\n", + "Rank 000: Loaded cell_area var ((1, 1, 236, 210))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'var_aux': {'data': masked_array(\n", + " data=[[[[0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " ...,\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.],\n", + " [0., 0., 0., ..., 0., 0., 0.]]]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('time', 'y', 'x'),\n", + " 'units': '?',\n", + " 'cell_measures': 'area: cell_area',\n", + " 'grid_mapping': 'mercator',\n", + " 'coordinates': 'lat lon'},\n", + " 'cell_area': {'data': masked_array(\n", + " data=[[[[1.31594240e+09, 1.31593690e+09, 1.31594240e+09, ...,\n", + " 1.31593126e+09, 1.31595354e+09, 1.31593126e+09],\n", + " [1.33020250e+09, 1.33019686e+09, 1.33020250e+09, ...,\n", + " 1.33019123e+09, 1.33021389e+09, 1.33019123e+09],\n", + " [1.34454989e+09, 1.34454426e+09, 1.34454989e+09, ...,\n", + " 1.34453850e+09, 1.34456128e+09, 1.34453850e+09],\n", + " ...,\n", + " [1.07638515e+09, 1.07638054e+09, 1.07638515e+09, ...,\n", + " 1.07637606e+09, 1.07639424e+09, 1.07637606e+09],\n", + " [1.06368742e+09, 1.06368288e+09, 1.06368742e+09, ...,\n", + " 1.06367840e+09, 1.06369645e+09, 1.06367840e+09],\n", + " [1.05104730e+09, 1.05104288e+09, 1.05104730e+09, ...,\n", + " 1.05103840e+09, 1.05105626e+09, 1.05103840e+09]]]],\n", + " mask=False,\n", + " fill_value=1e+20,\n", + " dtype=float32),\n", + " 'dimensions': ('y', 'x'),\n", + " 'long_name': 'area of the grid cell',\n", + " 'standard_name': 'cell_area',\n", + " 'units': 'm2',\n", + " 'coordinates': 'lat lon'}}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.variables" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating mercator_file_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing var_aux var (1/2)\n", + "Rank 000: Var var_aux created (1/2)\n", + "Rank 000: Filling var_aux)\n", + 
"Rank 000: Var var_aux data (1/2)\n", + "Rank 000: Var var_aux completed (1/2)\n", + "Rank 000: Writing cell_area var (2/2)\n", + "Rank 000: Var cell_area created (2/2)\n", + "Rank 000: Filling cell_area)\n", + "Rank 000: Var cell_area data (2/2)\n", + "Rank 000: Var cell_area completed (2/2)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('mercator_file_1.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reopen with NES" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf('mercator_file_1.nc', info=True)\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:    (time: 1, lev: 1, y: 236, x: 210)\n",
+       "Coordinates:\n",
+       "  * time       (time) datetime64[ns] 2000-01-01\n",
+       "  * lev        (lev) float64 0.0\n",
+       "    lat        (y, x) float64 -43.52 -43.52 -43.52 -43.52 ... 49.6 49.6 49.6\n",
+       "    lon        (y, x) float64 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n",
+       "  * y          (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
+       "  * x          (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n",
+       "Data variables:\n",
+       "    var_aux    (time, lev, y, x) float32 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0\n",
+       "    cell_area  (time, lev, y, x) float32 1.316e+09 1.316e+09 ... 1.051e+09\n",
+       "    mercator   |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
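Comparing this dump with the one for `input/mercator_grid_example.nc` above shows that NES writes every variable with the full `(time, lev, y, x)` shape: both `var_aux` and `cell_area` gain dimensions on the round trip. A minimal sketch to confirm, assuming both files are present:

```python
import xarray as xr

before = xr.open_dataset('input/mercator_grid_example.nc', decode_times=False)
after = xr.open_dataset('mercator_file_1.nc')
print(before['var_aux'].dims)    # ('time', 'y', 'x')
print(after['var_aux'].dims)     # ('time', 'lev', 'y', 'x')
print(before['cell_area'].dims)  # ('y', 'x')
print(after['cell_area'].dims)   # ('time', 'lev', 'y', 'x')
```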
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, y: 236, x: 210)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2000-01-01\n", + " * lev (lev) float64 0.0\n", + " lat (y, x) float64 ...\n", + " lon (y, x) float64 ...\n", + " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", + " * x (x) float64 -1.01e+05 -5.101e+04 ... 1.03e+07 1.035e+07\n", + "Data variables:\n", + " var_aux (time, lev, y, x) float32 ...\n", + " cell_area (time, lev, y, x) float32 ...\n", + " mercator |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('mercator_file_1.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/2-create_nes.ipynb b/Jupyter_notebooks/2-create_nes.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e69ddcb5f9f1492aeac2196540200ca6cd3d85cc --- /dev/null +++ b/Jupyter_notebooks/2-create_nes.ipynb @@ -0,0 +1,3655 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create regular, rotated, points, LCC and Mercator grids" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import pandas as pd\n", + "import numpy as np\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Create regular grid" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lat_orig = 41.1\n", + "lon_orig = 1.8\n", + "inc_lat = 0.1\n", + "inc_lon = 0.1\n", + "n_lat = 10\n", + "n_lon = 10\n", + "regular_grid = create_nes(comm=None, info=False, projection='regular',\n", + " lat_orig=lat_orig, lon_orig=lon_orig, inc_lat=inc_lat, inc_lon=inc_lon, \n", + " n_lat=n_lat, n_lon=n_lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating regular_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "regular_grid.to_netcdf('regular_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:  (time: 1, lev: 1, lat: 10, lon: 10)\n",
+       "Coordinates:\n",
+       "  * time     (time) datetime64[ns] 1996-12-31\n",
+       "  * lev      (lev) float64 0.0\n",
+       "  * lat      (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n",
+       "  * lon      (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n",
+       "Data variables:\n",
+       "    crs      |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
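The axes above suggest that `lat_orig`/`lon_orig` mark the grid edge and that cell centres sit half an increment inside. A quick check with the parameters from the cell above (an illustrative sketch, not NES internals):

```python
import numpy as np

# Centres = edge + half a cell + i * increment.
lat_centres = lat_orig + inc_lat / 2 + np.arange(n_lat) * inc_lat
lon_centres = lon_orig + inc_lon / 2 + np.arange(n_lon) * inc_lon
print(lat_centres)  # [41.15 41.25 ... 42.05], as in the dump above
print(lon_centres)  # [1.85 1.95 ... 2.75]
```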
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, lat: 10, lon: 10)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * lat (lat) float64 41.15 41.25 41.35 41.45 ... 41.75 41.85 41.95 42.05\n", + " * lon (lon) float64 1.85 1.95 2.05 2.15 2.25 2.35 2.45 2.55 2.65 2.75\n", + "Data variables:\n", + " crs |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('regular_grid.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Create rotated grid" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "centre_lat = 51\n", + "centre_lon = 10\n", + "west_boundary = -35\n", + "south_boundary = -27\n", + "inc_rlat = 0.2\n", + "inc_rlon = 0.2\n", + "rotated_grid = create_nes(comm=None, info=False, projection='rotated',\n", + " centre_lat=centre_lat, centre_lon=centre_lon,\n", + " west_boundary=west_boundary, south_boundary=south_boundary,\n", + " inc_rlat=inc_rlat, inc_rlon=inc_rlon)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating rotated_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "rotated_grid.to_netcdf('rotated_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 1, lev: 1, rlat: 271, rlon: 351)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 1996-12-31\n",
+       "  * lev           (lev) float64 0.0\n",
+       "  * rlat          (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n",
+       "  * rlon          (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n",
+       "Data variables:\n",
+       "    lat           (rlat, rlon) float64 16.35 16.43 16.52 ... 58.83 58.68 58.53\n",
+       "    lon           (rlat, rlon) float64 -22.18 -22.02 -21.85 ... 88.05 88.23\n",
+       "    rotated_pole  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
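The rotated axes run from `south_boundary`/`west_boundary` to their mirrored values in steps of `inc_rlat`/`inc_rlon`, which is what fixes the 271 × 351 grid size. A sketch reproducing the axes from the parameters above:

```python
import numpy as np

# Symmetric domain: boundary ... -boundary in steps of the increment.
n_rlat = round(2 * abs(south_boundary) / inc_rlat) + 1  # 271
n_rlon = round(2 * abs(west_boundary) / inc_rlon) + 1   # 351
rlat = south_boundary + np.arange(n_rlat) * inc_rlat
rlon = west_boundary + np.arange(n_rlon) * inc_rlon
assert np.isclose(rlat[-1], 27.0) and np.isclose(rlon[-1], 35.0)
```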
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, rlat: 271, rlon: 351)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * rlat (rlat) float64 -27.0 -26.8 -26.6 -26.4 ... 26.4 26.6 26.8 27.0\n", + " * rlon (rlon) float64 -35.0 -34.8 -34.6 -34.4 ... 34.4 34.6 34.8 35.0\n", + "Data variables:\n", + " lat (rlat, rlon) float64 ...\n", + " lon (rlat, rlon) float64 ...\n", + " rotated_pole |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('rotated_grid.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Create grid from points" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### File 1" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " station.code lat lon \\\n", + "0 ES0266A 41.379322 2.086140 \n", + "1 ES0392A 41.727704 1.838531 \n", + "2 ES0395A 41.567824 2.014598 \n", + "3 ES0559A 41.387424 2.164918 \n", + "4 ES0567A 41.384906 2.119574 \n", + ".. ... ... ... \n", + "129 ES2087A 41.929283 2.257302 \n", + "130 ES2091A 40.579900 0.553500 \n", + "131 ES2088A 41.771060 2.250647 \n", + "132 ES1908A 41.239069 1.856564 \n", + "133 ES9994A 42.358363 1.459455 \n", + "\n", + " standardised_network_provided_area_classification \n", + "0 urban-centre \n", + "1 urban-suburban \n", + "2 urban-centre \n", + "3 urban-centre \n", + "4 urban-centre \n", + ".. ... \n", + "129 NaN \n", + "130 NaN \n", + "131 NaN \n", + "132 NaN \n", + "133 NaN \n", + "\n", + "[134 rows x 4 columns]" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/XVPCA_info.csv'\n", + "df = pd.read_csv(file_path)\n", + "df" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "lat = df['lat']\n", + "lon = df['lon']\n", + "metadata = {'station_code': {'data': df['station.code'],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'area_classification': {'data': df['standardised_network_provided_area_classification'],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str}}" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_1.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_code var (1/2)\n", + "Rank 000: Var station_code created (1/2)\n", + "Rank 000: Var station_code data (1/2)\n", + "Rank 000: Var station_code completed (1/2)\n", + "Rank 000: Writing area_classification var (2/2)\n", + "Rank 000: Var area_classification created (2/2)\n", + "Rank 000: Var area_classification data (2/2)\n", + "Rank 000: Var area_classification completed (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable area_classificationInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_1.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:              (time: 1, station: 134)\n",
+       "Coordinates:\n",
+       "  * time                 (time) datetime64[ns] 1996-12-31\n",
+       "  * station              (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n",
+       "Data variables:\n",
+       "    station_code         (station) object 'ES0266A' 'ES0392A' ... 'ES9994A'\n",
+       "    area_classification  (station) object 'urban-centre' ... 'nan'\n",
+       "    lat                  (station) float64 41.38 41.73 41.57 ... 41.24 42.36\n",
+       "    lon                  (station) float64 2.086 1.839 2.015 ... 1.857 1.459\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
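The `UserWarning` emitted during the write flags a mismatch between the requested `str` dtype and the object-dtype arrays pandas provides; the strings also come back as object dtype on reread. A small sketch of recovering them (assuming `points_grid_1.nc` is in the working directory):

```python
import xarray as xr

ds = xr.open_dataset('points_grid_1.nc')
codes = ds['station_code'].values.astype(str)  # object -> str
print(codes[:5])  # ['ES0266A' 'ES0392A' 'ES0395A' 'ES0559A' 'ES0567A']
```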
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, station: 134)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * station (station) float64 0.0 1.0 2.0 3.0 ... 131.0 132.0 133.0\n", + "Data variables:\n", + " station_code (station) object ...\n", + " area_classification (station) object ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_1.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### File 2" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " Estació PM10 Barcelona (Eixample) \\\n", + "0 Codi europeu ES1438A \n", + "1 Dia Valor (µg/m3) \n", + "2 2017-01-01 00:00:00 19.6 \n", + "3 2017-01-02 00:00:00 27.2 \n", + "4 2017-01-03 00:00:00 35.7 \n", + ".. ... ... \n", + "362 2017-12-27 00:00:00 17.5 \n", + "363 2017-12-28 00:00:00 17 \n", + "364 2017-12-29 00:00:00 24.6 \n", + "365 2017-12-30 00:00:00 27.4 \n", + "366 2017-12-31 00:00:00 17.3 \n", + "\n", + " PM10 Badalona (guàrdia urbana) PM10 Badalona (Assamblea de Catalunya) \\\n", + "0 ES1928A ES2027A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 NaN 20 \n", + "3 20.86 23 \n", + "4 NaN 32 \n", + ".. ... ... \n", + "362 7.59 10 \n", + "363 NaN 14 \n", + "364 21 24 \n", + "365 NaN 15 \n", + "366 12.5 13 \n", + "\n", + " PM10 Barcelona (Pl. de la Universitat) PM10 Barcelona (Poblenou) \\\n", + "0 ES0559A ES0691A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 20.2 25.6 \n", + "3 31.6 35 \n", + "4 37 36.2 \n", + ".. ... ... \n", + "362 16.9 14 \n", + "363 17.9 15 \n", + "364 23.2 25.8 \n", + "365 22.3 16.6 \n", + "366 16.3 17.6 \n", + "\n", + " PM10 Barcelona (Zona Universitària) PM10 Barcelona (el Port Vell) \\\n", + "0 ES0567A ES1870A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 16.5 29 \n", + "3 22.8 28 \n", + "4 30.9 31 \n", + ".. ... ... \n", + "362 13.1 21 \n", + "363 NaN 13 \n", + "364 15.3 21 \n", + "365 11.2 16 \n", + "366 9.9 14 \n", + "\n", + " PM10 Barcelona (IES Goya) PM10 Barcelona (IES Verdaguer) ... \\\n", + "0 ES1852A ES1900A ... \n", + "1 Valor (µg/m3) Valor (µg/m3) ... \n", + "2 NaN 23.8 ... \n", + "3 17.2 32.4 ... \n", + "4 NaN 35.8 ... \n", + ".. ... ... ... \n", + "362 NaN 20.8 ... \n", + "363 14.5 16 ... \n", + "364 NaN 25.9 ... \n", + "365 10.7 18.8 ... \n", + "366 NaN 17.4 ... \n", + "\n", + " PM10 Constantí (Gaudí) PM10 Vila-seca (RENFE) \\\n", + "0 ES1123A ES1117A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 12.97 NaN \n", + "3 NaN 25.38 \n", + "4 21.8 36.49 \n", + ".. ... ... \n", + "362 12 22.95 \n", + "363 NaN 6.5 \n", + "364 8.86 9.56 \n", + "365 NaN NaN \n", + "366 12.77 NaN \n", + "\n", + " PM10 Sitges (Vallcarca-oficines) PM10 Sant Vicenç dels Horts (Àlaba) \\\n", + "0 ES2033A ES2011A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 11 22.49 \n", + "3 26 25.39 \n", + "4 48 30.65 \n", + ".. ... ... \n", + "362 NaN NaN \n", + "363 NaN 9.976 \n", + "364 NaN 23.76 \n", + "365 NaN 19.04 \n", + "366 NaN 15.23 \n", + "\n", + " PM10 Montsec (OAM) PM10 Montseny (la Castanya) \\\n", + "0 ES1982A ES1778A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 9.500299 7.936455 \n", + "3 1.829618 9.787004 \n", + "4 8.094607 16.97829 \n", + ".. ... ... \n", + "362 1 3.066751 \n", + "363 1 3.351872 \n", + "364 1 4.219732 \n", + "365 1.091187 4.713029 \n", + "366 2.15659 5.024302 \n", + "\n", + " PM10 Caldes de Montbui (Ajuntament) \\\n", + "0 ES1680A \n", + "1 Valor (µg/m3) \n", + "2 NaN \n", + "3 32 \n", + "4 43 \n", + ".. ... \n", + "362 NaN \n", + "363 NaN \n", + "364 NaN \n", + "365 NaN \n", + "366 NaN \n", + "\n", + " PM10 Sant Feliu de Llobregat (Eugeni d'Ors) \\\n", + "0 ES1362A \n", + "1 Valor (µg/m3) \n", + "2 NaN \n", + "3 22.06 \n", + "4 35.84 \n", + ".. ... \n", + "362 10.3 \n", + "363 26.81 \n", + "364 14.09 \n", + "365 NaN \n", + "366 NaN \n", + "\n", + " PM 10 La Seu d'Urgell (CC Les Monges) PM10 Vic (Centre Cívic Santa Anna) \n", + "0 ES9994A ES1874A \n", + "1 Valor (µg/m3) Valor (µg/m3) \n", + "2 NaN NaN \n", + "3 NaN NaN \n", + "4 NaN NaN \n", + ".. ... ... 
\n", + "362 NaN NaN \n", + "363 NaN NaN \n", + "364 NaN NaN \n", + "365 NaN NaN \n", + "366 NaN NaN \n", + "\n", + "[367 rows x 84 columns]" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/Dades_2017.xlsx'\n", + "df_2 = pd.read_excel(file_path)\n", + "df_2" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "times = df_2['Estació'].iloc[2:]\n", + "lat = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", + "lon = np.full(len(df_2.iloc[0].values[1:]), np.nan)\n", + "metadata = {'station_name': {'data': df_2.columns.str.replace('PM10 ', '').str.replace('PM 10 ', '').to_numpy()[1:],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'station_code': {'data': df_2.iloc[0].values[1:],\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'pm10': {'data': df_2.iloc[2:, 1:].to_numpy().T,\n", + " 'dimensions': ('station', 'time',),\n", + " 'dtype': float}}" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon, times=times)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_name var (1/3)\n", + "Rank 000: Var station_name created (1/3)\n", + "Rank 000: Var station_name data (1/3)\n", + "Rank 000: Var station_name completed (1/3)\n", + "Rank 000: Writing station_code var (2/3)\n", + "Rank 000: Var station_code created (2/3)\n", + "Rank 000: Var station_code data (2/3)\n", + "Rank 000: Var station_code completed (2/3)\n", + "Rank 000: Writing pm10 var (3/3)\n", + "Rank 000: Var pm10 created (3/3)\n", + "Rank 000: Var pm10 data (3/3)\n", + "Rank 000: Var pm10 completed (3/3)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_nameInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable station_codeInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n", + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:411: UserWarning: WARNING!!! Different data types for variable pm10Input dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_2.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 365, station: 83)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n",
+       "  * station       (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n",
+       "Data variables:\n",
+       "    station_name  (station) object 'Barcelona (Eixample)' ... 'Vic (Centre Cí...\n",
+       "    station_code  (station) object 'ES1438A' 'ES1928A' ... 'ES9994A' 'ES1874A'\n",
+       "    pm10          (station, time) float64 19.6 27.2 35.7 30.9 ... nan nan nan\n",
+       "    lat           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
+       "    lon           (station) float64 nan nan nan nan nan ... nan nan nan nan nan\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
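Because `pm10` is stored per `(station, time)`, the written file converts cleanly back into a tidy table. A possible follow-up step (hypothetical, not part of the notebook):

```python
import xarray as xr

ds = xr.open_dataset('points_grid_2.nc')
df_out = ds['pm10'].to_dataframe()  # MultiIndex (station, time) -> pm10
print(df_out.head())
```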
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 365, station: 83)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2017-01-01 2017-01-02 ... 2017-12-31\n", + " * station (station) float64 0.0 1.0 2.0 3.0 4.0 ... 79.0 80.0 81.0 82.0\n", + "Data variables:\n", + " station_name (station) object ...\n", + " station_code (station) object ...\n", + " pm10 (station, time) float64 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_2.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. Create LCC grid" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "lat_1 = 37\n", + "lat_2 = 43\n", + "lon_0 = -3\n", + "lat_0 = 40\n", + "nx = 397\n", + "ny = 397\n", + "inc_x = 4000\n", + "inc_y = 4000\n", + "x_0 = -807847.688\n", + "y_0 = -797137.125\n", + "lcc_grid = create_nes(comm=None, info=False, projection='lcc',\n", + " lat_1=lat_1, lat_2=lat_2, lon_0=lon_0, lat_0=lat_0, \n", + " nx=nx, ny=ny, inc_x=inc_x, inc_y=inc_y, x_0=x_0, y_0=y_0)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating lcc_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "lcc_grid.to_netcdf('lcc_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:            (time: 1, lev: 1, y: 397, x: 397)\n",
+       "Coordinates:\n",
+       "  * time               (time) datetime64[ns] 1996-12-31\n",
+       "  * lev                (lev) float64 0.0\n",
+       "  * y                  (y) float64 -7.951e+05 -7.911e+05 ... 7.849e+05 7.889e+05\n",
+       "  * x                  (x) float64 -8.058e+05 -8.018e+05 ... 7.742e+05 7.782e+05\n",
+       "Data variables:\n",
+       "    lat                (y, x) float64 ...\n",
+       "    lon                (y, x) float64 ...\n",
+       "    Lambert_conformal  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
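As with the regular grid, `x_0`/`y_0` appear to mark the grid edge in projection coordinates, with centres half an `inc` inside; a quick check against the dump above (illustrative only):

```python
import numpy as np

x = x_0 + inc_x / 2 + np.arange(nx) * inc_x  # -805847.688 ... 778152.312
y = y_0 + inc_y / 2 + np.arange(ny) * inc_y  # -795137.125 ... 788862.875
print(x[0], x[-1], y[0], y[-1])  # matches -8.058e+05 ... 7.889e+05 above
```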
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, y: 397, x: 397)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * y (y) float64 -7.951e+05 -7.911e+05 ... 7.849e+05 7.889e+05\n", + " * x (x) float64 -8.058e+05 -8.018e+05 ... 7.742e+05 7.782e+05\n", + "Data variables:\n", + " lat (y, x) float64 ...\n", + " lon (y, x) float64 ...\n", + " Lambert_conformal |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('lcc_grid.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 5. Create Mercator grid" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "lat_ts = -1.5\n", + "lon_0 = -18.0\n", + "nx = 210\n", + "ny = 236\n", + "inc_x = 50000\n", + "inc_y = 50000\n", + "x_0 = -126017.5\n", + "y_0 = -5407460.0\n", + "mercator_grid = create_nes(comm=None, info=False, projection='mercator',\n", + " lat_ts=lat_ts, lon_0=lon_0, nx=nx, ny=ny, \n", + " inc_x=inc_x, inc_y=inc_y, x_0=x_0, y_0=y_0)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating mercator_grid.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n" + ] + } + ], + "source": [ + "mercator_grid.to_netcdf('mercator_grid.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
<xarray.Dataset>\n",
+       "Dimensions:   (time: 1, lev: 1, y: 236, x: 210)\n",
+       "Coordinates:\n",
+       "  * time      (time) datetime64[ns] 1996-12-31\n",
+       "  * lev       (lev) float64 0.0\n",
+       "  * y         (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n",
+       "  * x         (x) float64 -1.01e+05 -5.102e+04 -1.018e+03 ... 1.03e+07 1.035e+07\n",
+       "Data variables:\n",
+       "    lat       (y, x) float64 -43.52 -43.52 -43.52 -43.52 ... 49.6 49.6 49.6 49.6\n",
+       "    lon       (y, x) float64 -18.91 -18.46 -18.01 -17.56 ... 74.22 74.67 75.12\n",
+       "    mercator  |S1 b''\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
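The lat/lon values can be cross-checked by inverting the Mercator projection. A sketch with pyproj, assuming a spherical Earth of radius 6 370 000 m (our assumption; the radius NES uses is not stated here):

```python
from pyproj import Transformer

# Hypothetical cross-check: invert the projection at the first cell centre.
merc = f"+proj=merc +lat_ts={lat_ts} +lon_0={lon_0} +R=6370000"
to_lonlat = Transformer.from_crs(merc, "EPSG:4326", always_xy=True)
lon_c, lat_c = to_lonlat.transform(x_0 + inc_x / 2, y_0 + inc_y / 2)
print(lon_c, lat_c)  # ~ -18.91, -43.52: the first lon/lat in the dump above
```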
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 1, lev: 1, y: 236, x: 210)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 1996-12-31\n", + " * lev (lev) float64 0.0\n", + " * y (y) float64 -5.382e+06 -5.332e+06 ... 6.318e+06 6.368e+06\n", + " * x (x) float64 -1.01e+05 -5.102e+04 -1.018e+03 ... 1.03e+07 1.035e+07\n", + "Data variables:\n", + " lat (y, x) float64 ...\n", + " lon (y, x) float64 ...\n", + " mercator |S1 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('mercator_grid.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/2.1-create_nes_port.ipynb b/Jupyter_notebooks/2.1-create_nes_port.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e66b4f8812f1c0195b9adfe46cb635773e2e1e52 --- /dev/null +++ b/Jupyter_notebooks/2.1-create_nes_port.ipynb @@ -0,0 +1,1969 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to create monthly observations datasets (Port Barcelona)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import pandas as pd\n", + "import numpy as np\n", + "from datetime import datetime, timedelta\n", + "from nes import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " SO2-Darsena SO2-UM NO2-UM NO2-ZAL Prat\n", + "Horario: UTC \n", + "2017-01-01 00:00:00.000000 0.20 NaN 64.64 49.08\n", + "2017-01-01 01:00:00.000003 0.18 NaN 68.16 53.00\n", + "2017-01-01 01:59:59.999997 0.04 NaN 68.29 46.75\n", + "2017-01-01 03:00:00.000000 0.00 NaN 62.91 39.65\n", + "2017-01-01 04:00:00.000003 0.00 NaN 47.13 28.86\n", + "... ... ... ... ...\n", + "2021-12-31 13:00:00.000003 0.77 NaN 35.38 24.96\n", + "2021-12-31 13:59:59.999997 0.37 NaN 35.80 24.16\n", + "2021-12-31 15:00:00.000000 0.23 NaN 29.10 25.79\n", + "2021-12-31 16:00:00.000003 0.14 NaN 9.24 29.82\n", + "2021-12-31 16:59:59.999997 0.20 NaN 12.76 28.66\n", + "\n", + "[43818 rows x 4 columns]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx'\n", + "df_data = pd.read_excel(file_path, header=3, index_col='Horario: UTC')\n", + "df_data" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "DatetimeIndex(['2017-01-01 00:00:00', '2017-01-01 01:00:00',\n", + " '2017-01-01 02:00:00', '2017-01-01 03:00:00',\n", + " '2017-01-01 04:00:00', '2017-01-01 05:00:00',\n", + " '2017-01-01 06:00:00', '2017-01-01 07:00:00',\n", + " '2017-01-01 08:00:00', '2017-01-01 09:00:00',\n", + " ...\n", + " '2021-12-31 08:00:00', '2021-12-31 09:00:00',\n", + " '2021-12-31 10:00:00', '2021-12-31 11:00:00',\n", + " '2021-12-31 12:00:00', '2021-12-31 13:00:00',\n", + " '2021-12-31 14:00:00', '2021-12-31 15:00:00',\n", + " '2021-12-31 16:00:00', '2021-12-31 17:00:00'],\n", + " dtype='datetime64[ns]', length=43818, freq=None)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_data.index = pd.Index([datetime(year=2017, month=1, day=1, minute=0, second=0, microsecond=0) + timedelta(hours=i) for i in range(len(df_data))])\n", + "df_data.index " + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
station.codelatlonstandardised_network_provided_area_classification
0Dàrsena sud41.3328892.140807NaN
1Unitat Mobil41.3737772.184514NaN
2ZAL Prat41.3172772.134501NaN
\n", + "
" + ], + "text/plain": [ + " station.code lat lon \\\n", + "0 Dàrsena sud 41.332889 2.140807 \n", + "1 Unitat Mobil 41.373777 2.184514 \n", + "2 ZAL Prat 41.317277 2.134501 \n", + "\n", + " standardised_network_provided_area_classification \n", + "0 NaN \n", + "1 NaN \n", + "2 NaN " + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "file_path = '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/estaciones.csv'\n", + "df_stations = pd.read_csv(file_path)\n", + "df_stations" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create dataset with all timesteps" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([datetime.datetime(2017, 1, 1, 0, 0),\n", + " datetime.datetime(2017, 1, 1, 1, 0),\n", + " datetime.datetime(2017, 1, 1, 2, 0), ...,\n", + " datetime.datetime(2021, 12, 31, 15, 0),\n", + " datetime.datetime(2021, 12, 31, 16, 0),\n", + " datetime.datetime(2021, 12, 31, 17, 0)], dtype=object)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "times = df_data.index.to_pydatetime()\n", + "times" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([41.3737771 , 41.31727665])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lat = df_stations['lat'][1:3].to_numpy()\n", + "lat" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([2.1845141 , 2.13450079])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lon = df_stations['lon'][1:3].to_numpy()\n", + "lon" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "metadata = {'station_name': {'data': df_data.columns[2:4].to_numpy(),\n", + " 'dimensions': ('station',),\n", + " 'dtype': str},\n", + " 'sconcno2': {'data': df_data.iloc[:, 2:4].to_numpy(),\n", + " 'dimensions': ('time', 'station',),\n", + " 'dtype': float}}" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon, times=times)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "points_grid.variables = metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating points_grid_no2.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing station_name var (1/2)\n", + "Rank 000: Var station_name created (1/2)\n", + "Rank 000: Var station_name data (1/2)\n", + "Rank 000: Var station_name completed (1/2)\n", + "Rank 000: Writing sconcno2 var (2/2)\n", + "Rank 000: Var sconcno2 created (2/2)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py:405: UserWarning: WARNING!!! 
Different data types for variable station_nameInput dtype=, data dtype=object\n", + " warnings.warn(msg)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Var sconcno2 data (2/2)\n", + "Rank 000: Var sconcno2 completed (2/2)\n" + ] + } + ], + "source": [ + "points_grid.to_netcdf('points_grid_no2.nc', info=True)\n", + "del points_grid" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
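+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "*Aside (a sketch, not part of the original run): the `variables` dictionary assigned above maps each variable name to a dict with a `data` array and a `dimensions` tuple; keys such as `units`, `standard_name` or `dtype` are optional attributes. The name `dummy_var` below is hypothetical.*"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Hypothetical extra variable, shaped (time, station) like sconcno2 above\n",
+    "dummy = {'dummy_var': {'data': np.zeros((len(times), len(lat))),\n",
+    "                       'dimensions': ('time', 'station',),\n",
+    "                       'units': '1'}}"
+   ]
+  },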
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 43818, station: 2)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2017-01-01 ... 2021-12-31T17:00:00\n",
+       "  * station       (station) float64 0.0 1.0\n",
+       "Data variables:\n",
+       "    station_name  (station) object 'NO2-UM' 'NO2-ZAL Prat'\n",
+       "    sconcno2      (time, station) float64 64.64 49.08 68.16 ... 12.76 28.66\n",
+       "    lat           (station) float64 41.37 41.32\n",
+       "    lon           (station) float64 2.185 2.135\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 43818, station: 2)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2017-01-01 ... 2021-12-31T17:00:00\n", + " * station (station) float64 0.0 1.0\n", + "Data variables:\n", + " station_name (station) object ...\n", + " sconcno2 (time, station) float64 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('points_grid_no2.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create one dataset per month (Ready for Providentia)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "df_data['month'] = df_data.index.month\n", + "df_data['year'] = df_data.index.year" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
SO2-DarsenaSO2-UMNO2-UMNO2-ZAL Pratmonthyear
2017-01-01 00:00:000.20NaN64.6449.0812017
2017-01-01 01:00:000.18NaN68.1653.0012017
2017-01-01 02:00:000.04NaN68.2946.7512017
2017-01-01 03:00:000.00NaN62.9139.6512017
2017-01-01 04:00:000.00NaN47.1328.8612017
.....................
2021-12-31 13:00:000.77NaN35.3824.96122021
2021-12-31 14:00:000.37NaN35.8024.16122021
2021-12-31 15:00:000.23NaN29.1025.79122021
2021-12-31 16:00:000.14NaN9.2429.82122021
2021-12-31 17:00:000.20NaN12.7628.66122021
\n", + "

43818 rows × 6 columns

\n", + "
" + ], + "text/plain": [ + " SO2-Darsena SO2-UM NO2-UM NO2-ZAL Prat month year\n", + "2017-01-01 00:00:00 0.20 NaN 64.64 49.08 1 2017\n", + "2017-01-01 01:00:00 0.18 NaN 68.16 53.00 1 2017\n", + "2017-01-01 02:00:00 0.04 NaN 68.29 46.75 1 2017\n", + "2017-01-01 03:00:00 0.00 NaN 62.91 39.65 1 2017\n", + "2017-01-01 04:00:00 0.00 NaN 47.13 28.86 1 2017\n", + "... ... ... ... ... ... ...\n", + "2021-12-31 13:00:00 0.77 NaN 35.38 24.96 12 2021\n", + "2021-12-31 14:00:00 0.37 NaN 35.80 24.16 12 2021\n", + "2021-12-31 15:00:00 0.23 NaN 29.10 25.79 12 2021\n", + "2021-12-31 16:00:00 0.14 NaN 9.24 29.82 12 2021\n", + "2021-12-31 17:00:00 0.20 NaN 12.76 28.66 12 2021\n", + "\n", + "[43818 rows x 6 columns]" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_data" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Done sconcno2_201701.nc\n", + "Done sconcno2_201702.nc\n", + "Done sconcno2_201703.nc\n", + "Done sconcno2_201704.nc\n", + "Done sconcno2_201705.nc\n", + "Done sconcno2_201706.nc\n", + "Done sconcno2_201707.nc\n", + "Done sconcno2_201708.nc\n", + "Done sconcno2_201709.nc\n", + "Done sconcno2_201710.nc\n", + "Done sconcno2_201711.nc\n", + "Done sconcno2_201712.nc\n", + "Done sconcno2_201801.nc\n", + "Done sconcno2_201802.nc\n", + "Done sconcno2_201803.nc\n", + "Done sconcno2_201804.nc\n", + "Done sconcno2_201805.nc\n", + "Done sconcno2_201806.nc\n", + "Done sconcno2_201807.nc\n", + "Done sconcno2_201808.nc\n", + "Done sconcno2_201809.nc\n", + "Done sconcno2_201810.nc\n", + "Done sconcno2_201811.nc\n", + "Done sconcno2_201812.nc\n", + "Done sconcno2_201901.nc\n", + "Done sconcno2_201902.nc\n", + "Done sconcno2_201903.nc\n", + "Done sconcno2_201904.nc\n", + "Done sconcno2_201905.nc\n", + "Done sconcno2_201906.nc\n", + "Done sconcno2_201907.nc\n", + "Done sconcno2_201908.nc\n", + "Done sconcno2_201909.nc\n", + "Done sconcno2_201910.nc\n", + "Done sconcno2_201911.nc\n", + "Done sconcno2_201912.nc\n", + "Done sconcno2_202001.nc\n", + "Done sconcno2_202002.nc\n", + "Done sconcno2_202003.nc\n", + "Done sconcno2_202004.nc\n", + "Done sconcno2_202005.nc\n", + "Done sconcno2_202006.nc\n", + "Done sconcno2_202007.nc\n", + "Done sconcno2_202008.nc\n", + "Done sconcno2_202009.nc\n", + "Done sconcno2_202010.nc\n", + "Done sconcno2_202011.nc\n", + "Done sconcno2_202012.nc\n", + "Done sconcno2_202101.nc\n", + "Done sconcno2_202102.nc\n", + "Done sconcno2_202103.nc\n", + "Done sconcno2_202104.nc\n", + "Done sconcno2_202105.nc\n", + "Done sconcno2_202106.nc\n", + "Done sconcno2_202107.nc\n", + "Done sconcno2_202108.nc\n", + "Done sconcno2_202109.nc\n", + "Done sconcno2_202110.nc\n", + "Done sconcno2_202111.nc\n", + "Done sconcno2_202112.nc\n" + ] + } + ], + "source": [ + "for (year, month), current in df_data.groupby(['year', 'month']):\n", + "\n", + " # Read time\n", + " times = current.index.to_pydatetime()\n", + " \n", + " # Fill altitude with nans\n", + " altitude = np.full(len(current.columns[2:4]), np.nan)\n", + " \n", + " # Read metadata\n", + " metadata = {'station_name': {'data': current.columns[2:4].to_numpy(),\n", + " 'dimensions': ('station'),\n", + " 'standard_name': ''},\n", + " 'altitude': {'data': altitude,\n", + " 'dimensions': ('station',),\n", + " 'units': 'meters',\n", + " 'standard_name': 'altitude'},\n", + " 'sconcno2': {'data': current.iloc[:, 2:4].to_numpy(),\n", + " 'units': 'µg m-3',\n", + " 'dimensions': 
('time', 'station',),\n", + " 'long_name': ''}\n", + " }\n", + " \n", + " # Create object\n", + " points_grid = create_nes(comm=None, info=False, projection=None, parallel_method='X',\n", + " lat=lat, lon=lon, times=times)\n", + " \n", + " # Assign metadata\n", + " points_grid.variables = metadata\n", + " \n", + " # Save files\n", + " points_grid.to_netcdf('/esarchive/obs/generalitat/port-barcelona/hourly/sconcno2/sconcno2_{0}{1}.nc'.format(year, str(month).zfill(2)))\n", + " \n", + " del points_grid\n", + " print('Done sconcno2_{0}{1}.nc'.format(year, str(month).zfill(2)))" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
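+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "*Aside (a sketch, not part of the original run): each iteration of the loop above writes one file named `sconcno2_YYYYMM.nc`; the snippet below just reproduces that naming convention for a sample (year, month) pair.*"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative only: the filename pattern used by the writing loop\n",
+    "year, month = 2017, 5\n",
+    "'sconcno2_{0}{1}.nc'.format(year, str(month).zfill(2))"
+   ]
+  },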
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 744, station: 2)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2017-05-01 ... 2017-05-31T23:00:00\n",
+       "  * station       (station) float64 0.0 1.0\n",
+       "Data variables:\n",
+       "    station_name  (station) object 'NO2-UM' 'NO2-ZAL Prat'\n",
+       "    altitude      (station) float64 nan nan\n",
+       "    sconcno2      (time, station) float64 8.77 23.49 1.76 ... 27.11 13.87 30.07\n",
+       "    lat           (station) float64 41.37 41.32\n",
+       "    lon           (station) float64 2.185 2.135\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 744, station: 2)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2017-05-01 ... 2017-05-31T23:00:00\n", + " * station (station) float64 0.0 1.0\n", + "Data variables:\n", + " station_name (station) object ...\n", + " altitude (station) float64 ...\n", + " sconcno2 (time, station) float64 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('/esarchive/obs/generalitat/port-barcelona/hourly/sconcno2/sconcno2_201705.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:       (time: 738, station: 2)\n",
+       "Coordinates:\n",
+       "  * time          (time) datetime64[ns] 2021-12-01 ... 2021-12-31T17:00:00\n",
+       "  * station       (station) float64 0.0 1.0\n",
+       "Data variables:\n",
+       "    station_name  (station) object 'NO2-UM' 'NO2-ZAL Prat'\n",
+       "    altitude      (station) float64 nan nan\n",
+       "    sconcno2      (time, station) float64 13.33 nan 13.42 ... 29.82 12.76 28.66\n",
+       "    lat           (station) float64 41.37 41.32\n",
+       "    lon           (station) float64 2.185 2.135\n",
+       "Attributes:\n",
+       "    Conventions:  CF-1.7
" + ], + "text/plain": [ + "\n", + "Dimensions: (time: 738, station: 2)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2021-12-01 ... 2021-12-31T17:00:00\n", + " * station (station) float64 0.0 1.0\n", + "Data variables:\n", + " station_name (station) object ...\n", + " altitude (station) float64 ...\n", + " sconcno2 (time, station) float64 ...\n", + " lat (station) float64 ...\n", + " lon (station) float64 ...\n", + "Attributes:\n", + " Conventions: CF-1.7" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset('/esarchive/obs/generalitat/port-barcelona/hourly/sconcno2/sconcno2_202112.nc')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/3-add_time_bnds.ipynb b/Jupyter_notebooks/3-add_time_bnds.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..9c08016b91387c8db5d3efaeef8d56d2edbd04be --- /dev/null +++ b/Jupyter_notebooks/3-add_time_bnds.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to add time bounds" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "import datetime\n", + "import numpy as np\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Set time bounds" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "test_path = \"/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc\"\n", + "nessy = open_netcdf(path=test_path, info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", + " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "array = np.array([[datetime.datetime(year=2020, month=2, day=20), \n", + " datetime.datetime(year=2020, month=2, day=15)]])\n", + "nessy.set_time_bnds(array)\n", + "nessy.time_bnds" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading O3_all var (1/1)\n", + "Rank 000: Loaded O3_all var ((1, 24, 271, 351))\n" + ] + } + ], + "source": [ + "nessy.load()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "nessy.to_netcdf('nc_serial_test.nc')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
Explore variables" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[datetime.datetime(2020, 2, 20, 0, 0),\n", + " datetime.datetime(2020, 2, 15, 0, 0)]], dtype=object)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy.time_bnds" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/4-providentia.ipynb b/Jupyter_notebooks/4-providentia.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5e0303360a2221c26721ce15c7d648399a6d5e71 --- /dev/null +++ b/Jupyter_notebooks/4-providentia.ipynb @@ -0,0 +1,9953 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import xarray as xr\n", + "from netCDF4 import Dataset\n", + "from nes import *" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Observations dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "obs_path = '/gpfs/projects/bsc32/AC_cache/obs/ghost/EBAS/1.3.3/hourly/sconco3/sconco3_201804.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Read" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                                                           (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n",
+       "Coordinates:\n",
+       "  * time                                                              (time) datetime64[ns] ...\n",
+       "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n",
+       "Data variables: (12/179)\n",
+       "    ASTER_v3_altitude                                                 (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_BC_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_CO_emissions                          (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NH3_emissions                         (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NMVOC_emissions                       (station) float32 ...\n",
+       "    EDGAR_v4.3.2_annual_average_NOx_emissions                         (station) float32 ...\n",
+       "    ...                                                                ...\n",
+       "    station_timezone                                                  (station) object ...\n",
+       "    street_type                                                       (station) object ...\n",
+       "    street_width                                                      (station) float32 ...\n",
+       "    terrain                                                           (station) object ...\n",
+       "    vertical_datum                                                    (station) object ...\n",
+       "    weekday_weekend_code                                              (station, time) uint8 ...\n",
+       "Attributes:\n",
+       "    title:                     Surface ozone data in the EBAS network in 2018...\n",
+       "    institution:               Barcelona Supercomputing Center\n",
+       "    source:                    Surface observations\n",
+       "    creator_name:              Dene R. Bowdalo\n",
+       "    creator_email:             dene.bowdalo@bsc.es\n",
+       "    conventions:               CF-1.7\n",
+       "    data_version:              1.3.3\n",
+       "    history:                   Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n",
+       "    NCO:                       4.7.2\n",
+       "    nco_openmp_thread_number:  1
" + ], + "text/plain": [ + "\n", + "Dimensions: (station: 168, time: 720, N_flag_codes: 186, N_qa_codes: 77)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] ...\n", + "Dimensions without coordinates: station, N_flag_codes, N_qa_codes\n", + "Data variables: (12/179)\n", + " ASTER_v3_altitude (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_BC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_CO_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NH3_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NMVOC_emissions (station) float32 ...\n", + " EDGAR_v4.3.2_annual_average_NOx_emissions (station) float32 ...\n", + " ... ...\n", + " station_timezone (station) object ...\n", + " street_type (station) object ...\n", + " street_width (station) float32 ...\n", + " terrain (station) object ...\n", + " vertical_datum (station) object ...\n", + " weekday_weekend_code (station, time) uint8 ...\n", + "Attributes:\n", + " title: Surface ozone data in the EBAS network in 2018...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Surface observations\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.3.3\n", + " history: Tue Mar 30 12:38:43 2021: ncks -O --fix_rec_dm...\n", + " NCO: 4.7.2\n", + " nco_openmp_thread_number: 1" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(obs_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1 = open_netcdf(path=obs_path, info=True, parallel_method='X')\n", + "nessy_1" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2018, 4, 1, 0, 0),\n", + " datetime.datetime(2018, 4, 1, 1, 0),\n", + " datetime.datetime(2018, 4, 1, 2, 0),\n", + " datetime.datetime(2018, 4, 1, 3, 0),\n", + " datetime.datetime(2018, 4, 1, 4, 0),\n", + " datetime.datetime(2018, 4, 1, 5, 0),\n", + " datetime.datetime(2018, 4, 1, 6, 0),\n", + " datetime.datetime(2018, 4, 1, 7, 0),\n", + " datetime.datetime(2018, 4, 1, 8, 0),\n", + " datetime.datetime(2018, 4, 1, 9, 0),\n", + " datetime.datetime(2018, 4, 1, 10, 0),\n", + " datetime.datetime(2018, 4, 1, 11, 0),\n", + " datetime.datetime(2018, 4, 1, 12, 0),\n", + " datetime.datetime(2018, 4, 1, 13, 0),\n", + " datetime.datetime(2018, 4, 1, 14, 0),\n", + " datetime.datetime(2018, 4, 1, 15, 0),\n", + " datetime.datetime(2018, 4, 1, 16, 0),\n", + " datetime.datetime(2018, 4, 1, 17, 0),\n", + " datetime.datetime(2018, 4, 1, 18, 0),\n", + " datetime.datetime(2018, 4, 1, 19, 0),\n", + " datetime.datetime(2018, 4, 1, 20, 0),\n", + " datetime.datetime(2018, 4, 1, 21, 0),\n", + " datetime.datetime(2018, 4, 1, 22, 0),\n", + " datetime.datetime(2018, 4, 1, 23, 0),\n", + " datetime.datetime(2018, 4, 2, 0, 0),\n", + " datetime.datetime(2018, 4, 2, 1, 0),\n", + " datetime.datetime(2018, 4, 2, 2, 0),\n", + " datetime.datetime(2018, 4, 2, 3, 0),\n", + " datetime.datetime(2018, 4, 2, 4, 0),\n", + " datetime.datetime(2018, 4, 2, 5, 0),\n", + " datetime.datetime(2018, 4, 2, 6, 0),\n", + " datetime.datetime(2018, 4, 2, 7, 0),\n", + " datetime.datetime(2018, 4, 2, 8, 0),\n", + " datetime.datetime(2018, 4, 2, 9, 0),\n", + " 
datetime.datetime(2018, 4, 2, 10, 0),\n", + " datetime.datetime(2018, 4, 2, 11, 0),\n", + " datetime.datetime(2018, 4, 2, 12, 0),\n", + " datetime.datetime(2018, 4, 2, 13, 0),\n", + " datetime.datetime(2018, 4, 2, 14, 0),\n", + " datetime.datetime(2018, 4, 2, 15, 0),\n", + " datetime.datetime(2018, 4, 2, 16, 0),\n", + " datetime.datetime(2018, 4, 2, 17, 0),\n", + " datetime.datetime(2018, 4, 2, 18, 0),\n", + " datetime.datetime(2018, 4, 2, 19, 0),\n", + " datetime.datetime(2018, 4, 2, 20, 0),\n", + " datetime.datetime(2018, 4, 2, 21, 0),\n", + " datetime.datetime(2018, 4, 2, 22, 0),\n", + " datetime.datetime(2018, 4, 2, 23, 0),\n", + " datetime.datetime(2018, 4, 3, 0, 0),\n", + " datetime.datetime(2018, 4, 3, 1, 0),\n", + " datetime.datetime(2018, 4, 3, 2, 0),\n", + " datetime.datetime(2018, 4, 3, 3, 0),\n", + " datetime.datetime(2018, 4, 3, 4, 0),\n", + " datetime.datetime(2018, 4, 3, 5, 0),\n", + " datetime.datetime(2018, 4, 3, 6, 0),\n", + " datetime.datetime(2018, 4, 3, 7, 0),\n", + " datetime.datetime(2018, 4, 3, 8, 0),\n", + " datetime.datetime(2018, 4, 3, 9, 0),\n", + " datetime.datetime(2018, 4, 3, 10, 0),\n", + " datetime.datetime(2018, 4, 3, 11, 0),\n", + " datetime.datetime(2018, 4, 3, 12, 0),\n", + " datetime.datetime(2018, 4, 3, 13, 0),\n", + " datetime.datetime(2018, 4, 3, 14, 0),\n", + " datetime.datetime(2018, 4, 3, 15, 0),\n", + " datetime.datetime(2018, 4, 3, 16, 0),\n", + " datetime.datetime(2018, 4, 3, 17, 0),\n", + " datetime.datetime(2018, 4, 3, 18, 0),\n", + " datetime.datetime(2018, 4, 3, 19, 0),\n", + " datetime.datetime(2018, 4, 3, 20, 0),\n", + " datetime.datetime(2018, 4, 3, 21, 0),\n", + " datetime.datetime(2018, 4, 3, 22, 0),\n", + " datetime.datetime(2018, 4, 3, 23, 0),\n", + " datetime.datetime(2018, 4, 4, 0, 0),\n", + " datetime.datetime(2018, 4, 4, 1, 0),\n", + " datetime.datetime(2018, 4, 4, 2, 0),\n", + " datetime.datetime(2018, 4, 4, 3, 0),\n", + " datetime.datetime(2018, 4, 4, 4, 0),\n", + " datetime.datetime(2018, 4, 4, 5, 0),\n", + " datetime.datetime(2018, 4, 4, 6, 0),\n", + " datetime.datetime(2018, 4, 4, 7, 0),\n", + " datetime.datetime(2018, 4, 4, 8, 0),\n", + " datetime.datetime(2018, 4, 4, 9, 0),\n", + " datetime.datetime(2018, 4, 4, 10, 0),\n", + " datetime.datetime(2018, 4, 4, 11, 0),\n", + " datetime.datetime(2018, 4, 4, 12, 0),\n", + " datetime.datetime(2018, 4, 4, 13, 0),\n", + " datetime.datetime(2018, 4, 4, 14, 0),\n", + " datetime.datetime(2018, 4, 4, 15, 0),\n", + " datetime.datetime(2018, 4, 4, 16, 0),\n", + " datetime.datetime(2018, 4, 4, 17, 0),\n", + " datetime.datetime(2018, 4, 4, 18, 0),\n", + " datetime.datetime(2018, 4, 4, 19, 0),\n", + " datetime.datetime(2018, 4, 4, 20, 0),\n", + " datetime.datetime(2018, 4, 4, 21, 0),\n", + " datetime.datetime(2018, 4, 4, 22, 0),\n", + " datetime.datetime(2018, 4, 4, 23, 0),\n", + " datetime.datetime(2018, 4, 5, 0, 0),\n", + " datetime.datetime(2018, 4, 5, 1, 0),\n", + " datetime.datetime(2018, 4, 5, 2, 0),\n", + " datetime.datetime(2018, 4, 5, 3, 0),\n", + " datetime.datetime(2018, 4, 5, 4, 0),\n", + " datetime.datetime(2018, 4, 5, 5, 0),\n", + " datetime.datetime(2018, 4, 5, 6, 0),\n", + " datetime.datetime(2018, 4, 5, 7, 0),\n", + " datetime.datetime(2018, 4, 5, 8, 0),\n", + " datetime.datetime(2018, 4, 5, 9, 0),\n", + " datetime.datetime(2018, 4, 5, 10, 0),\n", + " datetime.datetime(2018, 4, 5, 11, 0),\n", + " datetime.datetime(2018, 4, 5, 12, 0),\n", + " datetime.datetime(2018, 4, 5, 13, 0),\n", + " datetime.datetime(2018, 4, 5, 14, 0),\n", + " datetime.datetime(2018, 4, 5, 15, 0),\n", 
+ " datetime.datetime(2018, 4, 5, 16, 0),\n", + " datetime.datetime(2018, 4, 5, 17, 0),\n", + " datetime.datetime(2018, 4, 5, 18, 0),\n", + " datetime.datetime(2018, 4, 5, 19, 0),\n", + " datetime.datetime(2018, 4, 5, 20, 0),\n", + " datetime.datetime(2018, 4, 5, 21, 0),\n", + " datetime.datetime(2018, 4, 5, 22, 0),\n", + " datetime.datetime(2018, 4, 5, 23, 0),\n", + " datetime.datetime(2018, 4, 6, 0, 0),\n", + " datetime.datetime(2018, 4, 6, 1, 0),\n", + " datetime.datetime(2018, 4, 6, 2, 0),\n", + " datetime.datetime(2018, 4, 6, 3, 0),\n", + " datetime.datetime(2018, 4, 6, 4, 0),\n", + " datetime.datetime(2018, 4, 6, 5, 0),\n", + " datetime.datetime(2018, 4, 6, 6, 0),\n", + " datetime.datetime(2018, 4, 6, 7, 0),\n", + " datetime.datetime(2018, 4, 6, 8, 0),\n", + " datetime.datetime(2018, 4, 6, 9, 0),\n", + " datetime.datetime(2018, 4, 6, 10, 0),\n", + " datetime.datetime(2018, 4, 6, 11, 0),\n", + " datetime.datetime(2018, 4, 6, 12, 0),\n", + " datetime.datetime(2018, 4, 6, 13, 0),\n", + " datetime.datetime(2018, 4, 6, 14, 0),\n", + " datetime.datetime(2018, 4, 6, 15, 0),\n", + " datetime.datetime(2018, 4, 6, 16, 0),\n", + " datetime.datetime(2018, 4, 6, 17, 0),\n", + " datetime.datetime(2018, 4, 6, 18, 0),\n", + " datetime.datetime(2018, 4, 6, 19, 0),\n", + " datetime.datetime(2018, 4, 6, 20, 0),\n", + " datetime.datetime(2018, 4, 6, 21, 0),\n", + " datetime.datetime(2018, 4, 6, 22, 0),\n", + " datetime.datetime(2018, 4, 6, 23, 0),\n", + " datetime.datetime(2018, 4, 7, 0, 0),\n", + " datetime.datetime(2018, 4, 7, 1, 0),\n", + " datetime.datetime(2018, 4, 7, 2, 0),\n", + " datetime.datetime(2018, 4, 7, 3, 0),\n", + " datetime.datetime(2018, 4, 7, 4, 0),\n", + " datetime.datetime(2018, 4, 7, 5, 0),\n", + " datetime.datetime(2018, 4, 7, 6, 0),\n", + " datetime.datetime(2018, 4, 7, 7, 0),\n", + " datetime.datetime(2018, 4, 7, 8, 0),\n", + " datetime.datetime(2018, 4, 7, 9, 0),\n", + " datetime.datetime(2018, 4, 7, 10, 0),\n", + " datetime.datetime(2018, 4, 7, 11, 0),\n", + " datetime.datetime(2018, 4, 7, 12, 0),\n", + " datetime.datetime(2018, 4, 7, 13, 0),\n", + " datetime.datetime(2018, 4, 7, 14, 0),\n", + " datetime.datetime(2018, 4, 7, 15, 0),\n", + " datetime.datetime(2018, 4, 7, 16, 0),\n", + " datetime.datetime(2018, 4, 7, 17, 0),\n", + " datetime.datetime(2018, 4, 7, 18, 0),\n", + " datetime.datetime(2018, 4, 7, 19, 0),\n", + " datetime.datetime(2018, 4, 7, 20, 0),\n", + " datetime.datetime(2018, 4, 7, 21, 0),\n", + " datetime.datetime(2018, 4, 7, 22, 0),\n", + " datetime.datetime(2018, 4, 7, 23, 0),\n", + " datetime.datetime(2018, 4, 8, 0, 0),\n", + " datetime.datetime(2018, 4, 8, 1, 0),\n", + " datetime.datetime(2018, 4, 8, 2, 0),\n", + " datetime.datetime(2018, 4, 8, 3, 0),\n", + " datetime.datetime(2018, 4, 8, 4, 0),\n", + " datetime.datetime(2018, 4, 8, 5, 0),\n", + " datetime.datetime(2018, 4, 8, 6, 0),\n", + " datetime.datetime(2018, 4, 8, 7, 0),\n", + " datetime.datetime(2018, 4, 8, 8, 0),\n", + " datetime.datetime(2018, 4, 8, 9, 0),\n", + " datetime.datetime(2018, 4, 8, 10, 0),\n", + " datetime.datetime(2018, 4, 8, 11, 0),\n", + " datetime.datetime(2018, 4, 8, 12, 0),\n", + " datetime.datetime(2018, 4, 8, 13, 0),\n", + " datetime.datetime(2018, 4, 8, 14, 0),\n", + " datetime.datetime(2018, 4, 8, 15, 0),\n", + " datetime.datetime(2018, 4, 8, 16, 0),\n", + " datetime.datetime(2018, 4, 8, 17, 0),\n", + " datetime.datetime(2018, 4, 8, 18, 0),\n", + " datetime.datetime(2018, 4, 8, 19, 0),\n", + " datetime.datetime(2018, 4, 8, 20, 0),\n", + " datetime.datetime(2018, 4, 8, 21, 
0),\n", + " datetime.datetime(2018, 4, 8, 22, 0),\n", + " datetime.datetime(2018, 4, 8, 23, 0),\n", + " datetime.datetime(2018, 4, 9, 0, 0),\n", + " datetime.datetime(2018, 4, 9, 1, 0),\n", + " datetime.datetime(2018, 4, 9, 2, 0),\n", + " datetime.datetime(2018, 4, 9, 3, 0),\n", + " datetime.datetime(2018, 4, 9, 4, 0),\n", + " datetime.datetime(2018, 4, 9, 5, 0),\n", + " datetime.datetime(2018, 4, 9, 6, 0),\n", + " datetime.datetime(2018, 4, 9, 7, 0),\n", + " datetime.datetime(2018, 4, 9, 8, 0),\n", + " datetime.datetime(2018, 4, 9, 9, 0),\n", + " datetime.datetime(2018, 4, 9, 10, 0),\n", + " datetime.datetime(2018, 4, 9, 11, 0),\n", + " datetime.datetime(2018, 4, 9, 12, 0),\n", + " datetime.datetime(2018, 4, 9, 13, 0),\n", + " datetime.datetime(2018, 4, 9, 14, 0),\n", + " datetime.datetime(2018, 4, 9, 15, 0),\n", + " datetime.datetime(2018, 4, 9, 16, 0),\n", + " datetime.datetime(2018, 4, 9, 17, 0),\n", + " datetime.datetime(2018, 4, 9, 18, 0),\n", + " datetime.datetime(2018, 4, 9, 19, 0),\n", + " datetime.datetime(2018, 4, 9, 20, 0),\n", + " datetime.datetime(2018, 4, 9, 21, 0),\n", + " datetime.datetime(2018, 4, 9, 22, 0),\n", + " datetime.datetime(2018, 4, 9, 23, 0),\n", + " datetime.datetime(2018, 4, 10, 0, 0),\n", + " datetime.datetime(2018, 4, 10, 1, 0),\n", + " datetime.datetime(2018, 4, 10, 2, 0),\n", + " datetime.datetime(2018, 4, 10, 3, 0),\n", + " datetime.datetime(2018, 4, 10, 4, 0),\n", + " datetime.datetime(2018, 4, 10, 5, 0),\n", + " datetime.datetime(2018, 4, 10, 6, 0),\n", + " datetime.datetime(2018, 4, 10, 7, 0),\n", + " datetime.datetime(2018, 4, 10, 8, 0),\n", + " datetime.datetime(2018, 4, 10, 9, 0),\n", + " datetime.datetime(2018, 4, 10, 10, 0),\n", + " datetime.datetime(2018, 4, 10, 11, 0),\n", + " datetime.datetime(2018, 4, 10, 12, 0),\n", + " datetime.datetime(2018, 4, 10, 13, 0),\n", + " datetime.datetime(2018, 4, 10, 14, 0),\n", + " datetime.datetime(2018, 4, 10, 15, 0),\n", + " datetime.datetime(2018, 4, 10, 16, 0),\n", + " datetime.datetime(2018, 4, 10, 17, 0),\n", + " datetime.datetime(2018, 4, 10, 18, 0),\n", + " datetime.datetime(2018, 4, 10, 19, 0),\n", + " datetime.datetime(2018, 4, 10, 20, 0),\n", + " datetime.datetime(2018, 4, 10, 21, 0),\n", + " datetime.datetime(2018, 4, 10, 22, 0),\n", + " datetime.datetime(2018, 4, 10, 23, 0),\n", + " datetime.datetime(2018, 4, 11, 0, 0),\n", + " datetime.datetime(2018, 4, 11, 1, 0),\n", + " datetime.datetime(2018, 4, 11, 2, 0),\n", + " datetime.datetime(2018, 4, 11, 3, 0),\n", + " datetime.datetime(2018, 4, 11, 4, 0),\n", + " datetime.datetime(2018, 4, 11, 5, 0),\n", + " datetime.datetime(2018, 4, 11, 6, 0),\n", + " datetime.datetime(2018, 4, 11, 7, 0),\n", + " datetime.datetime(2018, 4, 11, 8, 0),\n", + " datetime.datetime(2018, 4, 11, 9, 0),\n", + " datetime.datetime(2018, 4, 11, 10, 0),\n", + " datetime.datetime(2018, 4, 11, 11, 0),\n", + " datetime.datetime(2018, 4, 11, 12, 0),\n", + " datetime.datetime(2018, 4, 11, 13, 0),\n", + " datetime.datetime(2018, 4, 11, 14, 0),\n", + " datetime.datetime(2018, 4, 11, 15, 0),\n", + " datetime.datetime(2018, 4, 11, 16, 0),\n", + " datetime.datetime(2018, 4, 11, 17, 0),\n", + " datetime.datetime(2018, 4, 11, 18, 0),\n", + " datetime.datetime(2018, 4, 11, 19, 0),\n", + " datetime.datetime(2018, 4, 11, 20, 0),\n", + " datetime.datetime(2018, 4, 11, 21, 0),\n", + " datetime.datetime(2018, 4, 11, 22, 0),\n", + " datetime.datetime(2018, 4, 11, 23, 0),\n", + " datetime.datetime(2018, 4, 12, 0, 0),\n", + " datetime.datetime(2018, 4, 12, 1, 0),\n", + " datetime.datetime(2018, 4, 
12, 2, 0),\n", + " datetime.datetime(2018, 4, 12, 3, 0),\n", + " datetime.datetime(2018, 4, 12, 4, 0),\n", + " datetime.datetime(2018, 4, 12, 5, 0),\n", + " datetime.datetime(2018, 4, 12, 6, 0),\n", + " datetime.datetime(2018, 4, 12, 7, 0),\n", + " datetime.datetime(2018, 4, 12, 8, 0),\n", + " datetime.datetime(2018, 4, 12, 9, 0),\n", + " datetime.datetime(2018, 4, 12, 10, 0),\n", + " datetime.datetime(2018, 4, 12, 11, 0),\n", + " datetime.datetime(2018, 4, 12, 12, 0),\n", + " datetime.datetime(2018, 4, 12, 13, 0),\n", + " datetime.datetime(2018, 4, 12, 14, 0),\n", + " datetime.datetime(2018, 4, 12, 15, 0),\n", + " datetime.datetime(2018, 4, 12, 16, 0),\n", + " datetime.datetime(2018, 4, 12, 17, 0),\n", + " datetime.datetime(2018, 4, 12, 18, 0),\n", + " datetime.datetime(2018, 4, 12, 19, 0),\n", + " datetime.datetime(2018, 4, 12, 20, 0),\n", + " datetime.datetime(2018, 4, 12, 21, 0),\n", + " datetime.datetime(2018, 4, 12, 22, 0),\n", + " datetime.datetime(2018, 4, 12, 23, 0),\n", + " datetime.datetime(2018, 4, 13, 0, 0),\n", + " datetime.datetime(2018, 4, 13, 1, 0),\n", + " datetime.datetime(2018, 4, 13, 2, 0),\n", + " datetime.datetime(2018, 4, 13, 3, 0),\n", + " datetime.datetime(2018, 4, 13, 4, 0),\n", + " datetime.datetime(2018, 4, 13, 5, 0),\n", + " datetime.datetime(2018, 4, 13, 6, 0),\n", + " datetime.datetime(2018, 4, 13, 7, 0),\n", + " datetime.datetime(2018, 4, 13, 8, 0),\n", + " datetime.datetime(2018, 4, 13, 9, 0),\n", + " datetime.datetime(2018, 4, 13, 10, 0),\n", + " datetime.datetime(2018, 4, 13, 11, 0),\n", + " datetime.datetime(2018, 4, 13, 12, 0),\n", + " datetime.datetime(2018, 4, 13, 13, 0),\n", + " datetime.datetime(2018, 4, 13, 14, 0),\n", + " datetime.datetime(2018, 4, 13, 15, 0),\n", + " datetime.datetime(2018, 4, 13, 16, 0),\n", + " datetime.datetime(2018, 4, 13, 17, 0),\n", + " datetime.datetime(2018, 4, 13, 18, 0),\n", + " datetime.datetime(2018, 4, 13, 19, 0),\n", + " datetime.datetime(2018, 4, 13, 20, 0),\n", + " datetime.datetime(2018, 4, 13, 21, 0),\n", + " datetime.datetime(2018, 4, 13, 22, 0),\n", + " datetime.datetime(2018, 4, 13, 23, 0),\n", + " datetime.datetime(2018, 4, 14, 0, 0),\n", + " datetime.datetime(2018, 4, 14, 1, 0),\n", + " datetime.datetime(2018, 4, 14, 2, 0),\n", + " datetime.datetime(2018, 4, 14, 3, 0),\n", + " datetime.datetime(2018, 4, 14, 4, 0),\n", + " datetime.datetime(2018, 4, 14, 5, 0),\n", + " datetime.datetime(2018, 4, 14, 6, 0),\n", + " datetime.datetime(2018, 4, 14, 7, 0),\n", + " datetime.datetime(2018, 4, 14, 8, 0),\n", + " datetime.datetime(2018, 4, 14, 9, 0),\n", + " datetime.datetime(2018, 4, 14, 10, 0),\n", + " datetime.datetime(2018, 4, 14, 11, 0),\n", + " datetime.datetime(2018, 4, 14, 12, 0),\n", + " datetime.datetime(2018, 4, 14, 13, 0),\n", + " datetime.datetime(2018, 4, 14, 14, 0),\n", + " datetime.datetime(2018, 4, 14, 15, 0),\n", + " datetime.datetime(2018, 4, 14, 16, 0),\n", + " datetime.datetime(2018, 4, 14, 17, 0),\n", + " datetime.datetime(2018, 4, 14, 18, 0),\n", + " datetime.datetime(2018, 4, 14, 19, 0),\n", + " datetime.datetime(2018, 4, 14, 20, 0),\n", + " datetime.datetime(2018, 4, 14, 21, 0),\n", + " datetime.datetime(2018, 4, 14, 22, 0),\n", + " datetime.datetime(2018, 4, 14, 23, 0),\n", + " datetime.datetime(2018, 4, 15, 0, 0),\n", + " datetime.datetime(2018, 4, 15, 1, 0),\n", + " datetime.datetime(2018, 4, 15, 2, 0),\n", + " datetime.datetime(2018, 4, 15, 3, 0),\n", + " datetime.datetime(2018, 4, 15, 4, 0),\n", + " datetime.datetime(2018, 4, 15, 5, 0),\n", + " datetime.datetime(2018, 4, 15, 6, 0),\n", + 
" datetime.datetime(2018, 4, 15, 7, 0),\n", + " datetime.datetime(2018, 4, 15, 8, 0),\n", + " datetime.datetime(2018, 4, 15, 9, 0),\n", + " datetime.datetime(2018, 4, 15, 10, 0),\n", + " datetime.datetime(2018, 4, 15, 11, 0),\n", + " datetime.datetime(2018, 4, 15, 12, 0),\n", + " datetime.datetime(2018, 4, 15, 13, 0),\n", + " datetime.datetime(2018, 4, 15, 14, 0),\n", + " datetime.datetime(2018, 4, 15, 15, 0),\n", + " datetime.datetime(2018, 4, 15, 16, 0),\n", + " datetime.datetime(2018, 4, 15, 17, 0),\n", + " datetime.datetime(2018, 4, 15, 18, 0),\n", + " datetime.datetime(2018, 4, 15, 19, 0),\n", + " datetime.datetime(2018, 4, 15, 20, 0),\n", + " datetime.datetime(2018, 4, 15, 21, 0),\n", + " datetime.datetime(2018, 4, 15, 22, 0),\n", + " datetime.datetime(2018, 4, 15, 23, 0),\n", + " datetime.datetime(2018, 4, 16, 0, 0),\n", + " datetime.datetime(2018, 4, 16, 1, 0),\n", + " datetime.datetime(2018, 4, 16, 2, 0),\n", + " datetime.datetime(2018, 4, 16, 3, 0),\n", + " datetime.datetime(2018, 4, 16, 4, 0),\n", + " datetime.datetime(2018, 4, 16, 5, 0),\n", + " datetime.datetime(2018, 4, 16, 6, 0),\n", + " datetime.datetime(2018, 4, 16, 7, 0),\n", + " datetime.datetime(2018, 4, 16, 8, 0),\n", + " datetime.datetime(2018, 4, 16, 9, 0),\n", + " datetime.datetime(2018, 4, 16, 10, 0),\n", + " datetime.datetime(2018, 4, 16, 11, 0),\n", + " datetime.datetime(2018, 4, 16, 12, 0),\n", + " datetime.datetime(2018, 4, 16, 13, 0),\n", + " datetime.datetime(2018, 4, 16, 14, 0),\n", + " datetime.datetime(2018, 4, 16, 15, 0),\n", + " datetime.datetime(2018, 4, 16, 16, 0),\n", + " datetime.datetime(2018, 4, 16, 17, 0),\n", + " datetime.datetime(2018, 4, 16, 18, 0),\n", + " datetime.datetime(2018, 4, 16, 19, 0),\n", + " datetime.datetime(2018, 4, 16, 20, 0),\n", + " datetime.datetime(2018, 4, 16, 21, 0),\n", + " datetime.datetime(2018, 4, 16, 22, 0),\n", + " datetime.datetime(2018, 4, 16, 23, 0),\n", + " datetime.datetime(2018, 4, 17, 0, 0),\n", + " datetime.datetime(2018, 4, 17, 1, 0),\n", + " datetime.datetime(2018, 4, 17, 2, 0),\n", + " datetime.datetime(2018, 4, 17, 3, 0),\n", + " datetime.datetime(2018, 4, 17, 4, 0),\n", + " datetime.datetime(2018, 4, 17, 5, 0),\n", + " datetime.datetime(2018, 4, 17, 6, 0),\n", + " datetime.datetime(2018, 4, 17, 7, 0),\n", + " datetime.datetime(2018, 4, 17, 8, 0),\n", + " datetime.datetime(2018, 4, 17, 9, 0),\n", + " datetime.datetime(2018, 4, 17, 10, 0),\n", + " datetime.datetime(2018, 4, 17, 11, 0),\n", + " datetime.datetime(2018, 4, 17, 12, 0),\n", + " datetime.datetime(2018, 4, 17, 13, 0),\n", + " datetime.datetime(2018, 4, 17, 14, 0),\n", + " datetime.datetime(2018, 4, 17, 15, 0),\n", + " datetime.datetime(2018, 4, 17, 16, 0),\n", + " datetime.datetime(2018, 4, 17, 17, 0),\n", + " datetime.datetime(2018, 4, 17, 18, 0),\n", + " datetime.datetime(2018, 4, 17, 19, 0),\n", + " datetime.datetime(2018, 4, 17, 20, 0),\n", + " datetime.datetime(2018, 4, 17, 21, 0),\n", + " datetime.datetime(2018, 4, 17, 22, 0),\n", + " datetime.datetime(2018, 4, 17, 23, 0),\n", + " datetime.datetime(2018, 4, 18, 0, 0),\n", + " datetime.datetime(2018, 4, 18, 1, 0),\n", + " datetime.datetime(2018, 4, 18, 2, 0),\n", + " datetime.datetime(2018, 4, 18, 3, 0),\n", + " datetime.datetime(2018, 4, 18, 4, 0),\n", + " datetime.datetime(2018, 4, 18, 5, 0),\n", + " datetime.datetime(2018, 4, 18, 6, 0),\n", + " datetime.datetime(2018, 4, 18, 7, 0),\n", + " datetime.datetime(2018, 4, 18, 8, 0),\n", + " datetime.datetime(2018, 4, 18, 9, 0),\n", + " datetime.datetime(2018, 4, 18, 10, 0),\n", + " 
datetime.datetime(2018, 4, 18, 11, 0),\n", + " datetime.datetime(2018, 4, 18, 12, 0),\n", + " datetime.datetime(2018, 4, 18, 13, 0),\n", + " datetime.datetime(2018, 4, 18, 14, 0),\n", + " datetime.datetime(2018, 4, 18, 15, 0),\n", + " datetime.datetime(2018, 4, 18, 16, 0),\n", + " datetime.datetime(2018, 4, 18, 17, 0),\n", + " datetime.datetime(2018, 4, 18, 18, 0),\n", + " datetime.datetime(2018, 4, 18, 19, 0),\n", + " datetime.datetime(2018, 4, 18, 20, 0),\n", + " datetime.datetime(2018, 4, 18, 21, 0),\n", + " datetime.datetime(2018, 4, 18, 22, 0),\n", + " datetime.datetime(2018, 4, 18, 23, 0),\n", + " datetime.datetime(2018, 4, 19, 0, 0),\n", + " datetime.datetime(2018, 4, 19, 1, 0),\n", + " datetime.datetime(2018, 4, 19, 2, 0),\n", + " datetime.datetime(2018, 4, 19, 3, 0),\n", + " datetime.datetime(2018, 4, 19, 4, 0),\n", + " datetime.datetime(2018, 4, 19, 5, 0),\n", + " datetime.datetime(2018, 4, 19, 6, 0),\n", + " datetime.datetime(2018, 4, 19, 7, 0),\n", + " datetime.datetime(2018, 4, 19, 8, 0),\n", + " datetime.datetime(2018, 4, 19, 9, 0),\n", + " datetime.datetime(2018, 4, 19, 10, 0),\n", + " datetime.datetime(2018, 4, 19, 11, 0),\n", + " datetime.datetime(2018, 4, 19, 12, 0),\n", + " datetime.datetime(2018, 4, 19, 13, 0),\n", + " datetime.datetime(2018, 4, 19, 14, 0),\n", + " datetime.datetime(2018, 4, 19, 15, 0),\n", + " datetime.datetime(2018, 4, 19, 16, 0),\n", + " datetime.datetime(2018, 4, 19, 17, 0),\n", + " datetime.datetime(2018, 4, 19, 18, 0),\n", + " datetime.datetime(2018, 4, 19, 19, 0),\n", + " datetime.datetime(2018, 4, 19, 20, 0),\n", + " datetime.datetime(2018, 4, 19, 21, 0),\n", + " datetime.datetime(2018, 4, 19, 22, 0),\n", + " datetime.datetime(2018, 4, 19, 23, 0),\n", + " datetime.datetime(2018, 4, 20, 0, 0),\n", + " datetime.datetime(2018, 4, 20, 1, 0),\n", + " datetime.datetime(2018, 4, 20, 2, 0),\n", + " datetime.datetime(2018, 4, 20, 3, 0),\n", + " datetime.datetime(2018, 4, 20, 4, 0),\n", + " datetime.datetime(2018, 4, 20, 5, 0),\n", + " datetime.datetime(2018, 4, 20, 6, 0),\n", + " datetime.datetime(2018, 4, 20, 7, 0),\n", + " datetime.datetime(2018, 4, 20, 8, 0),\n", + " datetime.datetime(2018, 4, 20, 9, 0),\n", + " datetime.datetime(2018, 4, 20, 10, 0),\n", + " datetime.datetime(2018, 4, 20, 11, 0),\n", + " datetime.datetime(2018, 4, 20, 12, 0),\n", + " datetime.datetime(2018, 4, 20, 13, 0),\n", + " datetime.datetime(2018, 4, 20, 14, 0),\n", + " datetime.datetime(2018, 4, 20, 15, 0),\n", + " datetime.datetime(2018, 4, 20, 16, 0),\n", + " datetime.datetime(2018, 4, 20, 17, 0),\n", + " datetime.datetime(2018, 4, 20, 18, 0),\n", + " datetime.datetime(2018, 4, 20, 19, 0),\n", + " datetime.datetime(2018, 4, 20, 20, 0),\n", + " datetime.datetime(2018, 4, 20, 21, 0),\n", + " datetime.datetime(2018, 4, 20, 22, 0),\n", + " datetime.datetime(2018, 4, 20, 23, 0),\n", + " datetime.datetime(2018, 4, 21, 0, 0),\n", + " datetime.datetime(2018, 4, 21, 1, 0),\n", + " datetime.datetime(2018, 4, 21, 2, 0),\n", + " datetime.datetime(2018, 4, 21, 3, 0),\n", + " datetime.datetime(2018, 4, 21, 4, 0),\n", + " datetime.datetime(2018, 4, 21, 5, 0),\n", + " datetime.datetime(2018, 4, 21, 6, 0),\n", + " datetime.datetime(2018, 4, 21, 7, 0),\n", + " datetime.datetime(2018, 4, 21, 8, 0),\n", + " datetime.datetime(2018, 4, 21, 9, 0),\n", + " datetime.datetime(2018, 4, 21, 10, 0),\n", + " datetime.datetime(2018, 4, 21, 11, 0),\n", + " datetime.datetime(2018, 4, 21, 12, 0),\n", + " datetime.datetime(2018, 4, 21, 13, 0),\n", + " datetime.datetime(2018, 4, 21, 14, 0),\n", + " 
datetime.datetime(2018, 4, 21, 15, 0),\n", + " datetime.datetime(2018, 4, 21, 16, 0),\n", + " datetime.datetime(2018, 4, 21, 17, 0),\n", + " datetime.datetime(2018, 4, 21, 18, 0),\n", + " datetime.datetime(2018, 4, 21, 19, 0),\n", + " datetime.datetime(2018, 4, 21, 20, 0),\n", + " datetime.datetime(2018, 4, 21, 21, 0),\n", + " datetime.datetime(2018, 4, 21, 22, 0),\n", + " datetime.datetime(2018, 4, 21, 23, 0),\n", + " datetime.datetime(2018, 4, 22, 0, 0),\n", + " datetime.datetime(2018, 4, 22, 1, 0),\n", + " datetime.datetime(2018, 4, 22, 2, 0),\n", + " datetime.datetime(2018, 4, 22, 3, 0),\n", + " datetime.datetime(2018, 4, 22, 4, 0),\n", + " datetime.datetime(2018, 4, 22, 5, 0),\n", + " datetime.datetime(2018, 4, 22, 6, 0),\n", + " datetime.datetime(2018, 4, 22, 7, 0),\n", + " datetime.datetime(2018, 4, 22, 8, 0),\n", + " datetime.datetime(2018, 4, 22, 9, 0),\n", + " datetime.datetime(2018, 4, 22, 10, 0),\n", + " datetime.datetime(2018, 4, 22, 11, 0),\n", + " datetime.datetime(2018, 4, 22, 12, 0),\n", + " datetime.datetime(2018, 4, 22, 13, 0),\n", + " datetime.datetime(2018, 4, 22, 14, 0),\n", + " datetime.datetime(2018, 4, 22, 15, 0),\n", + " datetime.datetime(2018, 4, 22, 16, 0),\n", + " datetime.datetime(2018, 4, 22, 17, 0),\n", + " datetime.datetime(2018, 4, 22, 18, 0),\n", + " datetime.datetime(2018, 4, 22, 19, 0),\n", + " datetime.datetime(2018, 4, 22, 20, 0),\n", + " datetime.datetime(2018, 4, 22, 21, 0),\n", + " datetime.datetime(2018, 4, 22, 22, 0),\n", + " datetime.datetime(2018, 4, 22, 23, 0),\n", + " datetime.datetime(2018, 4, 23, 0, 0),\n", + " datetime.datetime(2018, 4, 23, 1, 0),\n", + " datetime.datetime(2018, 4, 23, 2, 0),\n", + " datetime.datetime(2018, 4, 23, 3, 0),\n", + " datetime.datetime(2018, 4, 23, 4, 0),\n", + " datetime.datetime(2018, 4, 23, 5, 0),\n", + " datetime.datetime(2018, 4, 23, 6, 0),\n", + " datetime.datetime(2018, 4, 23, 7, 0),\n", + " datetime.datetime(2018, 4, 23, 8, 0),\n", + " datetime.datetime(2018, 4, 23, 9, 0),\n", + " datetime.datetime(2018, 4, 23, 10, 0),\n", + " datetime.datetime(2018, 4, 23, 11, 0),\n", + " datetime.datetime(2018, 4, 23, 12, 0),\n", + " datetime.datetime(2018, 4, 23, 13, 0),\n", + " datetime.datetime(2018, 4, 23, 14, 0),\n", + " datetime.datetime(2018, 4, 23, 15, 0),\n", + " datetime.datetime(2018, 4, 23, 16, 0),\n", + " datetime.datetime(2018, 4, 23, 17, 0),\n", + " datetime.datetime(2018, 4, 23, 18, 0),\n", + " datetime.datetime(2018, 4, 23, 19, 0),\n", + " datetime.datetime(2018, 4, 23, 20, 0),\n", + " datetime.datetime(2018, 4, 23, 21, 0),\n", + " datetime.datetime(2018, 4, 23, 22, 0),\n", + " datetime.datetime(2018, 4, 23, 23, 0),\n", + " datetime.datetime(2018, 4, 24, 0, 0),\n", + " datetime.datetime(2018, 4, 24, 1, 0),\n", + " datetime.datetime(2018, 4, 24, 2, 0),\n", + " datetime.datetime(2018, 4, 24, 3, 0),\n", + " datetime.datetime(2018, 4, 24, 4, 0),\n", + " datetime.datetime(2018, 4, 24, 5, 0),\n", + " datetime.datetime(2018, 4, 24, 6, 0),\n", + " datetime.datetime(2018, 4, 24, 7, 0),\n", + " datetime.datetime(2018, 4, 24, 8, 0),\n", + " datetime.datetime(2018, 4, 24, 9, 0),\n", + " datetime.datetime(2018, 4, 24, 10, 0),\n", + " datetime.datetime(2018, 4, 24, 11, 0),\n", + " datetime.datetime(2018, 4, 24, 12, 0),\n", + " datetime.datetime(2018, 4, 24, 13, 0),\n", + " datetime.datetime(2018, 4, 24, 14, 0),\n", + " datetime.datetime(2018, 4, 24, 15, 0),\n", + " datetime.datetime(2018, 4, 24, 16, 0),\n", + " datetime.datetime(2018, 4, 24, 17, 0),\n", + " datetime.datetime(2018, 4, 24, 18, 0),\n", + " 
datetime.datetime(2018, 4, 24, 19, 0),\n", + " datetime.datetime(2018, 4, 24, 20, 0),\n", + " datetime.datetime(2018, 4, 24, 21, 0),\n", + " datetime.datetime(2018, 4, 24, 22, 0),\n", + " datetime.datetime(2018, 4, 24, 23, 0),\n", + " datetime.datetime(2018, 4, 25, 0, 0),\n", + " datetime.datetime(2018, 4, 25, 1, 0),\n", + " datetime.datetime(2018, 4, 25, 2, 0),\n", + " datetime.datetime(2018, 4, 25, 3, 0),\n", + " datetime.datetime(2018, 4, 25, 4, 0),\n", + " datetime.datetime(2018, 4, 25, 5, 0),\n", + " datetime.datetime(2018, 4, 25, 6, 0),\n", + " datetime.datetime(2018, 4, 25, 7, 0),\n", + " datetime.datetime(2018, 4, 25, 8, 0),\n", + " datetime.datetime(2018, 4, 25, 9, 0),\n", + " datetime.datetime(2018, 4, 25, 10, 0),\n", + " datetime.datetime(2018, 4, 25, 11, 0),\n", + " datetime.datetime(2018, 4, 25, 12, 0),\n", + " datetime.datetime(2018, 4, 25, 13, 0),\n", + " datetime.datetime(2018, 4, 25, 14, 0),\n", + " datetime.datetime(2018, 4, 25, 15, 0),\n", + " datetime.datetime(2018, 4, 25, 16, 0),\n", + " datetime.datetime(2018, 4, 25, 17, 0),\n", + " datetime.datetime(2018, 4, 25, 18, 0),\n", + " datetime.datetime(2018, 4, 25, 19, 0),\n", + " datetime.datetime(2018, 4, 25, 20, 0),\n", + " datetime.datetime(2018, 4, 25, 21, 0),\n", + " datetime.datetime(2018, 4, 25, 22, 0),\n", + " datetime.datetime(2018, 4, 25, 23, 0),\n", + " datetime.datetime(2018, 4, 26, 0, 0),\n", + " datetime.datetime(2018, 4, 26, 1, 0),\n", + " datetime.datetime(2018, 4, 26, 2, 0),\n", + " datetime.datetime(2018, 4, 26, 3, 0),\n", + " datetime.datetime(2018, 4, 26, 4, 0),\n", + " datetime.datetime(2018, 4, 26, 5, 0),\n", + " datetime.datetime(2018, 4, 26, 6, 0),\n", + " datetime.datetime(2018, 4, 26, 7, 0),\n", + " datetime.datetime(2018, 4, 26, 8, 0),\n", + " datetime.datetime(2018, 4, 26, 9, 0),\n", + " datetime.datetime(2018, 4, 26, 10, 0),\n", + " datetime.datetime(2018, 4, 26, 11, 0),\n", + " datetime.datetime(2018, 4, 26, 12, 0),\n", + " datetime.datetime(2018, 4, 26, 13, 0),\n", + " datetime.datetime(2018, 4, 26, 14, 0),\n", + " datetime.datetime(2018, 4, 26, 15, 0),\n", + " datetime.datetime(2018, 4, 26, 16, 0),\n", + " datetime.datetime(2018, 4, 26, 17, 0),\n", + " datetime.datetime(2018, 4, 26, 18, 0),\n", + " datetime.datetime(2018, 4, 26, 19, 0),\n", + " datetime.datetime(2018, 4, 26, 20, 0),\n", + " datetime.datetime(2018, 4, 26, 21, 0),\n", + " datetime.datetime(2018, 4, 26, 22, 0),\n", + " datetime.datetime(2018, 4, 26, 23, 0),\n", + " datetime.datetime(2018, 4, 27, 0, 0),\n", + " datetime.datetime(2018, 4, 27, 1, 0),\n", + " datetime.datetime(2018, 4, 27, 2, 0),\n", + " datetime.datetime(2018, 4, 27, 3, 0),\n", + " datetime.datetime(2018, 4, 27, 4, 0),\n", + " datetime.datetime(2018, 4, 27, 5, 0),\n", + " datetime.datetime(2018, 4, 27, 6, 0),\n", + " datetime.datetime(2018, 4, 27, 7, 0),\n", + " datetime.datetime(2018, 4, 27, 8, 0),\n", + " datetime.datetime(2018, 4, 27, 9, 0),\n", + " datetime.datetime(2018, 4, 27, 10, 0),\n", + " datetime.datetime(2018, 4, 27, 11, 0),\n", + " datetime.datetime(2018, 4, 27, 12, 0),\n", + " datetime.datetime(2018, 4, 27, 13, 0),\n", + " datetime.datetime(2018, 4, 27, 14, 0),\n", + " datetime.datetime(2018, 4, 27, 15, 0),\n", + " datetime.datetime(2018, 4, 27, 16, 0),\n", + " datetime.datetime(2018, 4, 27, 17, 0),\n", + " datetime.datetime(2018, 4, 27, 18, 0),\n", + " datetime.datetime(2018, 4, 27, 19, 0),\n", + " datetime.datetime(2018, 4, 27, 20, 0),\n", + " datetime.datetime(2018, 4, 27, 21, 0),\n", + " datetime.datetime(2018, 4, 27, 22, 0),\n", + " 
datetime.datetime(2018, 4, 27, 23, 0),\n", + " datetime.datetime(2018, 4, 28, 0, 0),\n", + " datetime.datetime(2018, 4, 28, 1, 0),\n", + " datetime.datetime(2018, 4, 28, 2, 0),\n", + " datetime.datetime(2018, 4, 28, 3, 0),\n", + " datetime.datetime(2018, 4, 28, 4, 0),\n", + " datetime.datetime(2018, 4, 28, 5, 0),\n", + " datetime.datetime(2018, 4, 28, 6, 0),\n", + " datetime.datetime(2018, 4, 28, 7, 0),\n", + " datetime.datetime(2018, 4, 28, 8, 0),\n", + " datetime.datetime(2018, 4, 28, 9, 0),\n", + " datetime.datetime(2018, 4, 28, 10, 0),\n", + " datetime.datetime(2018, 4, 28, 11, 0),\n", + " datetime.datetime(2018, 4, 28, 12, 0),\n", + " datetime.datetime(2018, 4, 28, 13, 0),\n", + " datetime.datetime(2018, 4, 28, 14, 0),\n", + " datetime.datetime(2018, 4, 28, 15, 0),\n", + " datetime.datetime(2018, 4, 28, 16, 0),\n", + " datetime.datetime(2018, 4, 28, 17, 0),\n", + " datetime.datetime(2018, 4, 28, 18, 0),\n", + " datetime.datetime(2018, 4, 28, 19, 0),\n", + " datetime.datetime(2018, 4, 28, 20, 0),\n", + " datetime.datetime(2018, 4, 28, 21, 0),\n", + " datetime.datetime(2018, 4, 28, 22, 0),\n", + " datetime.datetime(2018, 4, 28, 23, 0),\n", + " datetime.datetime(2018, 4, 29, 0, 0),\n", + " datetime.datetime(2018, 4, 29, 1, 0),\n", + " datetime.datetime(2018, 4, 29, 2, 0),\n", + " datetime.datetime(2018, 4, 29, 3, 0),\n", + " datetime.datetime(2018, 4, 29, 4, 0),\n", + " datetime.datetime(2018, 4, 29, 5, 0),\n", + " datetime.datetime(2018, 4, 29, 6, 0),\n", + " datetime.datetime(2018, 4, 29, 7, 0),\n", + " datetime.datetime(2018, 4, 29, 8, 0),\n", + " datetime.datetime(2018, 4, 29, 9, 0),\n", + " datetime.datetime(2018, 4, 29, 10, 0),\n", + " datetime.datetime(2018, 4, 29, 11, 0),\n", + " datetime.datetime(2018, 4, 29, 12, 0),\n", + " datetime.datetime(2018, 4, 29, 13, 0),\n", + " datetime.datetime(2018, 4, 29, 14, 0),\n", + " datetime.datetime(2018, 4, 29, 15, 0),\n", + " datetime.datetime(2018, 4, 29, 16, 0),\n", + " datetime.datetime(2018, 4, 29, 17, 0),\n", + " datetime.datetime(2018, 4, 29, 18, 0),\n", + " datetime.datetime(2018, 4, 29, 19, 0),\n", + " datetime.datetime(2018, 4, 29, 20, 0),\n", + " datetime.datetime(2018, 4, 29, 21, 0),\n", + " datetime.datetime(2018, 4, 29, 22, 0),\n", + " datetime.datetime(2018, 4, 29, 23, 0),\n", + " datetime.datetime(2018, 4, 30, 0, 0),\n", + " datetime.datetime(2018, 4, 30, 1, 0),\n", + " datetime.datetime(2018, 4, 30, 2, 0),\n", + " datetime.datetime(2018, 4, 30, 3, 0),\n", + " datetime.datetime(2018, 4, 30, 4, 0),\n", + " datetime.datetime(2018, 4, 30, 5, 0),\n", + " datetime.datetime(2018, 4, 30, 6, 0),\n", + " datetime.datetime(2018, 4, 30, 7, 0),\n", + " datetime.datetime(2018, 4, 30, 8, 0),\n", + " datetime.datetime(2018, 4, 30, 9, 0),\n", + " datetime.datetime(2018, 4, 30, 10, 0),\n", + " datetime.datetime(2018, 4, 30, 11, 0),\n", + " datetime.datetime(2018, 4, 30, 12, 0),\n", + " datetime.datetime(2018, 4, 30, 13, 0),\n", + " datetime.datetime(2018, 4, 30, 14, 0),\n", + " datetime.datetime(2018, 4, 30, 15, 0),\n", + " datetime.datetime(2018, 4, 30, 16, 0),\n", + " datetime.datetime(2018, 4, 30, 17, 0),\n", + " datetime.datetime(2018, 4, 30, 18, 0),\n", + " datetime.datetime(2018, 4, 30, 19, 0),\n", + " datetime.datetime(2018, 4, 30, 20, 0),\n", + " datetime.datetime(2018, 4, 30, 21, 0),\n", + " datetime.datetime(2018, 4, 30, 22, 0),\n", + " datetime.datetime(2018, 4, 30, 23, 0)]" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.time" + ] + }, + { + "cell_type": "code", 
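+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "*Aside (a sketch, not part of the original run): `NES.time` is a plain Python list of datetimes, so the long output above can be summarised instead of printed in full:*"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Compact view of the hourly time axis\n",
+    "print(len(nessy_1.time), nessy_1.time[0], nessy_1.time[-1])"
+   ]
+  },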
+ "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, 47.76666641, 46.677778 ,\n", + " 48.721111 , 47.529167 , 47.05407 , 47.348056 ,\n", + " 47.973056 , 48.878611 , 48.106111 , 48.371111 ,\n", + " 48.334722 , 48.050833 , 47.838611 , 47.040277 ,\n", + " 47.06694444, 49.877778 , 50.629421 , 50.503333 ,\n", + " 41.695833 , 32.27000046, 80.05000305, 46.5475 ,\n", + " 46.813056 , 47.479722 , 47.049722 , 47.0675 ,\n", + " 47.18961391, -30.17254 , 16.86403 , 35.0381 ,\n", + " 49.73508444, 49.573394 , 49.066667 , 54.925556 ,\n", + " 52.802222 , 47.914722 , 53.166667 , 50.65 ,\n", + " 54.4368 , 47.80149841, 47.4165 , -70.666 ,\n", + " 54.746495 , 81.6 , 55.693588 , 72.58000183,\n", + " 56.290424 , 59.5 , 58.383333 , 39.54694 ,\n", + " 42.72056 , 39.87528 , 37.23722 , 43.43917 ,\n", + " 41.27417 , 42.31917 , 38.47278 , 39.08278 ,\n", + " 41.23889 , 41.39389 , 42.63472 , 37.05194 ,\n", + " 28.309 , 59.779167 , 60.53002 , 66.320278 ,\n", + " 67.97333333, 48.5 , 49.9 , 47.266667 ,\n", + " 43.616667 , 47.3 , 46.65 , 45. ,\n", + " 45.8 , 48.633333 , 42.936667 , 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , 51.149617 , 38.366667 ,\n", + " 35.316667 , 46.966667 , 46.91 , -0.20194 ,\n", + " 51.939722 , 53.32583 , 45.8 , 44.183333 ,\n", + " 37.571111 , 42.805462 , -69.005 , 39.0319 ,\n", + " 24.2883 , 24.466941 , 36.53833389, 33.293917 ,\n", + " 55.37611111, 56.161944 , 57.135278 , 36.0722 ,\n", + " 52.083333 , 53.333889 , 51.541111 , 52.3 ,\n", + " 51.974444 , 58.38853 , 65.833333 , 62.783333 ,\n", + " 78.90715 , 59. 
, 69.45 , 59.2 ,\n", + " 60.372386 , -72.0117 , 59.2 , -41.40819168,\n", + " -77.83200073, -45.0379982 , 51.814408 , 50.736444 ,\n", + " 54.753894 , 54.15 , 43.4 , 71.58616638,\n", + " 63.85 , 67.883333 , 57.394 , 57.1645 ,\n", + " 57.9525 , 56.0429 , 60.0858 , 57.816667 ,\n", + " 64.25 , 59.728 , 45.566667 , 46.428611 ,\n", + " 46.299444 , 48.933333 , 49.15 , 49.05 ,\n", + " 47.96 , 71.32301331, 40.12498 , 19.53623009,\n", + " -89.99694824, 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'long_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, 1.67666664e+01,\n", + " 1.29722220e+01, 1.59422220e+01, 9.92666700e+00,\n", + " 1.29579400e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 5.27897222e+00, 2.61000833e+00,\n", + " 2.96488600e+00, -3.20416700e+00, -7.87000000e+00,\n", + " -3.71305600e+00, -8.07500000e-01, -4.77444400e+00,\n", + " -3.03305600e+00, -3.20500000e+00, -1.75333300e+00,\n", + " 1.79444000e-01, 1.46305600e+00, -4.69146200e+00,\n", + " 2.92778000e-01, -3.24290000e+00, 1.12194400e+00,\n", + " 1.08223000e+00, -1.18531900e+00, -1.43822800e+00,\n", + " 2.30833330e+01, 2.56666670e+01, 1.95833330e+01,\n", + " 1.63200000e+01, 1.00318100e+02, -1.02444440e+01,\n", + " -9.89944000e+00, 8.63333300e+00, 1.07000000e+01,\n", + " 1.26597220e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 1.42184000e+01,\n", + " 6.56666700e+00, 6.27722200e+00, 5.85361100e+00,\n", + " 4.50000000e+00, 4.92361100e+00, 8.25200000e+00,\n", + " 1.39166670e+01, 8.88333300e+00, 1.18866800e+01,\n", 
+ " 1.15333330e+01, 3.00333330e+01, 5.20000000e+00,\n", + " 1.10781420e+01, 2.53510000e+00, 9.51666700e+00,\n", + " 1.74870804e+02, 1.66660004e+02, 1.69684006e+02,\n", + " 2.19724190e+01, 1.57395000e+01, 1.75342640e+01,\n", + " 2.20666670e+01, 2.19500000e+01, 1.28918823e+02,\n", + " 1.53333330e+01, 2.10666670e+01, 1.19140000e+01,\n", + " 1.47825000e+01, 1.24030000e+01, 1.31480000e+01,\n", + " 1.75052800e+01, 1.55666670e+01, 1.97666670e+01,\n", + " 1.54720000e+01, 1.48666670e+01, 1.50033330e+01,\n", + " 1.45386110e+01, 1.95833330e+01, 2.02833330e+01,\n", + " 2.22666670e+01, 1.78605560e+01, -1.56611465e+02,\n", + " -1.05236800e+02, -1.55576157e+02, -2.47999992e+01,\n", + " -1.24151001e+02, 1.03515700e+02, 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'long_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_1.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading ASTER_v3_altitude var (1/175)\n", + "Rank 000: Loaded ASTER_v3_altitude var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_BC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_CO_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NH3_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NMVOC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_NOx_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_OC_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_PM10_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_SO2_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var ((168,))\n", + "Rank 000: Loading EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", + "Rank 000: Loaded EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var ((168,))\n", + "Rank 000: Loading ESDAC_Iwahashi_landform_classification var (12/175)\n", + "Rank 000: Loaded ESDAC_Iwahashi_landform_classification var ((168,))\n", + "Rank 000: Loading ESDAC_Meybeck_landform_classification var (13/175)\n", + "Rank 000: Loaded ESDAC_Meybeck_landform_classification var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", + "Rank 000: Loaded 
ESDAC_modal_Iwahashi_landform_classification_25km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", + "Rank 000: Loaded ESDAC_modal_Iwahashi_landform_classification_5km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_25km var ((168,))\n", + "Rank 000: Loading ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", + "Rank 000: Loaded ESDAC_modal_Meybeck_landform_classification_5km var ((168,))\n", + "Rank 000: Loading ETOPO1_altitude var (18/175)\n", + "Rank 000: Loaded ETOPO1_altitude var ((168,))\n", + "Rank 000: Loading ETOPO1_max_altitude_difference_5km var (19/175)\n", + "Rank 000: Loaded ETOPO1_max_altitude_difference_5km var ((168,))\n", + "Rank 000: Loading GHOST_version var (20/175)\n", + "Rank 000: Loaded GHOST_version var ((168,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_25km var (21/175)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_average_built_up_area_density_5km var (22/175)\n", + "Rank 000: Loaded GHSL_average_built_up_area_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_average_population_density_25km var (23/175)\n", + "Rank 000: Loaded GHSL_average_population_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_average_population_density_5km var (24/175)\n", + "Rank 000: Loaded GHSL_average_population_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_built_up_area_density var (25/175)\n", + "Rank 000: Loaded GHSL_built_up_area_density var ((168,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_25km var (26/175)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_max_built_up_area_density_5km var (27/175)\n", + "Rank 000: Loaded GHSL_max_built_up_area_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_max_population_density_25km var (28/175)\n", + "Rank 000: Loaded GHSL_max_population_density_25km var ((168,))\n", + "Rank 000: Loading GHSL_max_population_density_5km var (29/175)\n", + "Rank 000: Loaded GHSL_max_population_density_5km var ((168,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_25km var (30/175)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_25km var ((168,))\n", + "Rank 000: Loading GHSL_modal_settlement_model_classification_5km var (31/175)\n", + "Rank 000: Loaded GHSL_modal_settlement_model_classification_5km var ((168,))\n", + "Rank 000: Loading GHSL_population_density var (32/175)\n", + "Rank 000: Loaded GHSL_population_density var ((168,))\n", + "Rank 000: Loading GHSL_settlement_model_classification var (33/175)\n", + "Rank 000: Loaded GHSL_settlement_model_classification var ((168,))\n", + "Rank 000: Loading GPW_average_population_density_25km var (34/175)\n", + "Rank 000: Loaded GPW_average_population_density_25km var ((168,))\n", + "Rank 000: Loading GPW_average_population_density_5km var (35/175)\n", + "Rank 000: Loaded GPW_average_population_density_5km var ((168,))\n", + "Rank 000: Loading GPW_max_population_density_25km var (36/175)\n", + "Rank 000: Loaded GPW_max_population_density_25km var ((168,))\n", + "Rank 000: Loading GPW_max_population_density_5km var (37/175)\n", + "Rank 000: Loaded GPW_max_population_density_5km var ((168,))\n", + "Rank 000: Loading GPW_population_density var (38/175)\n", + "Rank 000: Loaded GPW_population_density var ((168,))\n", + "Rank 
000: Loading GSFC_coastline_proximity var (39/175)\n", + "Rank 000: Loaded GSFC_coastline_proximity var ((168,))\n", + "Rank 000: Loading Joly-Peuch_classification_code var (40/175)\n", + "Rank 000: Loaded Joly-Peuch_classification_code var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_classification var (41/175)\n", + "Rank 000: Loaded Koppen-Geiger_classification var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_25km var (42/175)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_25km var ((168,))\n", + "Rank 000: Loading Koppen-Geiger_modal_classification_5km var (43/175)\n", + "Rank 000: Loaded Koppen-Geiger_modal_classification_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_IGBP_land_use var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_LAI var (45/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_LAI var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_UMD_land_use var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_LAI_5km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_25km var ((168,))\n", + "Rank 000: Loading MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", + "Rank 000: Loaded MODIS_MCD12C1_v6_modal_UMD_land_use_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var ((168,))\n", + "Rank 000: Loading NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", + "Rank 000: Loaded NOAA-DMSP-OLS_v4_nighttime_stable_lights var ((168,))\n", + "Rank 000: Loading OMI_level3_column_annual_average_NO2 var (58/175)\n", + "Rank 000: Loaded OMI_level3_column_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", + "Rank 000: Loaded OMI_level3_column_cloud_screened_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_annual_average_NO2 var ((168,))\n", + "Rank 000: Loading OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", + "Rank 000: Loaded OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var 
((168,))\n", + "Rank 000: Loading UMBC_anthrome_classification var (62/175)\n", + "Rank 000: Loaded UMBC_anthrome_classification var ((168,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_25km var (63/175)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_25km var ((168,))\n", + "Rank 000: Loading UMBC_modal_anthrome_classification_5km var (64/175)\n", + "Rank 000: Loaded UMBC_modal_anthrome_classification_5km var ((168,))\n", + "Rank 000: Loading WMO_region var (65/175)\n", + "Rank 000: Loaded WMO_region var ((168,))\n", + "Rank 000: Loading WWF_TEOW_biogeographical_realm var (66/175)\n", + "Rank 000: Loaded WWF_TEOW_biogeographical_realm var ((168,))\n", + "Rank 000: Loading WWF_TEOW_biome var (67/175)\n", + "Rank 000: Loaded WWF_TEOW_biome var ((168,))\n", + "Rank 000: Loading WWF_TEOW_terrestrial_ecoregion var (68/175)\n", + "Rank 000: Loaded WWF_TEOW_terrestrial_ecoregion var ((168,))\n", + "Rank 000: Loading administrative_country_division_1 var (69/175)\n", + "Rank 000: Loaded administrative_country_division_1 var ((168,))\n", + "Rank 000: Loading administrative_country_division_2 var (70/175)\n", + "Rank 000: Loaded administrative_country_division_2 var ((168,))\n", + "Rank 000: Loading altitude var (71/175)\n", + "Rank 000: Loaded altitude var ((168,))\n", + "Rank 000: Loading annual_native_max_gap_percent var (72/175)\n", + "Rank 000: Loaded annual_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading annual_native_representativity_percent var (73/175)\n", + "Rank 000: Loaded annual_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading area_classification var (74/175)\n", + "Rank 000: Loaded area_classification var ((168,))\n", + "Rank 000: Loading associated_networks var (75/175)\n", + "Rank 000: Loaded associated_networks var ((168,))\n", + "Rank 000: Loading city var (76/175)\n", + "Rank 000: Loaded city var ((168,))\n", + "Rank 000: Loading climatology var (77/175)\n", + "Rank 000: Loaded climatology var ((168,))\n", + "Rank 000: Loading contact_email_address var (78/175)\n", + "Rank 000: Loaded contact_email_address var ((168,))\n", + "Rank 000: Loading contact_institution var (79/175)\n", + "Rank 000: Loaded contact_institution var ((168,))\n", + "Rank 000: Loading contact_name var (80/175)\n", + "Rank 000: Loaded contact_name var ((168,))\n", + "Rank 000: Loading country var (81/175)\n", + "Rank 000: Loaded country var ((168,))\n", + "Rank 000: Loading daily_native_max_gap_percent var (82/175)\n", + "Rank 000: Loaded daily_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading daily_native_representativity_percent var (83/175)\n", + "Rank 000: Loaded daily_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading daily_passing_vehicles var (84/175)\n", + "Rank 000: Loaded daily_passing_vehicles var ((168,))\n", + "Rank 000: Loading data_level var (85/175)\n", + "Rank 000: Loaded data_level var ((168,))\n", + "Rank 000: Loading data_licence var (86/175)\n", + "Rank 000: Loaded data_licence var ((168,))\n", + "Rank 000: Loading day_night_code var (87/175)\n", + "Rank 000: Loaded day_night_code var ((168, 720))\n", + "Rank 000: Loading daytime_traffic_speed var (88/175)\n", + "Rank 000: Loaded daytime_traffic_speed var ((168,))\n", + "Rank 000: Loading derived_uncertainty_per_measurement var (89/175)\n", + "Rank 000: Loaded derived_uncertainty_per_measurement var ((168, 720))\n", + "Rank 000: Loading distance_to_building var (90/175)\n", + "Rank 000: Loaded distance_to_building var ((168,))\n", + 
"Rank 000: Loading distance_to_junction var (91/175)\n", + "Rank 000: Loaded distance_to_junction var ((168,))\n", + "Rank 000: Loading distance_to_kerb var (92/175)\n", + "Rank 000: Loaded distance_to_kerb var ((168,))\n", + "Rank 000: Loading distance_to_source var (93/175)\n", + "Rank 000: Loaded distance_to_source var ((168,))\n", + "Rank 000: Loading ellipsoid var (94/175)\n", + "Rank 000: Loaded ellipsoid var ((168,))\n", + "Rank 000: Loading horizontal_datum var (95/175)\n", + "Rank 000: Loaded horizontal_datum var ((168,))\n", + "Rank 000: Loading hourly_native_max_gap_percent var (96/175)\n", + "Rank 000: Loaded hourly_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading hourly_native_representativity_percent var (97/175)\n", + "Rank 000: Loaded hourly_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading land_use var (98/175)\n", + "Rank 000: Loaded land_use var ((168,))\n", + "Rank 000: Loading local_time var (99/175)\n", + "Rank 000: Loaded local_time var ((168, 720))\n", + "Rank 000: Loading main_emission_source var (100/175)\n", + "Rank 000: Loaded main_emission_source var ((168,))\n", + "Rank 000: Loading mean_solar_time var (101/175)\n", + "Rank 000: Loaded mean_solar_time var ((168, 720))\n", + "Rank 000: Loading measurement_altitude var (102/175)\n", + "Rank 000: Loaded measurement_altitude var ((168,))\n", + "Rank 000: Loading measurement_methodology var (103/175)\n", + "Rank 000: Loaded measurement_methodology var ((168,))\n", + "Rank 000: Loading measurement_scale var (104/175)\n", + "Rank 000: Loaded measurement_scale var ((168,))\n", + "Rank 000: Loading measuring_instrument_calibration_scale var (105/175)\n", + "Rank 000: Loaded measuring_instrument_calibration_scale var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_absorption_cross_section var (106/175)\n", + "Rank 000: Loaded measuring_instrument_documented_absorption_cross_section var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_accuracy var (107/175)\n", + "Rank 000: Loaded measuring_instrument_documented_accuracy var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_flow_rate var (108/175)\n", + "Rank 000: Loaded measuring_instrument_documented_flow_rate var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", + "Rank 000: Loaded measuring_instrument_documented_lower_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_measurement_resolution var (110/175)\n", + "Rank 000: Loaded measuring_instrument_documented_measurement_resolution var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_precision var (111/175)\n", + "Rank 000: Loaded measuring_instrument_documented_precision var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_span_drift var (112/175)\n", + "Rank 000: Loaded measuring_instrument_documented_span_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_uncertainty var (113/175)\n", + "Rank 000: Loaded measuring_instrument_documented_uncertainty var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", + "Rank 000: Loaded measuring_instrument_documented_upper_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_documented_zero_drift var (115/175)\n", + "Rank 000: Loaded measuring_instrument_documented_zero_drift var ((168,))\n", + "Rank 000: Loading 
measuring_instrument_documented_zonal_drift var (116/175)\n", + "Rank 000: Loaded measuring_instrument_documented_zonal_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_further_details var (117/175)\n", + "Rank 000: Loaded measuring_instrument_further_details var ((168,))\n", + "Rank 000: Loading measuring_instrument_inlet_information var (118/175)\n", + "Rank 000: Loaded measuring_instrument_inlet_information var ((168,))\n", + "Rank 000: Loading measuring_instrument_manual_name var (119/175)\n", + "Rank 000: Loaded measuring_instrument_manual_name var ((168,))\n", + "Rank 000: Loading measuring_instrument_name var (120/175)\n", + "Rank 000: Loaded measuring_instrument_name var ((168,))\n", + "Rank 000: Loading measuring_instrument_process_details var (121/175)\n", + "Rank 000: Loaded measuring_instrument_process_details var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_absorption_cross_section var (122/175)\n", + "Rank 000: Loaded measuring_instrument_reported_absorption_cross_section var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_accuracy var (123/175)\n", + "Rank 000: Loaded measuring_instrument_reported_accuracy var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_flow_rate var (124/175)\n", + "Rank 000: Loaded measuring_instrument_reported_flow_rate var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", + "Rank 000: Loaded measuring_instrument_reported_lower_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_measurement_resolution var (126/175)\n", + "Rank 000: Loaded measuring_instrument_reported_measurement_resolution var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_precision var (127/175)\n", + "Rank 000: Loaded measuring_instrument_reported_precision var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_span_drift var (128/175)\n", + "Rank 000: Loaded measuring_instrument_reported_span_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_uncertainty var (129/175)\n", + "Rank 000: Loaded measuring_instrument_reported_uncertainty var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_units var (130/175)\n", + "Rank 000: Loaded measuring_instrument_reported_units var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", + "Rank 000: Loaded measuring_instrument_reported_upper_limit_of_detection var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_zero_drift var (132/175)\n", + "Rank 000: Loaded measuring_instrument_reported_zero_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_reported_zonal_drift var (133/175)\n", + "Rank 000: Loaded measuring_instrument_reported_zonal_drift var ((168,))\n", + "Rank 000: Loading measuring_instrument_sampling_type var (134/175)\n", + "Rank 000: Loaded measuring_instrument_sampling_type var ((168,))\n", + "Rank 000: Loading monthly_native_max_gap_percent var (135/175)\n", + "Rank 000: Loaded monthly_native_max_gap_percent var ((168, 720))\n", + "Rank 000: Loading monthly_native_representativity_percent var (136/175)\n", + "Rank 000: Loaded monthly_native_representativity_percent var ((168, 720))\n", + "Rank 000: Loading network var (137/175)\n", + "Rank 000: Loaded network var ((168,))\n", + "Rank 000: Loading network_maintenance_details var (138/175)\n", + "Rank 000: Loaded network_maintenance_details var ((168,))\n", + "Rank 000: Loading 
network_miscellaneous_details var (139/175)\n", + "Rank 000: Loaded network_miscellaneous_details var ((168,))\n", + "Rank 000: Loading network_provided_volume_standard_pressure var (140/175)\n", + "Rank 000: Loaded network_provided_volume_standard_pressure var ((168,))\n", + "Rank 000: Loading network_provided_volume_standard_temperature var (141/175)\n", + "Rank 000: Loaded network_provided_volume_standard_temperature var ((168,))\n", + "Rank 000: Loading network_qa_details var (142/175)\n", + "Rank 000: Loaded network_qa_details var ((168,))\n", + "Rank 000: Loading network_sampling_details var (143/175)\n", + "Rank 000: Loaded network_sampling_details var ((168,))\n", + "Rank 000: Loading network_uncertainty_details var (144/175)\n", + "Rank 000: Loaded network_uncertainty_details var ((168,))\n", + "Rank 000: Loading population var (145/175)\n", + "Rank 000: Loaded population var ((168,))\n", + "Rank 000: Loading primary_sampling_further_details var (146/175)\n", + "Rank 000: Loaded primary_sampling_further_details var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_documented_flow_rate var (147/175)\n", + "Rank 000: Loaded primary_sampling_instrument_documented_flow_rate var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_manual_name var (148/175)\n", + "Rank 000: Loaded primary_sampling_instrument_manual_name var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_name var (149/175)\n", + "Rank 000: Loaded primary_sampling_instrument_name var ((168,))\n", + "Rank 000: Loading primary_sampling_instrument_reported_flow_rate var (150/175)\n", + "Rank 000: Loaded primary_sampling_instrument_reported_flow_rate var ((168,))\n", + "Rank 000: Loading primary_sampling_process_details var (151/175)\n", + "Rank 000: Loaded primary_sampling_process_details var ((168,))\n", + "Rank 000: Loading primary_sampling_type var (152/175)\n", + "Rank 000: Loaded primary_sampling_type var ((168,))\n", + "Rank 000: Loading principal_investigator_email_address var (153/175)\n", + "Rank 000: Loaded principal_investigator_email_address var ((168,))\n", + "Rank 000: Loading principal_investigator_institution var (154/175)\n", + "Rank 000: Loaded principal_investigator_institution var ((168,))\n", + "Rank 000: Loading principal_investigator_name var (155/175)\n", + "Rank 000: Loaded principal_investigator_name var ((168,))\n", + "Rank 000: Loading process_warnings var (156/175)\n", + "Rank 000: Loaded process_warnings var ((168,))\n", + "Rank 000: Loading projection var (157/175)\n", + "Rank 000: Loaded projection var ((168,))\n", + "Rank 000: Loading reported_uncertainty_per_measurement var (158/175)\n", + "Rank 000: Loaded reported_uncertainty_per_measurement var ((168, 720))\n", + "Rank 000: Loading representative_radius var (159/175)\n", + "Rank 000: Loaded representative_radius var ((168,))\n", + "Rank 000: Loading sample_preparation_further_details var (160/175)\n", + "Rank 000: Loaded sample_preparation_further_details var ((168,))\n", + "Rank 000: Loading sample_preparation_process_details var (161/175)\n", + "Rank 000: Loaded sample_preparation_process_details var ((168,))\n", + "Rank 000: Loading sample_preparation_techniques var (162/175)\n", + "Rank 000: Loaded sample_preparation_techniques var ((168,))\n", + "Rank 000: Loading sample_preparation_types var (163/175)\n", + "Rank 000: Loaded sample_preparation_types var ((168,))\n", + "Rank 000: Loading sampling_height var (164/175)\n", + "Rank 000: Loaded sampling_height var ((168,))\n", + "Rank 000: Loading 
sconco3 var (165/175)\n", + "Rank 000: Loaded sconco3 var ((168, 720))\n", + "Rank 000: Loading season_code var (166/175)\n", + "Rank 000: Loaded season_code var ((168, 720))\n", + "Rank 000: Loading station_classification var (167/175)\n", + "Rank 000: Loaded station_classification var ((168,))\n", + "Rank 000: Loading station_name var (168/175)\n", + "Rank 000: Loaded station_name var ((168,))\n", + "Rank 000: Loading station_reference var (169/175)\n", + "Rank 000: Loaded station_reference var ((168,))\n", + "Rank 000: Loading station_timezone var (170/175)\n", + "Rank 000: Loaded station_timezone var ((168,))\n", + "Rank 000: Loading street_type var (171/175)\n", + "Rank 000: Loaded street_type var ((168,))\n", + "Rank 000: Loading street_width var (172/175)\n", + "Rank 000: Loaded street_width var ((168,))\n", + "Rank 000: Loading terrain var (173/175)\n", + "Rank 000: Loaded terrain var ((168,))\n", + "Rank 000: Loading vertical_datum var (174/175)\n", + "Rank 000: Loaded vertical_datum var ((168,))\n", + "Rank 000: Loading weekday_weekend_code var (175/175)\n", + "Rank 000: Loaded weekday_weekend_code var ((168, 720))\n" + ] + } + ], + "source": [ + "nessy_1.load()" + ] + },
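+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Opening a file only reads its structure: it is nessy_1.load() above that actually pulls the variable values into memory (one slice per rank when running under MPI), which is why each of the 175 variables is reported as Loading/Loaded." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The loaded data can now be written to a new NetCDF file. The next cell is a minimal sketch of the kind of call that produces the log below; the method name and arguments (to_netcdf with info=True) follow the usual NES write pattern and are an assumption here, not a transcript of the original cell." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sketch (assumed NES API): write the loaded dataset to a new NetCDF file.\n", + "# info=True is expected to print the per-variable progress shown below.\n", + "nessy_1.to_netcdf('providentia_obs_file.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating providentia_obs_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing ASTER_v3_altitude var (1/175)\n", + "Rank 000: Var ASTER_v3_altitude created (1/175)\n", + "Rank 000: Var ASTER_v3_altitude data (1/175)\n", + "Rank 000: Var ASTER_v3_altitude completed (1/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_BC_emissions var (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions created (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions data (2/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_BC_emissions completed (2/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_CO_emissions var (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions created (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions data (3/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_CO_emissions completed (3/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NH3_emissions var (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions created (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions data (4/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NH3_emissions completed (4/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NMVOC_emissions var (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions created (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions data (5/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NMVOC_emissions completed (5/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_NOx_emissions var (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions created (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions data (6/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_NOx_emissions completed (6/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_OC_emissions var (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions created (7/175)\n", + "Rank 000: Var 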
EDGAR_v4.3.2_annual_average_OC_emissions data (7/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_OC_emissions completed (7/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_PM10_emissions var (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions created (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions data (8/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_PM10_emissions completed (8/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_SO2_emissions var (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions created (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions data (9/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_SO2_emissions completed (9/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions var (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions created (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions data (10/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_biogenic_PM2.5_emissions completed (10/175)\n", + "Rank 000: Writing EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions var (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions created (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions data (11/175)\n", + "Rank 000: Var EDGAR_v4.3.2_annual_average_fossilfuel_PM2.5_emissions completed (11/175)\n", + "Rank 000: Writing ESDAC_Iwahashi_landform_classification var (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification created (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification data (12/175)\n", + "Rank 000: Var ESDAC_Iwahashi_landform_classification completed (12/175)\n", + "Rank 000: Writing ESDAC_Meybeck_landform_classification var (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification created (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification data (13/175)\n", + "Rank 000: Var ESDAC_Meybeck_landform_classification completed (13/175)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_25km var (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km created (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km data (14/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_25km completed (14/175)\n", + "Rank 000: Writing ESDAC_modal_Iwahashi_landform_classification_5km var (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km created (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km data (15/175)\n", + "Rank 000: Var ESDAC_modal_Iwahashi_landform_classification_5km completed (15/175)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_25km var (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km created (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km data (16/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_25km completed (16/175)\n", + "Rank 000: Writing ESDAC_modal_Meybeck_landform_classification_5km var (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km created (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km data (17/175)\n", + "Rank 000: Var ESDAC_modal_Meybeck_landform_classification_5km completed (17/175)\n", + "Rank 000: Writing ETOPO1_altitude var 
(18/175)\n", + "Rank 000: Var ETOPO1_altitude created (18/175)\n", + "Rank 000: Var ETOPO1_altitude data (18/175)\n", + "Rank 000: Var ETOPO1_altitude completed (18/175)\n", + "Rank 000: Writing ETOPO1_max_altitude_difference_5km var (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km created (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km data (19/175)\n", + "Rank 000: Var ETOPO1_max_altitude_difference_5km completed (19/175)\n", + "Rank 000: Writing GHOST_version var (20/175)\n", + "Rank 000: Var GHOST_version created (20/175)\n", + "Rank 000: Var GHOST_version data (20/175)\n", + "Rank 000: Var GHOST_version completed (20/175)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_25km var (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km created (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km data (21/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_25km completed (21/175)\n", + "Rank 000: Writing GHSL_average_built_up_area_density_5km var (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km created (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km data (22/175)\n", + "Rank 000: Var GHSL_average_built_up_area_density_5km completed (22/175)\n", + "Rank 000: Writing GHSL_average_population_density_25km var (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km created (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km data (23/175)\n", + "Rank 000: Var GHSL_average_population_density_25km completed (23/175)\n", + "Rank 000: Writing GHSL_average_population_density_5km var (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km created (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km data (24/175)\n", + "Rank 000: Var GHSL_average_population_density_5km completed (24/175)\n", + "Rank 000: Writing GHSL_built_up_area_density var (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density created (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density data (25/175)\n", + "Rank 000: Var GHSL_built_up_area_density completed (25/175)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_25km var (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km created (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km data (26/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_25km completed (26/175)\n", + "Rank 000: Writing GHSL_max_built_up_area_density_5km var (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km created (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km data (27/175)\n", + "Rank 000: Var GHSL_max_built_up_area_density_5km completed (27/175)\n", + "Rank 000: Writing GHSL_max_population_density_25km var (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km created (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km data (28/175)\n", + "Rank 000: Var GHSL_max_population_density_25km completed (28/175)\n", + "Rank 000: Writing GHSL_max_population_density_5km var (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km created (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km data (29/175)\n", + "Rank 000: Var GHSL_max_population_density_5km completed (29/175)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_25km var (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km created (30/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_25km data (30/175)\n", + "Rank 
000: Var GHSL_modal_settlement_model_classification_25km completed (30/175)\n", + "Rank 000: Writing GHSL_modal_settlement_model_classification_5km var (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km created (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km data (31/175)\n", + "Rank 000: Var GHSL_modal_settlement_model_classification_5km completed (31/175)\n", + "Rank 000: Writing GHSL_population_density var (32/175)\n", + "Rank 000: Var GHSL_population_density created (32/175)\n", + "Rank 000: Var GHSL_population_density data (32/175)\n", + "Rank 000: Var GHSL_population_density completed (32/175)\n", + "Rank 000: Writing GHSL_settlement_model_classification var (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification created (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification data (33/175)\n", + "Rank 000: Var GHSL_settlement_model_classification completed (33/175)\n", + "Rank 000: Writing GPW_average_population_density_25km var (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km created (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km data (34/175)\n", + "Rank 000: Var GPW_average_population_density_25km completed (34/175)\n", + "Rank 000: Writing GPW_average_population_density_5km var (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km created (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km data (35/175)\n", + "Rank 000: Var GPW_average_population_density_5km completed (35/175)\n", + "Rank 000: Writing GPW_max_population_density_25km var (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km created (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km data (36/175)\n", + "Rank 000: Var GPW_max_population_density_25km completed (36/175)\n", + "Rank 000: Writing GPW_max_population_density_5km var (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km created (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km data (37/175)\n", + "Rank 000: Var GPW_max_population_density_5km completed (37/175)\n", + "Rank 000: Writing GPW_population_density var (38/175)\n", + "Rank 000: Var GPW_population_density created (38/175)\n", + "Rank 000: Var GPW_population_density data (38/175)\n", + "Rank 000: Var GPW_population_density completed (38/175)\n", + "Rank 000: Writing GSFC_coastline_proximity var (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity created (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity data (39/175)\n", + "Rank 000: Var GSFC_coastline_proximity completed (39/175)\n", + "Rank 000: Writing Joly-Peuch_classification_code var (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code created (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code data (40/175)\n", + "Rank 000: Var Joly-Peuch_classification_code completed (40/175)\n", + "Rank 000: Writing Koppen-Geiger_classification var (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification created (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification data (41/175)\n", + "Rank 000: Var Koppen-Geiger_classification completed (41/175)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_25km var (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km created (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km data (42/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_25km completed (42/175)\n", + "Rank 000: Writing Koppen-Geiger_modal_classification_5km var (43/175)\n", + "Rank 000: Var 
Koppen-Geiger_modal_classification_5km created (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km data (43/175)\n", + "Rank 000: Var Koppen-Geiger_modal_classification_5km completed (43/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_IGBP_land_use var (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use created (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use data (44/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_IGBP_land_use completed (44/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_LAI var (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI created (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI data (45/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_LAI completed (45/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_UMD_land_use var (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use created (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use data (46/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_UMD_land_use completed (46/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_25km var (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km created (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km data (47/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_25km completed (47/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_IGBP_land_use_5km var (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km created (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km data (48/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_IGBP_land_use_5km completed (48/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_25km var (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km created (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km data (49/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_25km completed (49/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_LAI_5km var (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km created (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km data (50/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_LAI_5km completed (50/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_25km var (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km created (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km data (51/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_25km completed (51/175)\n", + "Rank 000: Writing MODIS_MCD12C1_v6_modal_UMD_land_use_5km var (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km created (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km data (52/175)\n", + "Rank 000: Var MODIS_MCD12C1_v6_modal_UMD_land_use_5km completed (52/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km var (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km created (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km data (53/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_25km completed (53/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km var (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km created (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km data (54/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_average_nighttime_stable_lights_5km completed (54/175)\n", + "Rank 000: 
Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km var (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km created (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km data (55/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_25km completed (55/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km var (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km created (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km data (56/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_max_nighttime_stable_lights_5km completed (56/175)\n", + "Rank 000: Writing NOAA-DMSP-OLS_v4_nighttime_stable_lights var (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights created (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights data (57/175)\n", + "Rank 000: Var NOAA-DMSP-OLS_v4_nighttime_stable_lights completed (57/175)\n", + "Rank 000: Writing OMI_level3_column_annual_average_NO2 var (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 created (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 data (58/175)\n", + "Rank 000: Var OMI_level3_column_annual_average_NO2 completed (58/175)\n", + "Rank 000: Writing OMI_level3_column_cloud_screened_annual_average_NO2 var (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 created (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 data (59/175)\n", + "Rank 000: Var OMI_level3_column_cloud_screened_annual_average_NO2 completed (59/175)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_annual_average_NO2 var (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 created (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 data (60/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_annual_average_NO2 completed (60/175)\n", + "Rank 000: Writing OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 var (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 created (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 data (61/175)\n", + "Rank 000: Var OMI_level3_tropospheric_column_cloud_screened_annual_average_NO2 completed (61/175)\n", + "Rank 000: Writing UMBC_anthrome_classification var (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification created (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification data (62/175)\n", + "Rank 000: Var UMBC_anthrome_classification completed (62/175)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_25km var (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km created (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km data (63/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_25km completed (63/175)\n", + "Rank 000: Writing UMBC_modal_anthrome_classification_5km var (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km created (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km data (64/175)\n", + "Rank 000: Var UMBC_modal_anthrome_classification_5km completed (64/175)\n", + "Rank 000: Writing WMO_region var (65/175)\n", + "Rank 000: Var WMO_region created (65/175)\n", + "Rank 000: Var WMO_region data (65/175)\n", + "Rank 000: Var WMO_region completed (65/175)\n", + "Rank 000: Writing WWF_TEOW_biogeographical_realm 
var (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm created (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm data (66/175)\n", + "Rank 000: Var WWF_TEOW_biogeographical_realm completed (66/175)\n", + "Rank 000: Writing WWF_TEOW_biome var (67/175)\n", + "Rank 000: Var WWF_TEOW_biome created (67/175)\n", + "Rank 000: Var WWF_TEOW_biome data (67/175)\n", + "Rank 000: Var WWF_TEOW_biome completed (67/175)\n", + "Rank 000: Writing WWF_TEOW_terrestrial_ecoregion var (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion created (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion data (68/175)\n", + "Rank 000: Var WWF_TEOW_terrestrial_ecoregion completed (68/175)\n", + "Rank 000: Writing administrative_country_division_1 var (69/175)\n", + "Rank 000: Var administrative_country_division_1 created (69/175)\n", + "Rank 000: Var administrative_country_division_1 data (69/175)\n", + "Rank 000: Var administrative_country_division_1 completed (69/175)\n", + "Rank 000: Writing administrative_country_division_2 var (70/175)\n", + "Rank 000: Var administrative_country_division_2 created (70/175)\n", + "Rank 000: Var administrative_country_division_2 data (70/175)\n", + "Rank 000: Var administrative_country_division_2 completed (70/175)\n", + "Rank 000: Writing altitude var (71/175)\n", + "Rank 000: Var altitude created (71/175)\n", + "Rank 000: Var altitude data (71/175)\n", + "Rank 000: Var altitude completed (71/175)\n", + "Rank 000: Writing annual_native_max_gap_percent var (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent created (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent data (72/175)\n", + "Rank 000: Var annual_native_max_gap_percent completed (72/175)\n", + "Rank 000: Writing annual_native_representativity_percent var (73/175)\n", + "Rank 000: Var annual_native_representativity_percent created (73/175)\n", + "Rank 000: Var annual_native_representativity_percent data (73/175)\n", + "Rank 000: Var annual_native_representativity_percent completed (73/175)\n", + "Rank 000: Writing area_classification var (74/175)\n", + "Rank 000: Var area_classification created (74/175)\n", + "Rank 000: Var area_classification data (74/175)\n", + "Rank 000: Var area_classification completed (74/175)\n", + "Rank 000: Writing associated_networks var (75/175)\n", + "Rank 000: Var associated_networks created (75/175)\n", + "Rank 000: Var associated_networks data (75/175)\n", + "Rank 000: Var associated_networks completed (75/175)\n", + "Rank 000: Writing city var (76/175)\n", + "Rank 000: Var city created (76/175)\n", + "Rank 000: Var city data (76/175)\n", + "Rank 000: Var city completed (76/175)\n", + "Rank 000: Writing climatology var (77/175)\n", + "Rank 000: Var climatology created (77/175)\n", + "Rank 000: Var climatology data (77/175)\n", + "Rank 000: Var climatology completed (77/175)\n", + "Rank 000: Writing contact_email_address var (78/175)\n", + "Rank 000: Var contact_email_address created (78/175)\n", + "Rank 000: Var contact_email_address data (78/175)\n", + "Rank 000: Var contact_email_address completed (78/175)\n", + "Rank 000: Writing contact_institution var (79/175)\n", + "Rank 000: Var contact_institution created (79/175)\n", + "Rank 000: Var contact_institution data (79/175)\n", + "Rank 000: Var contact_institution completed (79/175)\n", + "Rank 000: Writing contact_name var (80/175)\n", + "Rank 000: Var contact_name created (80/175)\n", + "Rank 000: Var contact_name data (80/175)\n", + "Rank 000: Var contact_name completed (80/175)\n", + 
"Rank 000: Writing country var (81/175)\n", + "Rank 000: Var country created (81/175)\n", + "Rank 000: Var country data (81/175)\n", + "Rank 000: Var country completed (81/175)\n", + "Rank 000: Writing daily_native_max_gap_percent var (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent created (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent data (82/175)\n", + "Rank 000: Var daily_native_max_gap_percent completed (82/175)\n", + "Rank 000: Writing daily_native_representativity_percent var (83/175)\n", + "Rank 000: Var daily_native_representativity_percent created (83/175)\n", + "Rank 000: Var daily_native_representativity_percent data (83/175)\n", + "Rank 000: Var daily_native_representativity_percent completed (83/175)\n", + "Rank 000: Writing daily_passing_vehicles var (84/175)\n", + "Rank 000: Var daily_passing_vehicles created (84/175)\n", + "Rank 000: Var daily_passing_vehicles data (84/175)\n", + "Rank 000: Var daily_passing_vehicles completed (84/175)\n", + "Rank 000: Writing data_level var (85/175)\n", + "Rank 000: Var data_level created (85/175)\n", + "Rank 000: Var data_level data (85/175)\n", + "Rank 000: Var data_level completed (85/175)\n", + "Rank 000: Writing data_licence var (86/175)\n", + "Rank 000: Var data_licence created (86/175)\n", + "Rank 000: Var data_licence data (86/175)\n", + "Rank 000: Var data_licence completed (86/175)\n", + "Rank 000: Writing day_night_code var (87/175)\n", + "Rank 000: Var day_night_code created (87/175)\n", + "Rank 000: Var day_night_code data (87/175)\n", + "Rank 000: Var day_night_code completed (87/175)\n", + "Rank 000: Writing daytime_traffic_speed var (88/175)\n", + "Rank 000: Var daytime_traffic_speed created (88/175)\n", + "Rank 000: Var daytime_traffic_speed data (88/175)\n", + "Rank 000: Var daytime_traffic_speed completed (88/175)\n", + "Rank 000: Writing derived_uncertainty_per_measurement var (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement created (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement data (89/175)\n", + "Rank 000: Var derived_uncertainty_per_measurement completed (89/175)\n", + "Rank 000: Writing distance_to_building var (90/175)\n", + "Rank 000: Var distance_to_building created (90/175)\n", + "Rank 000: Var distance_to_building data (90/175)\n", + "Rank 000: Var distance_to_building completed (90/175)\n", + "Rank 000: Writing distance_to_junction var (91/175)\n", + "Rank 000: Var distance_to_junction created (91/175)\n", + "Rank 000: Var distance_to_junction data (91/175)\n", + "Rank 000: Var distance_to_junction completed (91/175)\n", + "Rank 000: Writing distance_to_kerb var (92/175)\n", + "Rank 000: Var distance_to_kerb created (92/175)\n", + "Rank 000: Var distance_to_kerb data (92/175)\n", + "Rank 000: Var distance_to_kerb completed (92/175)\n", + "Rank 000: Writing distance_to_source var (93/175)\n", + "Rank 000: Var distance_to_source created (93/175)\n", + "Rank 000: Var distance_to_source data (93/175)\n", + "Rank 000: Var distance_to_source completed (93/175)\n", + "Rank 000: Writing ellipsoid var (94/175)\n", + "Rank 000: Var ellipsoid created (94/175)\n", + "Rank 000: Var ellipsoid data (94/175)\n", + "Rank 000: Var ellipsoid completed (94/175)\n", + "Rank 000: Writing horizontal_datum var (95/175)\n", + "Rank 000: Var horizontal_datum created (95/175)\n", + "Rank 000: Var horizontal_datum data (95/175)\n", + "Rank 000: Var horizontal_datum completed (95/175)\n", + "Rank 000: Writing hourly_native_max_gap_percent var (96/175)\n", + "Rank 000: Var 
hourly_native_max_gap_percent created (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent data (96/175)\n", + "Rank 000: Var hourly_native_max_gap_percent completed (96/175)\n", + "Rank 000: Writing hourly_native_representativity_percent var (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent created (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent data (97/175)\n", + "Rank 000: Var hourly_native_representativity_percent completed (97/175)\n", + "Rank 000: Writing land_use var (98/175)\n", + "Rank 000: Var land_use created (98/175)\n", + "Rank 000: Var land_use data (98/175)\n", + "Rank 000: Var land_use completed (98/175)\n", + "Rank 000: Writing local_time var (99/175)\n", + "Rank 000: Var local_time created (99/175)\n", + "Rank 000: Var local_time data (99/175)\n", + "Rank 000: Var local_time completed (99/175)\n", + "Rank 000: Writing main_emission_source var (100/175)\n", + "Rank 000: Var main_emission_source created (100/175)\n", + "Rank 000: Var main_emission_source data (100/175)\n", + "Rank 000: Var main_emission_source completed (100/175)\n", + "Rank 000: Writing mean_solar_time var (101/175)\n", + "Rank 000: Var mean_solar_time created (101/175)\n", + "Rank 000: Var mean_solar_time data (101/175)\n", + "Rank 000: Var mean_solar_time completed (101/175)\n", + "Rank 000: Writing measurement_altitude var (102/175)\n", + "Rank 000: Var measurement_altitude created (102/175)\n", + "Rank 000: Var measurement_altitude data (102/175)\n", + "Rank 000: Var measurement_altitude completed (102/175)\n", + "Rank 000: Writing measurement_methodology var (103/175)\n", + "Rank 000: Var measurement_methodology created (103/175)\n", + "Rank 000: Var measurement_methodology data (103/175)\n", + "Rank 000: Var measurement_methodology completed (103/175)\n", + "Rank 000: Writing measurement_scale var (104/175)\n", + "Rank 000: Var measurement_scale created (104/175)\n", + "Rank 000: Var measurement_scale data (104/175)\n", + "Rank 000: Var measurement_scale completed (104/175)\n", + "Rank 000: Writing measuring_instrument_calibration_scale var (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale created (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale data (105/175)\n", + "Rank 000: Var measuring_instrument_calibration_scale completed (105/175)\n", + "Rank 000: Writing measuring_instrument_documented_absorption_cross_section var (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section created (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section data (106/175)\n", + "Rank 000: Var measuring_instrument_documented_absorption_cross_section completed (106/175)\n", + "Rank 000: Writing measuring_instrument_documented_accuracy var (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy created (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy data (107/175)\n", + "Rank 000: Var measuring_instrument_documented_accuracy completed (107/175)\n", + "Rank 000: Writing measuring_instrument_documented_flow_rate var (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate created (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate data (108/175)\n", + "Rank 000: Var measuring_instrument_documented_flow_rate completed (108/175)\n", + "Rank 000: Writing measuring_instrument_documented_lower_limit_of_detection var (109/175)\n", + "Rank 000: Var 
measuring_instrument_documented_lower_limit_of_detection created (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection data (109/175)\n", + "Rank 000: Var measuring_instrument_documented_lower_limit_of_detection completed (109/175)\n", + "Rank 000: Writing measuring_instrument_documented_measurement_resolution var (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution created (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution data (110/175)\n", + "Rank 000: Var measuring_instrument_documented_measurement_resolution completed (110/175)\n", + "Rank 000: Writing measuring_instrument_documented_precision var (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision created (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision data (111/175)\n", + "Rank 000: Var measuring_instrument_documented_precision completed (111/175)\n", + "Rank 000: Writing measuring_instrument_documented_span_drift var (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift created (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift data (112/175)\n", + "Rank 000: Var measuring_instrument_documented_span_drift completed (112/175)\n", + "Rank 000: Writing measuring_instrument_documented_uncertainty var (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty created (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty data (113/175)\n", + "Rank 000: Var measuring_instrument_documented_uncertainty completed (113/175)\n", + "Rank 000: Writing measuring_instrument_documented_upper_limit_of_detection var (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection created (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection data (114/175)\n", + "Rank 000: Var measuring_instrument_documented_upper_limit_of_detection completed (114/175)\n", + "Rank 000: Writing measuring_instrument_documented_zero_drift var (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift created (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift data (115/175)\n", + "Rank 000: Var measuring_instrument_documented_zero_drift completed (115/175)\n", + "Rank 000: Writing measuring_instrument_documented_zonal_drift var (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift created (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift data (116/175)\n", + "Rank 000: Var measuring_instrument_documented_zonal_drift completed (116/175)\n", + "Rank 000: Writing measuring_instrument_further_details var (117/175)\n", + "Rank 000: Var measuring_instrument_further_details created (117/175)\n", + "Rank 000: Var measuring_instrument_further_details data (117/175)\n", + "Rank 000: Var measuring_instrument_further_details completed (117/175)\n", + "Rank 000: Writing measuring_instrument_inlet_information var (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information created (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information data (118/175)\n", + "Rank 000: Var measuring_instrument_inlet_information completed (118/175)\n", + "Rank 000: Writing measuring_instrument_manual_name var (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name created (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name data (119/175)\n", + "Rank 000: Var measuring_instrument_manual_name completed 
(119/175)\n", + "Rank 000: Writing measuring_instrument_name var (120/175)\n", + "Rank 000: Var measuring_instrument_name created (120/175)\n", + "Rank 000: Var measuring_instrument_name data (120/175)\n", + "Rank 000: Var measuring_instrument_name completed (120/175)\n", + "Rank 000: Writing measuring_instrument_process_details var (121/175)\n", + "Rank 000: Var measuring_instrument_process_details created (121/175)\n", + "Rank 000: Var measuring_instrument_process_details data (121/175)\n", + "Rank 000: Var measuring_instrument_process_details completed (121/175)\n", + "Rank 000: Writing measuring_instrument_reported_absorption_cross_section var (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section created (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section data (122/175)\n", + "Rank 000: Var measuring_instrument_reported_absorption_cross_section completed (122/175)\n", + "Rank 000: Writing measuring_instrument_reported_accuracy var (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy created (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy data (123/175)\n", + "Rank 000: Var measuring_instrument_reported_accuracy completed (123/175)\n", + "Rank 000: Writing measuring_instrument_reported_flow_rate var (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate created (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate data (124/175)\n", + "Rank 000: Var measuring_instrument_reported_flow_rate completed (124/175)\n", + "Rank 000: Writing measuring_instrument_reported_lower_limit_of_detection var (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection created (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection data (125/175)\n", + "Rank 000: Var measuring_instrument_reported_lower_limit_of_detection completed (125/175)\n", + "Rank 000: Writing measuring_instrument_reported_measurement_resolution var (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution created (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution data (126/175)\n", + "Rank 000: Var measuring_instrument_reported_measurement_resolution completed (126/175)\n", + "Rank 000: Writing measuring_instrument_reported_precision var (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision created (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision data (127/175)\n", + "Rank 000: Var measuring_instrument_reported_precision completed (127/175)\n", + "Rank 000: Writing measuring_instrument_reported_span_drift var (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift created (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift data (128/175)\n", + "Rank 000: Var measuring_instrument_reported_span_drift completed (128/175)\n", + "Rank 000: Writing measuring_instrument_reported_uncertainty var (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty created (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty data (129/175)\n", + "Rank 000: Var measuring_instrument_reported_uncertainty completed (129/175)\n", + "Rank 000: Writing measuring_instrument_reported_units var (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units created (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units data (130/175)\n", + "Rank 000: Var measuring_instrument_reported_units 
completed (130/175)\n", + "Rank 000: Writing measuring_instrument_reported_upper_limit_of_detection var (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection created (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection data (131/175)\n", + "Rank 000: Var measuring_instrument_reported_upper_limit_of_detection completed (131/175)\n", + "Rank 000: Writing measuring_instrument_reported_zero_drift var (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift created (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift data (132/175)\n", + "Rank 000: Var measuring_instrument_reported_zero_drift completed (132/175)\n", + "Rank 000: Writing measuring_instrument_reported_zonal_drift var (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift created (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift data (133/175)\n", + "Rank 000: Var measuring_instrument_reported_zonal_drift completed (133/175)\n", + "Rank 000: Writing measuring_instrument_sampling_type var (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type created (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type data (134/175)\n", + "Rank 000: Var measuring_instrument_sampling_type completed (134/175)\n", + "Rank 000: Writing monthly_native_max_gap_percent var (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent created (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent data (135/175)\n", + "Rank 000: Var monthly_native_max_gap_percent completed (135/175)\n", + "Rank 000: Writing monthly_native_representativity_percent var (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent created (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent data (136/175)\n", + "Rank 000: Var monthly_native_representativity_percent completed (136/175)\n", + "Rank 000: Writing network var (137/175)\n", + "Rank 000: Var network created (137/175)\n", + "Rank 000: Var network data (137/175)\n", + "Rank 000: Var network completed (137/175)\n", + "Rank 000: Writing network_maintenance_details var (138/175)\n", + "Rank 000: Var network_maintenance_details created (138/175)\n", + "Rank 000: Var network_maintenance_details data (138/175)\n", + "Rank 000: Var network_maintenance_details completed (138/175)\n", + "Rank 000: Writing network_miscellaneous_details var (139/175)\n", + "Rank 000: Var network_miscellaneous_details created (139/175)\n", + "Rank 000: Var network_miscellaneous_details data (139/175)\n", + "Rank 000: Var network_miscellaneous_details completed (139/175)\n", + "Rank 000: Writing network_provided_volume_standard_pressure var (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure created (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure data (140/175)\n", + "Rank 000: Var network_provided_volume_standard_pressure completed (140/175)\n", + "Rank 000: Writing network_provided_volume_standard_temperature var (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature created (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature data (141/175)\n", + "Rank 000: Var network_provided_volume_standard_temperature completed (141/175)\n", + "Rank 000: Writing network_qa_details var (142/175)\n", + "Rank 000: Var network_qa_details created (142/175)\n", + "Rank 000: Var network_qa_details data (142/175)\n", + "Rank 000: Var network_qa_details completed (142/175)\n", + 
"Rank 000: Writing network_sampling_details var (143/175)\n", + "Rank 000: Var network_sampling_details created (143/175)\n", + "Rank 000: Var network_sampling_details data (143/175)\n", + "Rank 000: Var network_sampling_details completed (143/175)\n", + "Rank 000: Writing network_uncertainty_details var (144/175)\n", + "Rank 000: Var network_uncertainty_details created (144/175)\n", + "Rank 000: Var network_uncertainty_details data (144/175)\n", + "Rank 000: Var network_uncertainty_details completed (144/175)\n", + "Rank 000: Writing population var (145/175)\n", + "Rank 000: Var population created (145/175)\n", + "Rank 000: Var population data (145/175)\n", + "Rank 000: Var population completed (145/175)\n", + "Rank 000: Writing primary_sampling_further_details var (146/175)\n", + "Rank 000: Var primary_sampling_further_details created (146/175)\n", + "Rank 000: Var primary_sampling_further_details data (146/175)\n", + "Rank 000: Var primary_sampling_further_details completed (146/175)\n", + "Rank 000: Writing primary_sampling_instrument_documented_flow_rate var (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate created (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate data (147/175)\n", + "Rank 000: Var primary_sampling_instrument_documented_flow_rate completed (147/175)\n", + "Rank 000: Writing primary_sampling_instrument_manual_name var (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name created (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name data (148/175)\n", + "Rank 000: Var primary_sampling_instrument_manual_name completed (148/175)\n", + "Rank 000: Writing primary_sampling_instrument_name var (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name created (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name data (149/175)\n", + "Rank 000: Var primary_sampling_instrument_name completed (149/175)\n", + "Rank 000: Writing primary_sampling_instrument_reported_flow_rate var (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate created (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate data (150/175)\n", + "Rank 000: Var primary_sampling_instrument_reported_flow_rate completed (150/175)\n", + "Rank 000: Writing primary_sampling_process_details var (151/175)\n", + "Rank 000: Var primary_sampling_process_details created (151/175)\n", + "Rank 000: Var primary_sampling_process_details data (151/175)\n", + "Rank 000: Var primary_sampling_process_details completed (151/175)\n", + "Rank 000: Writing primary_sampling_type var (152/175)\n", + "Rank 000: Var primary_sampling_type created (152/175)\n", + "Rank 000: Var primary_sampling_type data (152/175)\n", + "Rank 000: Var primary_sampling_type completed (152/175)\n", + "Rank 000: Writing principal_investigator_email_address var (153/175)\n", + "Rank 000: Var principal_investigator_email_address created (153/175)\n", + "Rank 000: Var principal_investigator_email_address data (153/175)\n", + "Rank 000: Var principal_investigator_email_address completed (153/175)\n", + "Rank 000: Writing principal_investigator_institution var (154/175)\n", + "Rank 000: Var principal_investigator_institution created (154/175)\n", + "Rank 000: Var principal_investigator_institution data (154/175)\n", + "Rank 000: Var principal_investigator_institution completed (154/175)\n", + "Rank 000: Writing principal_investigator_name var (155/175)\n", + "Rank 000: Var principal_investigator_name created 
(155/175)\n", + "Rank 000: Var principal_investigator_name data (155/175)\n", + "Rank 000: Var principal_investigator_name completed (155/175)\n", + "Rank 000: Writing process_warnings var (156/175)\n", + "Rank 000: Var process_warnings created (156/175)\n", + "Rank 000: Var process_warnings data (156/175)\n", + "Rank 000: Var process_warnings completed (156/175)\n", + "Rank 000: Writing projection var (157/175)\n", + "Rank 000: Var projection created (157/175)\n", + "Rank 000: Var projection data (157/175)\n", + "Rank 000: Var projection completed (157/175)\n", + "Rank 000: Writing reported_uncertainty_per_measurement var (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement created (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement data (158/175)\n", + "Rank 000: Var reported_uncertainty_per_measurement completed (158/175)\n", + "Rank 000: Writing representative_radius var (159/175)\n", + "Rank 000: Var representative_radius created (159/175)\n", + "Rank 000: Var representative_radius data (159/175)\n", + "Rank 000: Var representative_radius completed (159/175)\n", + "Rank 000: Writing sample_preparation_further_details var (160/175)\n", + "Rank 000: Var sample_preparation_further_details created (160/175)\n", + "Rank 000: Var sample_preparation_further_details data (160/175)\n", + "Rank 000: Var sample_preparation_further_details completed (160/175)\n", + "Rank 000: Writing sample_preparation_process_details var (161/175)\n", + "Rank 000: Var sample_preparation_process_details created (161/175)\n", + "Rank 000: Var sample_preparation_process_details data (161/175)\n", + "Rank 000: Var sample_preparation_process_details completed (161/175)\n", + "Rank 000: Writing sample_preparation_techniques var (162/175)\n", + "Rank 000: Var sample_preparation_techniques created (162/175)\n", + "Rank 000: Var sample_preparation_techniques data (162/175)\n", + "Rank 000: Var sample_preparation_techniques completed (162/175)\n", + "Rank 000: Writing sample_preparation_types var (163/175)\n", + "Rank 000: Var sample_preparation_types created (163/175)\n", + "Rank 000: Var sample_preparation_types data (163/175)\n", + "Rank 000: Var sample_preparation_types completed (163/175)\n", + "Rank 000: Writing sampling_height var (164/175)\n", + "Rank 000: Var sampling_height created (164/175)\n", + "Rank 000: Var sampling_height data (164/175)\n", + "Rank 000: Var sampling_height completed (164/175)\n", + "Rank 000: Writing sconco3 var (165/175)\n", + "Rank 000: Var sconco3 created (165/175)\n", + "Rank 000: Var sconco3 data (165/175)\n", + "Rank 000: Var sconco3 completed (165/175)\n", + "Rank 000: Writing season_code var (166/175)\n", + "Rank 000: Var season_code created (166/175)\n", + "Rank 000: Var season_code data (166/175)\n", + "Rank 000: Var season_code completed (166/175)\n", + "Rank 000: Writing station_classification var (167/175)\n", + "Rank 000: Var station_classification created (167/175)\n", + "Rank 000: Var station_classification data (167/175)\n", + "Rank 000: Var station_classification completed (167/175)\n", + "Rank 000: Writing station_name var (168/175)\n", + "Rank 000: Var station_name created (168/175)\n", + "Rank 000: Var station_name data (168/175)\n", + "Rank 000: Var station_name completed (168/175)\n", + "Rank 000: Writing station_reference var (169/175)\n", + "Rank 000: Var station_reference created (169/175)\n", + "Rank 000: Var station_reference data (169/175)\n", + "Rank 000: Var station_reference completed (169/175)\n", + "Rank 000: Writing 
station_timezone var (170/175)\n", + "Rank 000: Var station_timezone created (170/175)\n", + "Rank 000: Var station_timezone data (170/175)\n", + "Rank 000: Var station_timezone completed (170/175)\n", + "Rank 000: Writing street_type var (171/175)\n", + "Rank 000: Var street_type created (171/175)\n", + "Rank 000: Var street_type data (171/175)\n", + "Rank 000: Var street_type completed (171/175)\n", + "Rank 000: Writing street_width var (172/175)\n", + "Rank 000: Var street_width created (172/175)\n", + "Rank 000: Var street_width data (172/175)\n", + "Rank 000: Var street_width completed (172/175)\n", + "Rank 000: Writing terrain var (173/175)\n", + "Rank 000: Var terrain created (173/175)\n", + "Rank 000: Var terrain data (173/175)\n", + "Rank 000: Var terrain completed (173/175)\n", + "Rank 000: Writing vertical_datum var (174/175)\n", + "Rank 000: Var vertical_datum created (174/175)\n", + "Rank 000: Var vertical_datum data (174/175)\n", + "Rank 000: Var vertical_datum completed (174/175)\n", + "Rank 000: Writing weekday_weekend_code var (175/175)\n", + "Rank 000: Var weekday_weekend_code created (175/175)\n", + "Rank 000: Var weekday_weekend_code data (175/175)\n", + "Rank 000: Var weekday_weekend_code completed (175/175)\n" + ] + } + ], + "source": [ + "nessy_1.to_netcdf('providentia_obs_file.nc', info=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Experiments dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "exp_path = '/gpfs/projects/bsc32/AC_cache/recon/exp_interp/1.3.3/cams61_chimere_ph2-eu-000/hourly/sconco3/EBAS/sconco3_201804.nc'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Read" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset>\n",
+       "Dimensions:                 (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n",
+       "Coordinates:\n",
+       "  * time                    (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n",
+       "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n",
+       "Data variables:\n",
+       "    grid_edge_latitude      (grid_edge) float64 29.9 30.1 30.3 ... 29.9 29.9\n",
+       "    grid_edge_longitude     (grid_edge) float64 -25.1 -25.1 ... -24.9 -25.1\n",
+       "    latitude                (station) float64 -64.24 -54.85 ... 21.57 -34.35\n",
+       "    longitude               (station) float64 -56.62 -68.31 ... 103.5 18.49\n",
+       "    model_centre_latitude   (model_latitude, model_longitude) float64 30.0 .....\n",
+       "    model_centre_longitude  (model_latitude, model_longitude) float64 -25.0 ....\n",
+       "    sconco3                 (station, time) float32 ...\n",
+       "    station_reference       (station) object 'AR0001R_UVP' ... 'ZA0001G_UVP'\n",
+       "Attributes:\n",
+       "    title:          Inverse distance weighting (4 neighbours) interpolated ca...\n",
+       "    institution:    Barcelona Supercomputing Center\n",
+       "    source:         Experiment cams61_chimere_ph2\n",
+       "    creator_name:   Dene R. Bowdalo\n",
+       "    creator_email:  dene.bowdalo@bsc.es\n",
+       "    conventions:    CF-1.7\n",
+       "    data_version:   1.0\n",
+       "    history:        Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n",
+       "    NCO:            4.7.2
" + ], + "text/plain": [ + "\n", + "Dimensions: (grid_edge: 1125, station: 175, model_latitude: 211, model_longitude: 351, time: 720)\n", + "Coordinates:\n", + " * time (time) datetime64[ns] 2018-04-01 ... 2018-04-30T2...\n", + "Dimensions without coordinates: grid_edge, station, model_latitude, model_longitude\n", + "Data variables:\n", + " grid_edge_latitude (grid_edge) float64 ...\n", + " grid_edge_longitude (grid_edge) float64 ...\n", + " latitude (station) float64 ...\n", + " longitude (station) float64 ...\n", + " model_centre_latitude (model_latitude, model_longitude) float64 ...\n", + " model_centre_longitude (model_latitude, model_longitude) float64 ...\n", + " sconco3 (station, time) float32 ...\n", + " station_reference (station) object ...\n", + "Attributes:\n", + " title: Inverse distance weighting (4 neighbours) interpolated ca...\n", + " institution: Barcelona Supercomputing Center\n", + " source: Experiment cams61_chimere_ph2\n", + " creator_name: Dene R. Bowdalo\n", + " creator_email: dene.bowdalo@bsc.es\n", + " conventions: CF-1.7\n", + " data_version: 1.0\n", + " history: Thu Feb 11 10:19:01 2021: ncks -O --dfl_lvl 1 /gpfs/proje...\n", + " NCO: 4.7.2" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xr.open_dataset(exp_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2 = open_netcdf(path=exp_path, info=True, parallel_method='X')\n", + "nessy_2" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[datetime.datetime(2018, 4, 1, 0, 0),\n", + " datetime.datetime(2018, 4, 1, 1, 0),\n", + " datetime.datetime(2018, 4, 1, 2, 0),\n", + " datetime.datetime(2018, 4, 1, 3, 0),\n", + " datetime.datetime(2018, 4, 1, 4, 0),\n", + " datetime.datetime(2018, 4, 1, 5, 0),\n", + " datetime.datetime(2018, 4, 1, 6, 0),\n", + " datetime.datetime(2018, 4, 1, 7, 0),\n", + " datetime.datetime(2018, 4, 1, 8, 0),\n", + " datetime.datetime(2018, 4, 1, 9, 0),\n", + " datetime.datetime(2018, 4, 1, 10, 0),\n", + " datetime.datetime(2018, 4, 1, 11, 0),\n", + " datetime.datetime(2018, 4, 1, 12, 0),\n", + " datetime.datetime(2018, 4, 1, 13, 0),\n", + " datetime.datetime(2018, 4, 1, 14, 0),\n", + " datetime.datetime(2018, 4, 1, 15, 0),\n", + " datetime.datetime(2018, 4, 1, 16, 0),\n", + " datetime.datetime(2018, 4, 1, 17, 0),\n", + " datetime.datetime(2018, 4, 1, 18, 0),\n", + " datetime.datetime(2018, 4, 1, 19, 0),\n", + " datetime.datetime(2018, 4, 1, 20, 0),\n", + " datetime.datetime(2018, 4, 1, 21, 0),\n", + " datetime.datetime(2018, 4, 1, 22, 0),\n", + " datetime.datetime(2018, 4, 1, 23, 0),\n", + " datetime.datetime(2018, 4, 2, 0, 0),\n", + " datetime.datetime(2018, 4, 2, 1, 0),\n", + " datetime.datetime(2018, 4, 2, 2, 0),\n", + " datetime.datetime(2018, 4, 2, 3, 0),\n", + " datetime.datetime(2018, 4, 2, 4, 0),\n", + " datetime.datetime(2018, 4, 2, 5, 0),\n", + " datetime.datetime(2018, 4, 2, 6, 0),\n", + " datetime.datetime(2018, 4, 2, 7, 0),\n", + " datetime.datetime(2018, 4, 2, 8, 0),\n", + " datetime.datetime(2018, 4, 2, 9, 0),\n", + " datetime.datetime(2018, 4, 2, 10, 0),\n", + " datetime.datetime(2018, 4, 2, 11, 0),\n", + " datetime.datetime(2018, 4, 2, 12, 0),\n", + " datetime.datetime(2018, 4, 2, 13, 0),\n", + " datetime.datetime(2018, 4, 2, 14, 
0),\n", + " datetime.datetime(2018, 4, 2, 15, 0),\n", + " datetime.datetime(2018, 4, 2, 16, 0),\n", + " datetime.datetime(2018, 4, 2, 17, 0),\n", + " datetime.datetime(2018, 4, 2, 18, 0),\n", + " datetime.datetime(2018, 4, 2, 19, 0),\n", + " datetime.datetime(2018, 4, 2, 20, 0),\n", + " datetime.datetime(2018, 4, 2, 21, 0),\n", + " datetime.datetime(2018, 4, 2, 22, 0),\n", + " datetime.datetime(2018, 4, 2, 23, 0),\n", + " datetime.datetime(2018, 4, 3, 0, 0),\n", + " datetime.datetime(2018, 4, 3, 1, 0),\n", + " datetime.datetime(2018, 4, 3, 2, 0),\n", + " datetime.datetime(2018, 4, 3, 3, 0),\n", + " datetime.datetime(2018, 4, 3, 4, 0),\n", + " datetime.datetime(2018, 4, 3, 5, 0),\n", + " datetime.datetime(2018, 4, 3, 6, 0),\n", + " datetime.datetime(2018, 4, 3, 7, 0),\n", + " datetime.datetime(2018, 4, 3, 8, 0),\n", + " datetime.datetime(2018, 4, 3, 9, 0),\n", + " datetime.datetime(2018, 4, 3, 10, 0),\n", + " datetime.datetime(2018, 4, 3, 11, 0),\n", + " datetime.datetime(2018, 4, 3, 12, 0),\n", + " datetime.datetime(2018, 4, 3, 13, 0),\n", + " datetime.datetime(2018, 4, 3, 14, 0),\n", + " datetime.datetime(2018, 4, 3, 15, 0),\n", + " datetime.datetime(2018, 4, 3, 16, 0),\n", + " datetime.datetime(2018, 4, 3, 17, 0),\n", + " datetime.datetime(2018, 4, 3, 18, 0),\n", + " datetime.datetime(2018, 4, 3, 19, 0),\n", + " datetime.datetime(2018, 4, 3, 20, 0),\n", + " datetime.datetime(2018, 4, 3, 21, 0),\n", + " datetime.datetime(2018, 4, 3, 22, 0),\n", + " datetime.datetime(2018, 4, 3, 23, 0),\n", + " datetime.datetime(2018, 4, 4, 0, 0),\n", + " datetime.datetime(2018, 4, 4, 1, 0),\n", + " datetime.datetime(2018, 4, 4, 2, 0),\n", + " datetime.datetime(2018, 4, 4, 3, 0),\n", + " datetime.datetime(2018, 4, 4, 4, 0),\n", + " datetime.datetime(2018, 4, 4, 5, 0),\n", + " datetime.datetime(2018, 4, 4, 6, 0),\n", + " datetime.datetime(2018, 4, 4, 7, 0),\n", + " datetime.datetime(2018, 4, 4, 8, 0),\n", + " datetime.datetime(2018, 4, 4, 9, 0),\n", + " datetime.datetime(2018, 4, 4, 10, 0),\n", + " datetime.datetime(2018, 4, 4, 11, 0),\n", + " datetime.datetime(2018, 4, 4, 12, 0),\n", + " datetime.datetime(2018, 4, 4, 13, 0),\n", + " datetime.datetime(2018, 4, 4, 14, 0),\n", + " datetime.datetime(2018, 4, 4, 15, 0),\n", + " datetime.datetime(2018, 4, 4, 16, 0),\n", + " datetime.datetime(2018, 4, 4, 17, 0),\n", + " datetime.datetime(2018, 4, 4, 18, 0),\n", + " datetime.datetime(2018, 4, 4, 19, 0),\n", + " datetime.datetime(2018, 4, 4, 20, 0),\n", + " datetime.datetime(2018, 4, 4, 21, 0),\n", + " datetime.datetime(2018, 4, 4, 22, 0),\n", + " datetime.datetime(2018, 4, 4, 23, 0),\n", + " datetime.datetime(2018, 4, 5, 0, 0),\n", + " datetime.datetime(2018, 4, 5, 1, 0),\n", + " datetime.datetime(2018, 4, 5, 2, 0),\n", + " datetime.datetime(2018, 4, 5, 3, 0),\n", + " datetime.datetime(2018, 4, 5, 4, 0),\n", + " datetime.datetime(2018, 4, 5, 5, 0),\n", + " datetime.datetime(2018, 4, 5, 6, 0),\n", + " datetime.datetime(2018, 4, 5, 7, 0),\n", + " datetime.datetime(2018, 4, 5, 8, 0),\n", + " datetime.datetime(2018, 4, 5, 9, 0),\n", + " datetime.datetime(2018, 4, 5, 10, 0),\n", + " datetime.datetime(2018, 4, 5, 11, 0),\n", + " datetime.datetime(2018, 4, 5, 12, 0),\n", + " datetime.datetime(2018, 4, 5, 13, 0),\n", + " datetime.datetime(2018, 4, 5, 14, 0),\n", + " datetime.datetime(2018, 4, 5, 15, 0),\n", + " datetime.datetime(2018, 4, 5, 16, 0),\n", + " datetime.datetime(2018, 4, 5, 17, 0),\n", + " datetime.datetime(2018, 4, 5, 18, 0),\n", + " datetime.datetime(2018, 4, 5, 19, 0),\n", + " datetime.datetime(2018, 4, 5, 
20, 0),\n", + " datetime.datetime(2018, 4, 5, 21, 0),\n", + " datetime.datetime(2018, 4, 5, 22, 0),\n", + " datetime.datetime(2018, 4, 5, 23, 0),\n", + " datetime.datetime(2018, 4, 6, 0, 0),\n", + " datetime.datetime(2018, 4, 6, 1, 0),\n", + " datetime.datetime(2018, 4, 6, 2, 0),\n", + " datetime.datetime(2018, 4, 6, 3, 0),\n", + " datetime.datetime(2018, 4, 6, 4, 0),\n", + " datetime.datetime(2018, 4, 6, 5, 0),\n", + " datetime.datetime(2018, 4, 6, 6, 0),\n", + " datetime.datetime(2018, 4, 6, 7, 0),\n", + " datetime.datetime(2018, 4, 6, 8, 0),\n", + " datetime.datetime(2018, 4, 6, 9, 0),\n", + " datetime.datetime(2018, 4, 6, 10, 0),\n", + " datetime.datetime(2018, 4, 6, 11, 0),\n", + " datetime.datetime(2018, 4, 6, 12, 0),\n", + " datetime.datetime(2018, 4, 6, 13, 0),\n", + " datetime.datetime(2018, 4, 6, 14, 0),\n", + " datetime.datetime(2018, 4, 6, 15, 0),\n", + " datetime.datetime(2018, 4, 6, 16, 0),\n", + " datetime.datetime(2018, 4, 6, 17, 0),\n", + " datetime.datetime(2018, 4, 6, 18, 0),\n", + " datetime.datetime(2018, 4, 6, 19, 0),\n", + " datetime.datetime(2018, 4, 6, 20, 0),\n", + " datetime.datetime(2018, 4, 6, 21, 0),\n", + " datetime.datetime(2018, 4, 6, 22, 0),\n", + " datetime.datetime(2018, 4, 6, 23, 0),\n", + " datetime.datetime(2018, 4, 7, 0, 0),\n", + " datetime.datetime(2018, 4, 7, 1, 0),\n", + " datetime.datetime(2018, 4, 7, 2, 0),\n", + " datetime.datetime(2018, 4, 7, 3, 0),\n", + " datetime.datetime(2018, 4, 7, 4, 0),\n", + " datetime.datetime(2018, 4, 7, 5, 0),\n", + " datetime.datetime(2018, 4, 7, 6, 0),\n", + " datetime.datetime(2018, 4, 7, 7, 0),\n", + " datetime.datetime(2018, 4, 7, 8, 0),\n", + " datetime.datetime(2018, 4, 7, 9, 0),\n", + " datetime.datetime(2018, 4, 7, 10, 0),\n", + " datetime.datetime(2018, 4, 7, 11, 0),\n", + " datetime.datetime(2018, 4, 7, 12, 0),\n", + " datetime.datetime(2018, 4, 7, 13, 0),\n", + " datetime.datetime(2018, 4, 7, 14, 0),\n", + " datetime.datetime(2018, 4, 7, 15, 0),\n", + " datetime.datetime(2018, 4, 7, 16, 0),\n", + " datetime.datetime(2018, 4, 7, 17, 0),\n", + " datetime.datetime(2018, 4, 7, 18, 0),\n", + " datetime.datetime(2018, 4, 7, 19, 0),\n", + " datetime.datetime(2018, 4, 7, 20, 0),\n", + " datetime.datetime(2018, 4, 7, 21, 0),\n", + " datetime.datetime(2018, 4, 7, 22, 0),\n", + " datetime.datetime(2018, 4, 7, 23, 0),\n", + " datetime.datetime(2018, 4, 8, 0, 0),\n", + " datetime.datetime(2018, 4, 8, 1, 0),\n", + " datetime.datetime(2018, 4, 8, 2, 0),\n", + " datetime.datetime(2018, 4, 8, 3, 0),\n", + " datetime.datetime(2018, 4, 8, 4, 0),\n", + " datetime.datetime(2018, 4, 8, 5, 0),\n", + " datetime.datetime(2018, 4, 8, 6, 0),\n", + " datetime.datetime(2018, 4, 8, 7, 0),\n", + " datetime.datetime(2018, 4, 8, 8, 0),\n", + " datetime.datetime(2018, 4, 8, 9, 0),\n", + " datetime.datetime(2018, 4, 8, 10, 0),\n", + " datetime.datetime(2018, 4, 8, 11, 0),\n", + " datetime.datetime(2018, 4, 8, 12, 0),\n", + " datetime.datetime(2018, 4, 8, 13, 0),\n", + " datetime.datetime(2018, 4, 8, 14, 0),\n", + " datetime.datetime(2018, 4, 8, 15, 0),\n", + " datetime.datetime(2018, 4, 8, 16, 0),\n", + " datetime.datetime(2018, 4, 8, 17, 0),\n", + " datetime.datetime(2018, 4, 8, 18, 0),\n", + " datetime.datetime(2018, 4, 8, 19, 0),\n", + " datetime.datetime(2018, 4, 8, 20, 0),\n", + " datetime.datetime(2018, 4, 8, 21, 0),\n", + " datetime.datetime(2018, 4, 8, 22, 0),\n", + " datetime.datetime(2018, 4, 8, 23, 0),\n", + " datetime.datetime(2018, 4, 9, 0, 0),\n", + " datetime.datetime(2018, 4, 9, 1, 0),\n", + " datetime.datetime(2018, 4, 
9, 2, 0),\n", + " datetime.datetime(2018, 4, 9, 3, 0),\n", + " datetime.datetime(2018, 4, 9, 4, 0),\n", + " datetime.datetime(2018, 4, 9, 5, 0),\n", + " datetime.datetime(2018, 4, 9, 6, 0),\n", + " datetime.datetime(2018, 4, 9, 7, 0),\n", + " datetime.datetime(2018, 4, 9, 8, 0),\n", + " datetime.datetime(2018, 4, 9, 9, 0),\n", + " datetime.datetime(2018, 4, 9, 10, 0),\n", + " datetime.datetime(2018, 4, 9, 11, 0),\n", + " datetime.datetime(2018, 4, 9, 12, 0),\n", + " datetime.datetime(2018, 4, 9, 13, 0),\n", + " datetime.datetime(2018, 4, 9, 14, 0),\n", + " datetime.datetime(2018, 4, 9, 15, 0),\n", + " datetime.datetime(2018, 4, 9, 16, 0),\n", + " datetime.datetime(2018, 4, 9, 17, 0),\n", + " datetime.datetime(2018, 4, 9, 18, 0),\n", + " datetime.datetime(2018, 4, 9, 19, 0),\n", + " datetime.datetime(2018, 4, 9, 20, 0),\n", + " datetime.datetime(2018, 4, 9, 21, 0),\n", + " datetime.datetime(2018, 4, 9, 22, 0),\n", + " datetime.datetime(2018, 4, 9, 23, 0),\n", + " datetime.datetime(2018, 4, 10, 0, 0),\n", + " datetime.datetime(2018, 4, 10, 1, 0),\n", + " datetime.datetime(2018, 4, 10, 2, 0),\n", + " datetime.datetime(2018, 4, 10, 3, 0),\n", + " datetime.datetime(2018, 4, 10, 4, 0),\n", + " datetime.datetime(2018, 4, 10, 5, 0),\n", + " datetime.datetime(2018, 4, 10, 6, 0),\n", + " datetime.datetime(2018, 4, 10, 7, 0),\n", + " datetime.datetime(2018, 4, 10, 8, 0),\n", + " datetime.datetime(2018, 4, 10, 9, 0),\n", + " datetime.datetime(2018, 4, 10, 10, 0),\n", + " datetime.datetime(2018, 4, 10, 11, 0),\n", + " datetime.datetime(2018, 4, 10, 12, 0),\n", + " datetime.datetime(2018, 4, 10, 13, 0),\n", + " datetime.datetime(2018, 4, 10, 14, 0),\n", + " datetime.datetime(2018, 4, 10, 15, 0),\n", + " datetime.datetime(2018, 4, 10, 16, 0),\n", + " datetime.datetime(2018, 4, 10, 17, 0),\n", + " datetime.datetime(2018, 4, 10, 18, 0),\n", + " datetime.datetime(2018, 4, 10, 19, 0),\n", + " datetime.datetime(2018, 4, 10, 20, 0),\n", + " datetime.datetime(2018, 4, 10, 21, 0),\n", + " datetime.datetime(2018, 4, 10, 22, 0),\n", + " datetime.datetime(2018, 4, 10, 23, 0),\n", + " datetime.datetime(2018, 4, 11, 0, 0),\n", + " datetime.datetime(2018, 4, 11, 1, 0),\n", + " datetime.datetime(2018, 4, 11, 2, 0),\n", + " datetime.datetime(2018, 4, 11, 3, 0),\n", + " datetime.datetime(2018, 4, 11, 4, 0),\n", + " datetime.datetime(2018, 4, 11, 5, 0),\n", + " datetime.datetime(2018, 4, 11, 6, 0),\n", + " datetime.datetime(2018, 4, 11, 7, 0),\n", + " datetime.datetime(2018, 4, 11, 8, 0),\n", + " datetime.datetime(2018, 4, 11, 9, 0),\n", + " datetime.datetime(2018, 4, 11, 10, 0),\n", + " datetime.datetime(2018, 4, 11, 11, 0),\n", + " datetime.datetime(2018, 4, 11, 12, 0),\n", + " datetime.datetime(2018, 4, 11, 13, 0),\n", + " datetime.datetime(2018, 4, 11, 14, 0),\n", + " datetime.datetime(2018, 4, 11, 15, 0),\n", + " datetime.datetime(2018, 4, 11, 16, 0),\n", + " datetime.datetime(2018, 4, 11, 17, 0),\n", + " datetime.datetime(2018, 4, 11, 18, 0),\n", + " datetime.datetime(2018, 4, 11, 19, 0),\n", + " datetime.datetime(2018, 4, 11, 20, 0),\n", + " datetime.datetime(2018, 4, 11, 21, 0),\n", + " datetime.datetime(2018, 4, 11, 22, 0),\n", + " datetime.datetime(2018, 4, 11, 23, 0),\n", + " datetime.datetime(2018, 4, 12, 0, 0),\n", + " datetime.datetime(2018, 4, 12, 1, 0),\n", + " datetime.datetime(2018, 4, 12, 2, 0),\n", + " datetime.datetime(2018, 4, 12, 3, 0),\n", + " datetime.datetime(2018, 4, 12, 4, 0),\n", + " datetime.datetime(2018, 4, 12, 5, 0),\n", + " datetime.datetime(2018, 4, 12, 6, 0),\n", + " 
datetime.datetime(2018, 4, 12, 7, 0),\n", + " datetime.datetime(2018, 4, 12, 8, 0),\n", + " datetime.datetime(2018, 4, 12, 9, 0),\n", + " datetime.datetime(2018, 4, 12, 10, 0),\n", + " datetime.datetime(2018, 4, 12, 11, 0),\n", + " datetime.datetime(2018, 4, 12, 12, 0),\n", + " datetime.datetime(2018, 4, 12, 13, 0),\n", + " datetime.datetime(2018, 4, 12, 14, 0),\n", + " datetime.datetime(2018, 4, 12, 15, 0),\n", + " datetime.datetime(2018, 4, 12, 16, 0),\n", + " datetime.datetime(2018, 4, 12, 17, 0),\n", + " datetime.datetime(2018, 4, 12, 18, 0),\n", + " datetime.datetime(2018, 4, 12, 19, 0),\n", + " datetime.datetime(2018, 4, 12, 20, 0),\n", + " datetime.datetime(2018, 4, 12, 21, 0),\n", + " datetime.datetime(2018, 4, 12, 22, 0),\n", + " datetime.datetime(2018, 4, 12, 23, 0),\n", + " datetime.datetime(2018, 4, 13, 0, 0),\n", + " datetime.datetime(2018, 4, 13, 1, 0),\n", + " datetime.datetime(2018, 4, 13, 2, 0),\n", + " datetime.datetime(2018, 4, 13, 3, 0),\n", + " datetime.datetime(2018, 4, 13, 4, 0),\n", + " datetime.datetime(2018, 4, 13, 5, 0),\n", + " datetime.datetime(2018, 4, 13, 6, 0),\n", + " datetime.datetime(2018, 4, 13, 7, 0),\n", + " datetime.datetime(2018, 4, 13, 8, 0),\n", + " datetime.datetime(2018, 4, 13, 9, 0),\n", + " datetime.datetime(2018, 4, 13, 10, 0),\n", + " datetime.datetime(2018, 4, 13, 11, 0),\n", + " datetime.datetime(2018, 4, 13, 12, 0),\n", + " datetime.datetime(2018, 4, 13, 13, 0),\n", + " datetime.datetime(2018, 4, 13, 14, 0),\n", + " datetime.datetime(2018, 4, 13, 15, 0),\n", + " datetime.datetime(2018, 4, 13, 16, 0),\n", + " datetime.datetime(2018, 4, 13, 17, 0),\n", + " datetime.datetime(2018, 4, 13, 18, 0),\n", + " datetime.datetime(2018, 4, 13, 19, 0),\n", + " datetime.datetime(2018, 4, 13, 20, 0),\n", + " datetime.datetime(2018, 4, 13, 21, 0),\n", + " datetime.datetime(2018, 4, 13, 22, 0),\n", + " datetime.datetime(2018, 4, 13, 23, 0),\n", + " datetime.datetime(2018, 4, 14, 0, 0),\n", + " datetime.datetime(2018, 4, 14, 1, 0),\n", + " datetime.datetime(2018, 4, 14, 2, 0),\n", + " datetime.datetime(2018, 4, 14, 3, 0),\n", + " datetime.datetime(2018, 4, 14, 4, 0),\n", + " datetime.datetime(2018, 4, 14, 5, 0),\n", + " datetime.datetime(2018, 4, 14, 6, 0),\n", + " datetime.datetime(2018, 4, 14, 7, 0),\n", + " datetime.datetime(2018, 4, 14, 8, 0),\n", + " datetime.datetime(2018, 4, 14, 9, 0),\n", + " datetime.datetime(2018, 4, 14, 10, 0),\n", + " datetime.datetime(2018, 4, 14, 11, 0),\n", + " datetime.datetime(2018, 4, 14, 12, 0),\n", + " datetime.datetime(2018, 4, 14, 13, 0),\n", + " datetime.datetime(2018, 4, 14, 14, 0),\n", + " datetime.datetime(2018, 4, 14, 15, 0),\n", + " datetime.datetime(2018, 4, 14, 16, 0),\n", + " datetime.datetime(2018, 4, 14, 17, 0),\n", + " datetime.datetime(2018, 4, 14, 18, 0),\n", + " datetime.datetime(2018, 4, 14, 19, 0),\n", + " datetime.datetime(2018, 4, 14, 20, 0),\n", + " datetime.datetime(2018, 4, 14, 21, 0),\n", + " datetime.datetime(2018, 4, 14, 22, 0),\n", + " datetime.datetime(2018, 4, 14, 23, 0),\n", + " datetime.datetime(2018, 4, 15, 0, 0),\n", + " datetime.datetime(2018, 4, 15, 1, 0),\n", + " datetime.datetime(2018, 4, 15, 2, 0),\n", + " datetime.datetime(2018, 4, 15, 3, 0),\n", + " datetime.datetime(2018, 4, 15, 4, 0),\n", + " datetime.datetime(2018, 4, 15, 5, 0),\n", + " datetime.datetime(2018, 4, 15, 6, 0),\n", + " datetime.datetime(2018, 4, 15, 7, 0),\n", + " datetime.datetime(2018, 4, 15, 8, 0),\n", + " datetime.datetime(2018, 4, 15, 9, 0),\n", + " datetime.datetime(2018, 4, 15, 10, 0),\n", + " 
datetime.datetime(2018, 4, 15, 11, 0),\n", + " datetime.datetime(2018, 4, 15, 12, 0),\n", + " datetime.datetime(2018, 4, 15, 13, 0),\n", + " datetime.datetime(2018, 4, 15, 14, 0),\n", + " datetime.datetime(2018, 4, 15, 15, 0),\n", + " datetime.datetime(2018, 4, 15, 16, 0),\n", + " datetime.datetime(2018, 4, 15, 17, 0),\n", + " datetime.datetime(2018, 4, 15, 18, 0),\n", + " datetime.datetime(2018, 4, 15, 19, 0),\n", + " datetime.datetime(2018, 4, 15, 20, 0),\n", + " datetime.datetime(2018, 4, 15, 21, 0),\n", + " datetime.datetime(2018, 4, 15, 22, 0),\n", + " datetime.datetime(2018, 4, 15, 23, 0),\n", + " datetime.datetime(2018, 4, 16, 0, 0),\n", + " datetime.datetime(2018, 4, 16, 1, 0),\n", + " datetime.datetime(2018, 4, 16, 2, 0),\n", + " datetime.datetime(2018, 4, 16, 3, 0),\n", + " datetime.datetime(2018, 4, 16, 4, 0),\n", + " datetime.datetime(2018, 4, 16, 5, 0),\n", + " datetime.datetime(2018, 4, 16, 6, 0),\n", + " datetime.datetime(2018, 4, 16, 7, 0),\n", + " datetime.datetime(2018, 4, 16, 8, 0),\n", + " datetime.datetime(2018, 4, 16, 9, 0),\n", + " datetime.datetime(2018, 4, 16, 10, 0),\n", + " datetime.datetime(2018, 4, 16, 11, 0),\n", + " datetime.datetime(2018, 4, 16, 12, 0),\n", + " datetime.datetime(2018, 4, 16, 13, 0),\n", + " datetime.datetime(2018, 4, 16, 14, 0),\n", + " datetime.datetime(2018, 4, 16, 15, 0),\n", + " datetime.datetime(2018, 4, 16, 16, 0),\n", + " datetime.datetime(2018, 4, 16, 17, 0),\n", + " datetime.datetime(2018, 4, 16, 18, 0),\n", + " datetime.datetime(2018, 4, 16, 19, 0),\n", + " datetime.datetime(2018, 4, 16, 20, 0),\n", + " datetime.datetime(2018, 4, 16, 21, 0),\n", + " datetime.datetime(2018, 4, 16, 22, 0),\n", + " datetime.datetime(2018, 4, 16, 23, 0),\n", + " datetime.datetime(2018, 4, 17, 0, 0),\n", + " datetime.datetime(2018, 4, 17, 1, 0),\n", + " datetime.datetime(2018, 4, 17, 2, 0),\n", + " datetime.datetime(2018, 4, 17, 3, 0),\n", + " datetime.datetime(2018, 4, 17, 4, 0),\n", + " datetime.datetime(2018, 4, 17, 5, 0),\n", + " datetime.datetime(2018, 4, 17, 6, 0),\n", + " datetime.datetime(2018, 4, 17, 7, 0),\n", + " datetime.datetime(2018, 4, 17, 8, 0),\n", + " datetime.datetime(2018, 4, 17, 9, 0),\n", + " datetime.datetime(2018, 4, 17, 10, 0),\n", + " datetime.datetime(2018, 4, 17, 11, 0),\n", + " datetime.datetime(2018, 4, 17, 12, 0),\n", + " datetime.datetime(2018, 4, 17, 13, 0),\n", + " datetime.datetime(2018, 4, 17, 14, 0),\n", + " datetime.datetime(2018, 4, 17, 15, 0),\n", + " datetime.datetime(2018, 4, 17, 16, 0),\n", + " datetime.datetime(2018, 4, 17, 17, 0),\n", + " datetime.datetime(2018, 4, 17, 18, 0),\n", + " datetime.datetime(2018, 4, 17, 19, 0),\n", + " datetime.datetime(2018, 4, 17, 20, 0),\n", + " datetime.datetime(2018, 4, 17, 21, 0),\n", + " datetime.datetime(2018, 4, 17, 22, 0),\n", + " datetime.datetime(2018, 4, 17, 23, 0),\n", + " datetime.datetime(2018, 4, 18, 0, 0),\n", + " datetime.datetime(2018, 4, 18, 1, 0),\n", + " datetime.datetime(2018, 4, 18, 2, 0),\n", + " datetime.datetime(2018, 4, 18, 3, 0),\n", + " datetime.datetime(2018, 4, 18, 4, 0),\n", + " datetime.datetime(2018, 4, 18, 5, 0),\n", + " datetime.datetime(2018, 4, 18, 6, 0),\n", + " datetime.datetime(2018, 4, 18, 7, 0),\n", + " datetime.datetime(2018, 4, 18, 8, 0),\n", + " datetime.datetime(2018, 4, 18, 9, 0),\n", + " datetime.datetime(2018, 4, 18, 10, 0),\n", + " datetime.datetime(2018, 4, 18, 11, 0),\n", + " datetime.datetime(2018, 4, 18, 12, 0),\n", + " datetime.datetime(2018, 4, 18, 13, 0),\n", + " datetime.datetime(2018, 4, 18, 14, 0),\n", + " 
datetime.datetime(2018, 4, 18, 15, 0),\n", + " datetime.datetime(2018, 4, 18, 16, 0),\n", + " datetime.datetime(2018, 4, 18, 17, 0),\n", + " datetime.datetime(2018, 4, 18, 18, 0),\n", + " datetime.datetime(2018, 4, 18, 19, 0),\n", + " datetime.datetime(2018, 4, 18, 20, 0),\n", + " datetime.datetime(2018, 4, 18, 21, 0),\n", + " datetime.datetime(2018, 4, 18, 22, 0),\n", + " datetime.datetime(2018, 4, 18, 23, 0),\n", + " datetime.datetime(2018, 4, 19, 0, 0),\n", + " datetime.datetime(2018, 4, 19, 1, 0),\n", + " datetime.datetime(2018, 4, 19, 2, 0),\n", + " datetime.datetime(2018, 4, 19, 3, 0),\n", + " datetime.datetime(2018, 4, 19, 4, 0),\n", + " datetime.datetime(2018, 4, 19, 5, 0),\n", + " datetime.datetime(2018, 4, 19, 6, 0),\n", + " datetime.datetime(2018, 4, 19, 7, 0),\n", + " datetime.datetime(2018, 4, 19, 8, 0),\n", + " datetime.datetime(2018, 4, 19, 9, 0),\n", + " datetime.datetime(2018, 4, 19, 10, 0),\n", + " datetime.datetime(2018, 4, 19, 11, 0),\n", + " datetime.datetime(2018, 4, 19, 12, 0),\n", + " datetime.datetime(2018, 4, 19, 13, 0),\n", + " datetime.datetime(2018, 4, 19, 14, 0),\n", + " datetime.datetime(2018, 4, 19, 15, 0),\n", + " datetime.datetime(2018, 4, 19, 16, 0),\n", + " datetime.datetime(2018, 4, 19, 17, 0),\n", + " datetime.datetime(2018, 4, 19, 18, 0),\n", + " datetime.datetime(2018, 4, 19, 19, 0),\n", + " datetime.datetime(2018, 4, 19, 20, 0),\n", + " datetime.datetime(2018, 4, 19, 21, 0),\n", + " datetime.datetime(2018, 4, 19, 22, 0),\n", + " datetime.datetime(2018, 4, 19, 23, 0),\n", + " datetime.datetime(2018, 4, 20, 0, 0),\n", + " datetime.datetime(2018, 4, 20, 1, 0),\n", + " datetime.datetime(2018, 4, 20, 2, 0),\n", + " datetime.datetime(2018, 4, 20, 3, 0),\n", + " datetime.datetime(2018, 4, 20, 4, 0),\n", + " datetime.datetime(2018, 4, 20, 5, 0),\n", + " datetime.datetime(2018, 4, 20, 6, 0),\n", + " datetime.datetime(2018, 4, 20, 7, 0),\n", + " datetime.datetime(2018, 4, 20, 8, 0),\n", + " datetime.datetime(2018, 4, 20, 9, 0),\n", + " datetime.datetime(2018, 4, 20, 10, 0),\n", + " datetime.datetime(2018, 4, 20, 11, 0),\n", + " datetime.datetime(2018, 4, 20, 12, 0),\n", + " datetime.datetime(2018, 4, 20, 13, 0),\n", + " datetime.datetime(2018, 4, 20, 14, 0),\n", + " datetime.datetime(2018, 4, 20, 15, 0),\n", + " datetime.datetime(2018, 4, 20, 16, 0),\n", + " datetime.datetime(2018, 4, 20, 17, 0),\n", + " datetime.datetime(2018, 4, 20, 18, 0),\n", + " datetime.datetime(2018, 4, 20, 19, 0),\n", + " datetime.datetime(2018, 4, 20, 20, 0),\n", + " datetime.datetime(2018, 4, 20, 21, 0),\n", + " datetime.datetime(2018, 4, 20, 22, 0),\n", + " datetime.datetime(2018, 4, 20, 23, 0),\n", + " datetime.datetime(2018, 4, 21, 0, 0),\n", + " datetime.datetime(2018, 4, 21, 1, 0),\n", + " datetime.datetime(2018, 4, 21, 2, 0),\n", + " datetime.datetime(2018, 4, 21, 3, 0),\n", + " datetime.datetime(2018, 4, 21, 4, 0),\n", + " datetime.datetime(2018, 4, 21, 5, 0),\n", + " datetime.datetime(2018, 4, 21, 6, 0),\n", + " datetime.datetime(2018, 4, 21, 7, 0),\n", + " datetime.datetime(2018, 4, 21, 8, 0),\n", + " datetime.datetime(2018, 4, 21, 9, 0),\n", + " datetime.datetime(2018, 4, 21, 10, 0),\n", + " datetime.datetime(2018, 4, 21, 11, 0),\n", + " datetime.datetime(2018, 4, 21, 12, 0),\n", + " datetime.datetime(2018, 4, 21, 13, 0),\n", + " datetime.datetime(2018, 4, 21, 14, 0),\n", + " datetime.datetime(2018, 4, 21, 15, 0),\n", + " datetime.datetime(2018, 4, 21, 16, 0),\n", + " datetime.datetime(2018, 4, 21, 17, 0),\n", + " datetime.datetime(2018, 4, 21, 18, 0),\n", + " 
datetime.datetime(2018, 4, 21, 19, 0),\n", + " datetime.datetime(2018, 4, 21, 20, 0),\n", + " datetime.datetime(2018, 4, 21, 21, 0),\n", + " datetime.datetime(2018, 4, 21, 22, 0),\n", + " datetime.datetime(2018, 4, 21, 23, 0),\n", + " datetime.datetime(2018, 4, 22, 0, 0),\n", + " datetime.datetime(2018, 4, 22, 1, 0),\n", + " datetime.datetime(2018, 4, 22, 2, 0),\n", + " datetime.datetime(2018, 4, 22, 3, 0),\n", + " datetime.datetime(2018, 4, 22, 4, 0),\n", + " datetime.datetime(2018, 4, 22, 5, 0),\n", + " datetime.datetime(2018, 4, 22, 6, 0),\n", + " datetime.datetime(2018, 4, 22, 7, 0),\n", + " datetime.datetime(2018, 4, 22, 8, 0),\n", + " datetime.datetime(2018, 4, 22, 9, 0),\n", + " datetime.datetime(2018, 4, 22, 10, 0),\n", + " datetime.datetime(2018, 4, 22, 11, 0),\n", + " datetime.datetime(2018, 4, 22, 12, 0),\n", + " datetime.datetime(2018, 4, 22, 13, 0),\n", + " datetime.datetime(2018, 4, 22, 14, 0),\n", + " datetime.datetime(2018, 4, 22, 15, 0),\n", + " datetime.datetime(2018, 4, 22, 16, 0),\n", + " datetime.datetime(2018, 4, 22, 17, 0),\n", + " datetime.datetime(2018, 4, 22, 18, 0),\n", + " datetime.datetime(2018, 4, 22, 19, 0),\n", + " datetime.datetime(2018, 4, 22, 20, 0),\n", + " datetime.datetime(2018, 4, 22, 21, 0),\n", + " datetime.datetime(2018, 4, 22, 22, 0),\n", + " datetime.datetime(2018, 4, 22, 23, 0),\n", + " datetime.datetime(2018, 4, 23, 0, 0),\n", + " datetime.datetime(2018, 4, 23, 1, 0),\n", + " datetime.datetime(2018, 4, 23, 2, 0),\n", + " datetime.datetime(2018, 4, 23, 3, 0),\n", + " datetime.datetime(2018, 4, 23, 4, 0),\n", + " datetime.datetime(2018, 4, 23, 5, 0),\n", + " datetime.datetime(2018, 4, 23, 6, 0),\n", + " datetime.datetime(2018, 4, 23, 7, 0),\n", + " datetime.datetime(2018, 4, 23, 8, 0),\n", + " datetime.datetime(2018, 4, 23, 9, 0),\n", + " datetime.datetime(2018, 4, 23, 10, 0),\n", + " datetime.datetime(2018, 4, 23, 11, 0),\n", + " datetime.datetime(2018, 4, 23, 12, 0),\n", + " datetime.datetime(2018, 4, 23, 13, 0),\n", + " datetime.datetime(2018, 4, 23, 14, 0),\n", + " datetime.datetime(2018, 4, 23, 15, 0),\n", + " datetime.datetime(2018, 4, 23, 16, 0),\n", + " datetime.datetime(2018, 4, 23, 17, 0),\n", + " datetime.datetime(2018, 4, 23, 18, 0),\n", + " datetime.datetime(2018, 4, 23, 19, 0),\n", + " datetime.datetime(2018, 4, 23, 20, 0),\n", + " datetime.datetime(2018, 4, 23, 21, 0),\n", + " datetime.datetime(2018, 4, 23, 22, 0),\n", + " datetime.datetime(2018, 4, 23, 23, 0),\n", + " datetime.datetime(2018, 4, 24, 0, 0),\n", + " datetime.datetime(2018, 4, 24, 1, 0),\n", + " datetime.datetime(2018, 4, 24, 2, 0),\n", + " datetime.datetime(2018, 4, 24, 3, 0),\n", + " datetime.datetime(2018, 4, 24, 4, 0),\n", + " datetime.datetime(2018, 4, 24, 5, 0),\n", + " datetime.datetime(2018, 4, 24, 6, 0),\n", + " datetime.datetime(2018, 4, 24, 7, 0),\n", + " datetime.datetime(2018, 4, 24, 8, 0),\n", + " datetime.datetime(2018, 4, 24, 9, 0),\n", + " datetime.datetime(2018, 4, 24, 10, 0),\n", + " datetime.datetime(2018, 4, 24, 11, 0),\n", + " datetime.datetime(2018, 4, 24, 12, 0),\n", + " datetime.datetime(2018, 4, 24, 13, 0),\n", + " datetime.datetime(2018, 4, 24, 14, 0),\n", + " datetime.datetime(2018, 4, 24, 15, 0),\n", + " datetime.datetime(2018, 4, 24, 16, 0),\n", + " datetime.datetime(2018, 4, 24, 17, 0),\n", + " datetime.datetime(2018, 4, 24, 18, 0),\n", + " datetime.datetime(2018, 4, 24, 19, 0),\n", + " datetime.datetime(2018, 4, 24, 20, 0),\n", + " datetime.datetime(2018, 4, 24, 21, 0),\n", + " datetime.datetime(2018, 4, 24, 22, 0),\n", + " 
datetime.datetime(2018, 4, 24, 23, 0),\n", + " datetime.datetime(2018, 4, 25, 0, 0),\n", + " datetime.datetime(2018, 4, 25, 1, 0),\n", + " datetime.datetime(2018, 4, 25, 2, 0),\n", + " datetime.datetime(2018, 4, 25, 3, 0),\n", + " datetime.datetime(2018, 4, 25, 4, 0),\n", + " datetime.datetime(2018, 4, 25, 5, 0),\n", + " datetime.datetime(2018, 4, 25, 6, 0),\n", + " datetime.datetime(2018, 4, 25, 7, 0),\n", + " datetime.datetime(2018, 4, 25, 8, 0),\n", + " datetime.datetime(2018, 4, 25, 9, 0),\n", + " datetime.datetime(2018, 4, 25, 10, 0),\n", + " datetime.datetime(2018, 4, 25, 11, 0),\n", + " datetime.datetime(2018, 4, 25, 12, 0),\n", + " datetime.datetime(2018, 4, 25, 13, 0),\n", + " datetime.datetime(2018, 4, 25, 14, 0),\n", + " datetime.datetime(2018, 4, 25, 15, 0),\n", + " datetime.datetime(2018, 4, 25, 16, 0),\n", + " datetime.datetime(2018, 4, 25, 17, 0),\n", + " datetime.datetime(2018, 4, 25, 18, 0),\n", + " datetime.datetime(2018, 4, 25, 19, 0),\n", + " datetime.datetime(2018, 4, 25, 20, 0),\n", + " datetime.datetime(2018, 4, 25, 21, 0),\n", + " datetime.datetime(2018, 4, 25, 22, 0),\n", + " datetime.datetime(2018, 4, 25, 23, 0),\n", + " datetime.datetime(2018, 4, 26, 0, 0),\n", + " datetime.datetime(2018, 4, 26, 1, 0),\n", + " datetime.datetime(2018, 4, 26, 2, 0),\n", + " datetime.datetime(2018, 4, 26, 3, 0),\n", + " datetime.datetime(2018, 4, 26, 4, 0),\n", + " datetime.datetime(2018, 4, 26, 5, 0),\n", + " datetime.datetime(2018, 4, 26, 6, 0),\n", + " datetime.datetime(2018, 4, 26, 7, 0),\n", + " datetime.datetime(2018, 4, 26, 8, 0),\n", + " datetime.datetime(2018, 4, 26, 9, 0),\n", + " datetime.datetime(2018, 4, 26, 10, 0),\n", + " datetime.datetime(2018, 4, 26, 11, 0),\n", + " datetime.datetime(2018, 4, 26, 12, 0),\n", + " datetime.datetime(2018, 4, 26, 13, 0),\n", + " datetime.datetime(2018, 4, 26, 14, 0),\n", + " datetime.datetime(2018, 4, 26, 15, 0),\n", + " datetime.datetime(2018, 4, 26, 16, 0),\n", + " datetime.datetime(2018, 4, 26, 17, 0),\n", + " datetime.datetime(2018, 4, 26, 18, 0),\n", + " datetime.datetime(2018, 4, 26, 19, 0),\n", + " datetime.datetime(2018, 4, 26, 20, 0),\n", + " datetime.datetime(2018, 4, 26, 21, 0),\n", + " datetime.datetime(2018, 4, 26, 22, 0),\n", + " datetime.datetime(2018, 4, 26, 23, 0),\n", + " datetime.datetime(2018, 4, 27, 0, 0),\n", + " datetime.datetime(2018, 4, 27, 1, 0),\n", + " datetime.datetime(2018, 4, 27, 2, 0),\n", + " datetime.datetime(2018, 4, 27, 3, 0),\n", + " datetime.datetime(2018, 4, 27, 4, 0),\n", + " datetime.datetime(2018, 4, 27, 5, 0),\n", + " datetime.datetime(2018, 4, 27, 6, 0),\n", + " datetime.datetime(2018, 4, 27, 7, 0),\n", + " datetime.datetime(2018, 4, 27, 8, 0),\n", + " datetime.datetime(2018, 4, 27, 9, 0),\n", + " datetime.datetime(2018, 4, 27, 10, 0),\n", + " datetime.datetime(2018, 4, 27, 11, 0),\n", + " datetime.datetime(2018, 4, 27, 12, 0),\n", + " datetime.datetime(2018, 4, 27, 13, 0),\n", + " datetime.datetime(2018, 4, 27, 14, 0),\n", + " datetime.datetime(2018, 4, 27, 15, 0),\n", + " datetime.datetime(2018, 4, 27, 16, 0),\n", + " datetime.datetime(2018, 4, 27, 17, 0),\n", + " datetime.datetime(2018, 4, 27, 18, 0),\n", + " datetime.datetime(2018, 4, 27, 19, 0),\n", + " datetime.datetime(2018, 4, 27, 20, 0),\n", + " datetime.datetime(2018, 4, 27, 21, 0),\n", + " datetime.datetime(2018, 4, 27, 22, 0),\n", + " datetime.datetime(2018, 4, 27, 23, 0),\n", + " datetime.datetime(2018, 4, 28, 0, 0),\n", + " datetime.datetime(2018, 4, 28, 1, 0),\n", + " datetime.datetime(2018, 4, 28, 2, 0),\n", + " 
datetime.datetime(2018, 4, 28, 3, 0),\n", + " datetime.datetime(2018, 4, 28, 4, 0),\n", + " datetime.datetime(2018, 4, 28, 5, 0),\n", + " datetime.datetime(2018, 4, 28, 6, 0),\n", + " datetime.datetime(2018, 4, 28, 7, 0),\n", + " datetime.datetime(2018, 4, 28, 8, 0),\n", + " datetime.datetime(2018, 4, 28, 9, 0),\n", + " datetime.datetime(2018, 4, 28, 10, 0),\n", + " datetime.datetime(2018, 4, 28, 11, 0),\n", + " datetime.datetime(2018, 4, 28, 12, 0),\n", + " datetime.datetime(2018, 4, 28, 13, 0),\n", + " datetime.datetime(2018, 4, 28, 14, 0),\n", + " datetime.datetime(2018, 4, 28, 15, 0),\n", + " datetime.datetime(2018, 4, 28, 16, 0),\n", + " datetime.datetime(2018, 4, 28, 17, 0),\n", + " datetime.datetime(2018, 4, 28, 18, 0),\n", + " datetime.datetime(2018, 4, 28, 19, 0),\n", + " datetime.datetime(2018, 4, 28, 20, 0),\n", + " datetime.datetime(2018, 4, 28, 21, 0),\n", + " datetime.datetime(2018, 4, 28, 22, 0),\n", + " datetime.datetime(2018, 4, 28, 23, 0),\n", + " datetime.datetime(2018, 4, 29, 0, 0),\n", + " datetime.datetime(2018, 4, 29, 1, 0),\n", + " datetime.datetime(2018, 4, 29, 2, 0),\n", + " datetime.datetime(2018, 4, 29, 3, 0),\n", + " datetime.datetime(2018, 4, 29, 4, 0),\n", + " datetime.datetime(2018, 4, 29, 5, 0),\n", + " datetime.datetime(2018, 4, 29, 6, 0),\n", + " datetime.datetime(2018, 4, 29, 7, 0),\n", + " datetime.datetime(2018, 4, 29, 8, 0),\n", + " datetime.datetime(2018, 4, 29, 9, 0),\n", + " datetime.datetime(2018, 4, 29, 10, 0),\n", + " datetime.datetime(2018, 4, 29, 11, 0),\n", + " datetime.datetime(2018, 4, 29, 12, 0),\n", + " datetime.datetime(2018, 4, 29, 13, 0),\n", + " datetime.datetime(2018, 4, 29, 14, 0),\n", + " datetime.datetime(2018, 4, 29, 15, 0),\n", + " datetime.datetime(2018, 4, 29, 16, 0),\n", + " datetime.datetime(2018, 4, 29, 17, 0),\n", + " datetime.datetime(2018, 4, 29, 18, 0),\n", + " datetime.datetime(2018, 4, 29, 19, 0),\n", + " datetime.datetime(2018, 4, 29, 20, 0),\n", + " datetime.datetime(2018, 4, 29, 21, 0),\n", + " datetime.datetime(2018, 4, 29, 22, 0),\n", + " datetime.datetime(2018, 4, 29, 23, 0),\n", + " datetime.datetime(2018, 4, 30, 0, 0),\n", + " datetime.datetime(2018, 4, 30, 1, 0),\n", + " datetime.datetime(2018, 4, 30, 2, 0),\n", + " datetime.datetime(2018, 4, 30, 3, 0),\n", + " datetime.datetime(2018, 4, 30, 4, 0),\n", + " datetime.datetime(2018, 4, 30, 5, 0),\n", + " datetime.datetime(2018, 4, 30, 6, 0),\n", + " datetime.datetime(2018, 4, 30, 7, 0),\n", + " datetime.datetime(2018, 4, 30, 8, 0),\n", + " datetime.datetime(2018, 4, 30, 9, 0),\n", + " datetime.datetime(2018, 4, 30, 10, 0),\n", + " datetime.datetime(2018, 4, 30, 11, 0),\n", + " datetime.datetime(2018, 4, 30, 12, 0),\n", + " datetime.datetime(2018, 4, 30, 13, 0),\n", + " datetime.datetime(2018, 4, 30, 14, 0),\n", + " datetime.datetime(2018, 4, 30, 15, 0),\n", + " datetime.datetime(2018, 4, 30, 16, 0),\n", + " datetime.datetime(2018, 4, 30, 17, 0),\n", + " datetime.datetime(2018, 4, 30, 18, 0),\n", + " datetime.datetime(2018, 4, 30, 19, 0),\n", + " datetime.datetime(2018, 4, 30, 20, 0),\n", + " datetime.datetime(2018, 4, 30, 21, 0),\n", + " datetime.datetime(2018, 4, 30, 22, 0),\n", + " datetime.datetime(2018, 4, 30, 23, 0)]" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2.time" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0]), 'units': ''}" + ] + }, + "execution_count": 13, + "metadata": {}, + 
"output_type": "execute_result" + } + ], + "source": [ + "nessy_2.lev" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-64.24006 , -54.84846497, -22.10333333, -31.66861111,\n", + " 47.76666641, 46.677778 , 48.721111 , 47.529167 ,\n", + " 47.05407 , 46.693611 , 47.348056 , 47.973056 ,\n", + " 48.878611 , 48.106111 , 48.371111 , 48.334722 ,\n", + " 48.050833 , 47.838611 , 47.040277 , 47.06694444,\n", + " 49.877778 , 50.629421 , 50.503333 , 41.695833 ,\n", + " 32.27000046, 80.05000305, 46.5475 , 46.813056 ,\n", + " 47.479722 , 47.049722 , 47.0675 , 47.18961391,\n", + " -30.17254 , 16.86403 , 35.0381 , 49.73508444,\n", + " 49.573394 , 49.066667 , 54.925556 , 52.802222 ,\n", + " 47.914722 , 53.166667 , 50.65 , 54.4368 ,\n", + " 47.80149841, 47.4165 , -70.666 , 54.746495 ,\n", + " 81.6 , 55.693588 , 72.58000183, 56.290424 ,\n", + " 59.5 , 58.383333 , 39.54694 , 42.72056 ,\n", + " 39.87528 , 37.23722 , 43.43917 , 41.27417 ,\n", + " 42.31917 , 38.47278 , 39.08278 , 41.23889 ,\n", + " 41.39389 , 42.63472 , 37.05194 , 28.309 ,\n", + " 59.779167 , 60.53002 , 66.320278 , 67.97333333,\n", + " 48.5 , 49.9 , 47.266667 , 43.616667 ,\n", + " 47.3 , 46.65 , 45. , 45.8 ,\n", + " 48.633333 , 42.936667 , 48.70861111, 44.56944444,\n", + " 46.81472778, 45.772223 , 55.313056 , 54.443056 ,\n", + " 50.596389 , 54.334444 , 57.734444 , 52.503889 ,\n", + " 55.858611 , 53.398889 , 50.792778 , 52.293889 ,\n", + " 51.781784 , 52.298333 , 55.79216 , 52.950556 ,\n", + " 51.778056 , 60.13922 , -75.62 , 51.149617 ,\n", + " 38.366667 , 35.316667 , 46.966667 , 46.91 ,\n", + " -0.20194 , 51.939722 , 53.32583 , 45.8 ,\n", + " 44.183333 , 37.571111 , 35.5182 , 42.805462 ,\n", + " -69.005 , 39.0319 , 24.2883 , 24.466941 ,\n", + " 36.53833389, 33.293917 , 55.37611111, 56.161944 ,\n", + " 57.135278 , 41.536111 , 36.0722 , 52.083333 ,\n", + " 53.333889 , 51.541111 , 52.3 , 51.974444 ,\n", + " 58.38853 , 65.833333 , 62.783333 , 78.90715 ,\n", + " 59. 
, 69.45 , 59.2 , 60.372386 ,\n", + " -72.0117 , 59.2 , -41.40819168, -77.83200073,\n", + " -45.0379982 , 51.814408 , 50.736444 , 54.753894 ,\n", + " 54.15 , 43.4 , 71.58616638, 63.85 ,\n", + " 67.883333 , 57.394 , 57.1645 , 57.9525 ,\n", + " 56.0429 , 60.0858 , 57.816667 , 64.25 ,\n", + " 59.728 , 45.566667 , 46.428611 , 46.299444 ,\n", + " 48.933333 , 49.15 , 49.05 , 47.96 ,\n", + " 71.32301331, 40.12498 , 19.53623009, -89.99694824,\n", + " 41.05410004, 21.5731 , -34.35348 ],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'latitude',\n", + " 'units': 'decimal degrees North',\n", + " 'long_name': 'latitude',\n", + " 'description': 'Geodetic latitude of measuring instrument, in decimal degrees North, following the stated horizontal datum.',\n", + " 'axis': 'Y'}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2.lat" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': masked_array(data=[-5.66247800e+01, -6.83106918e+01, -6.56008333e+01,\n", + " -6.38819444e+01, 1.67666664e+01, 1.29722220e+01,\n", + " 1.59422220e+01, 9.92666700e+00, 1.29579400e+01,\n", + " 1.39150000e+01, 1.58822220e+01, 1.30161110e+01,\n", + " 1.50466670e+01, 1.59194440e+01, 1.55466670e+01,\n", + " 1.67305560e+01, 1.66766670e+01, 1.44413890e+01,\n", + " 1.43300000e+01, 1.54936111e+01, 5.20361100e+00,\n", + " 6.00101900e+00, 4.98944400e+00, 2.47386110e+01,\n", + " -6.48799973e+01, -8.64166565e+01, 7.98500000e+00,\n", + " 6.94472200e+00, 8.90472200e+00, 6.97944400e+00,\n", + " 8.46388900e+00, 8.17543368e+00, -7.07992300e+01,\n", + " -2.48675200e+01, 3.30578000e+01, 1.60341969e+01,\n", + " 1.50802780e+01, 1.36000000e+01, 8.30972200e+00,\n", + " 1.07594440e+01, 7.90861100e+00, 1.30333330e+01,\n", + " 1.07666670e+01, 1.27249000e+01, 1.10096197e+01,\n", + " 1.09796400e+01, -8.26600000e+00, 1.07361600e+01,\n", + " -1.66700000e+01, 1.20857970e+01, -3.84799995e+01,\n", + " 8.42748600e+00, 2.59000000e+01, 2.18166670e+01,\n", + " -4.35056000e+00, -8.92361000e+00, 4.31639000e+00,\n", + " -3.53417000e+00, -4.85000000e+00, -3.14250000e+00,\n", + " 3.31583000e+00, -6.92361000e+00, -1.10111000e+00,\n", + " -5.89750000e+00, 7.34720000e-01, -7.70472000e+00,\n", + " -6.55528000e+00, -1.64994000e+01, 2.13772220e+01,\n", + " 2.76675400e+01, 2.94016670e+01, 2.41161111e+01,\n", + " 7.13333300e+00, 4.63333300e+00, 4.08333300e+00,\n", + " 1.83333000e-01, 6.83333300e+00, -7.50000000e-01,\n", + " 6.46666700e+00, 2.06666700e+00, -4.50000000e-01,\n", + " 1.41944000e-01, 2.15888889e+00, 5.27897222e+00,\n", + " 2.61000833e+00, 2.96488600e+00, -3.20416700e+00,\n", + " -7.87000000e+00, -3.71305600e+00, -8.07500000e-01,\n", + " -4.77444400e+00, -3.03305600e+00, -3.20500000e+00,\n", + " -1.75333300e+00, 1.79444000e-01, 1.46305600e+00,\n", + " -4.69146200e+00, 2.92778000e-01, -3.24290000e+00,\n", + " 1.12194400e+00, 1.08223000e+00, -1.18531900e+00,\n", + " -2.61800000e+01, -1.43822800e+00, 2.30833330e+01,\n", + " 2.56666670e+01, 1.95833330e+01, 1.63200000e+01,\n", + " 1.00318100e+02, -1.02444440e+01, -9.89944000e+00,\n", + " 8.63333300e+00, 1.07000000e+01, 1.26597220e+01,\n", + " 1.26305000e+01, 1.25656450e+01, 3.95905556e+01,\n", + " 1.41822200e+02, 1.53983300e+02, 1.23010872e+02,\n", + " 1.26330002e+02, 1.26163111e+02, 2.10305556e+01,\n", + " 2.11730560e+01, 2.59055560e+01, 2.06938900e+01,\n", + " 1.42184000e+01, 6.56666700e+00, 
6.27722200e+00,\n", + " 5.85361100e+00, 4.50000000e+00, 4.92361100e+00,\n", + " 8.25200000e+00, 1.39166670e+01, 8.88333300e+00,\n", + " 1.18866800e+01, 1.15333330e+01, 3.00333330e+01,\n", + " 5.20000000e+00, 1.10781420e+01, 2.53510000e+00,\n", + " 9.51666700e+00, 1.74870804e+02, 1.66660004e+02,\n", + " 1.69684006e+02, 2.19724190e+01, 1.57395000e+01,\n", + " 1.75342640e+01, 2.20666670e+01, 2.19500000e+01,\n", + " 1.28918823e+02, 1.53333330e+01, 2.10666670e+01,\n", + " 1.19140000e+01, 1.47825000e+01, 1.24030000e+01,\n", + " 1.31480000e+01, 1.75052800e+01, 1.55666670e+01,\n", + " 1.97666670e+01, 1.54720000e+01, 1.48666670e+01,\n", + " 1.50033330e+01, 1.45386110e+01, 1.95833330e+01,\n", + " 2.02833330e+01, 2.22666670e+01, 1.78605560e+01,\n", + " -1.56611465e+02, -1.05236800e+02, -1.55576157e+02,\n", + " -2.47999992e+01, -1.24151001e+02, 1.03515700e+02,\n", + " 1.84896800e+01],\n", + " mask=False,\n", + " fill_value=1e+20),\n", + " 'dimensions': ('station',),\n", + " 'standard_name': 'longitude',\n", + " 'units': 'decimal degrees East',\n", + " 'long_name': 'longitude',\n", + " 'description': 'Geodetic longitude of measuring instrument, in decimal degrees East, following the stated horizontal datum.',\n", + " 'axis': 'X'}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nessy_2.lon" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Loading grid_edge_latitude var (1/6)\n", + "Rank 000: Loaded grid_edge_latitude var ((1125,))\n", + "Rank 000: Loading grid_edge_longitude var (2/6)\n", + "Rank 000: Loaded grid_edge_longitude var ((1125,))\n", + "Rank 000: Loading model_centre_latitude var (3/6)\n", + "Rank 000: Loaded model_centre_latitude var ((211, 351))\n", + "Rank 000: Loading model_centre_longitude var (4/6)\n", + "Rank 000: Loaded model_centre_longitude var ((211, 351))\n", + "Rank 000: Loading sconco3 var (5/6)\n", + "Rank 000: Loaded sconco3 var ((175, 720))\n", + "Rank 000: Loading station_reference var (6/6)\n", + "Rank 000: Loaded station_reference var ((175,))\n" + ] + } + ], + "source": [ + "nessy_2.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Write" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rank 000: Creating providentia_exp_file.nc\n", + "Rank 000: NetCDF ready to write\n", + "Rank 000: Dimensions done\n", + "Rank 000: Writing grid_edge_latitude var (1/6)\n", + "**ERROR** an error has occurred while writing the 'grid_edge_latitude' variable\n" + ] + }, + { + "ename": "ValueError", + "evalue": "cannot find dimension grid_edge in this group or parent groups", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0mdim\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdimensions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mdimname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'dimensions'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 52\u001b[0;31m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'parent'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mnessy_2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_netcdf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'providentia_exp_file.nc'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36mto_netcdf\u001b[0;34m(self, path, compression_level, serial, info, chunking)\u001b[0m\n\u001b[1;32m 1431\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1432\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1433\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__to_netcdf_py\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1434\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1435\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_info\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mold_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/default_nes.py\u001b[0m in \u001b[0;36m__to_netcdf_py\u001b[0;34m(self, path, chunking)\u001b[0m\n\u001b[1;32m 1381\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1382\u001b[0m \u001b[0;31m# Create variables\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1383\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_create_variables\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetcdf\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mchunking\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mchunking\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1384\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1385\u001b[0m \u001b[0;31m# Create metadata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 439\u001b[0m \u001b[0;31m# print(\"**ERROR** an error hase occurred while writing the '{0}' variable\".format(var_name),\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 440\u001b[0m \u001b[0;31m# file=sys.stderr)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 442\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 443\u001b[0m \u001b[0mmsg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'WARNING!!! '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/esarchive/scratch/avilanova/software/NES/nes/nc_projections/points_nes.py\u001b[0m in \u001b[0;36m_create_variables\u001b[0;34m(self, netcdf, chunking)\u001b[0m\n\u001b[1;32m 379\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunking\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 380\u001b[0m var = netcdf.createVariable(var_name, var_dtype, var_dims,\n\u001b[0;32m--> 381\u001b[0;31m zlib=self.zip_lvl > 0, complevel=self.zip_lvl)\n\u001b[0m\u001b[1;32m 382\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmaster\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Dataset.createVariable\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mnetCDF4/_netCDF4.pyx\u001b[0m in \u001b[0;36mnetCDF4._netCDF4.Variable.__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/gpfs/projects/bsc32/software/suselinux/11/software/netcdf4-python/1.5.3-foss-2019b-Python-3.7.4/lib/python3.7/site-packages/netCDF4/utils.py\u001b[0m in \u001b[0;36m_find_dim\u001b[0;34m(grp, dimname)\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgroup\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparent\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"cannot find dimension %s in this group or parent groups\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mdimname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdim\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mValueError\u001b[0m: cannot find dimension grid_edge in this 
group or parent groups" + ] + } + ], + "source": [ + "nessy_2.to_netcdf('providentia_exp_file.nc', info=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd b/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd new file mode 100644 index 0000000000000000000000000000000000000000..20819e939a150aab82341bed9e2f6ca797fa45d6 --- /dev/null +++ b/Jupyter_notebooks/Jupyter_bash_nord3v2.cmd @@ -0,0 +1,40 @@ +#!/bin/bash +#SBATCH --ntasks 1 +#SBATCH --time 03:00:00 +#SBATCH --job-name NES +#SBATCH --output log_jupyter-notebook-%J.out +#SBATCH --error log_jupyter-notebook-%J.err +#SBATCH --exclusive + +# get tunneling info +XDG_RUNTIME_DIR="" +port=$(shuf -i8000-9999 -n1) +node=$(hostname -s) +user=$(whoami) + +# print tunneling instructions jupyter-log +echo -e " + +MacOS or linux terminal command to create your ssh tunnel +ssh -N -L ${port}:${node}:${port} ${user}@nord4.bsc.es + +Use a Browser on your local machine to go to: +localhost:${port} (prefix w/ https:// if using password) +" + +# load modules or conda environments here +module load jupyterlab/3.0.9-foss-2019b-Python-3.7.4 +module load Python/3.7.4-GCCcore-8.3.0 +module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4 +module load xarray/0.19.0-foss-2019b-Python-3.7.4 +module load cftime/1.0.1-foss-2019b-Python-3.7.4 +module load cfunits/1.8-foss-2019b-Python-3.7.4 +module load filelock/3.7.1-foss-2019b-Python-3.7.4 +module load pyproj/2.5.0-foss-2019b-Python-3.7.4 +module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4 + +export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH} + +# DON'T USE ADDRESS BELOW. 
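+# (the URL that jupyter-lab prints refers to the compute node and is only reachable through the ssh tunnel shown above)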
+# DO USE TOKEN BELOW +jupyter-lab --no-browser --port=${port} --ip=${node} diff --git a/Jupyter_notebooks/input/Dades_2017.xlsx b/Jupyter_notebooks/input/Dades_2017.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..6385974cf23a4772e2125ca1edda5c5758252fc3 Binary files /dev/null and b/Jupyter_notebooks/input/Dades_2017.xlsx differ diff --git a/Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx b/Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..4aca3769586044b3906cc5a6a901ce4376ed07ec Binary files /dev/null and b/Jupyter_notebooks/input/Dades_Port_Barcelona_2017-2021_corr.xlsx differ diff --git a/Jupyter_notebooks/input/XVPCA_info.csv b/Jupyter_notebooks/input/XVPCA_info.csv new file mode 100644 index 0000000000000000000000000000000000000000..41b94c9e79a49e9fa8bab191efe6f04705189ff3 --- /dev/null +++ b/Jupyter_notebooks/input/XVPCA_info.csv @@ -0,0 +1,135 @@ +station.code,lat,lon,standardised_network_provided_area_classification +ES0266A,41.379322448,2.086139959,urban-centre +ES0392A,41.727703559,1.838530912,urban-suburban +ES0395A,41.567823582,2.014598316,urban-centre +ES0559A,41.387423958,2.164918317,urban-centre +ES0567A,41.384906375,2.119573944,urban-centre +ES0584A,41.482016279,2.188296656,urban-suburban +ES0586A,41.413621183,2.015985703,urban-centre +ES0691A,41.403716,2.204736,urban-centre +ES0692A,41.37076,2.114771,urban-centre +ES0694A,41.392157459,2.009802277,urban-suburban +ES0700A,41.515609252,2.124996708,urban-centre +ES0704A,41.55242119,2.265250427,urban-suburban +ES0963A,41.473378006,1.982016549,urban-suburban +ES0971A,41.450745,1.975021,urban-suburban +ES0991A,41.88489741,2.874243477,urban-suburban +ES1018A,41.556115398,2.007401267,urban-centre +ES1117A,41.111995,1.151879,urban-suburban +ES1120A,41.115880526,1.191975478,urban-suburban +ES1122A,41.193743,1.236904,rural-near_city +ES1123A,41.155004,1.217734,urban-suburban +ES1124A,41.159532,1.239709,urban-suburban +ES1125A,41.730280261,1.825306423,urban-centre +ES1126A,41.475573452,1.923189624,urban-suburban +ES1135A,41.577626892,1.625893365,urban-suburban +ES1148A,41.425620904,2.222244805,urban-centre +ES1201A,42.391975687,2.842138412,rural-regional +ES1208A,41.150778895,1.120171923,urban-suburban +ES1215A,40.706708761,0.581651482,urban-suburban +ES1220A,41.830977786,1.755282654,rural +ES1222A,41.691273195,2.440944864,rural-regional +ES1225A,41.615794867,0.615725897,urban-centre +ES1231A,41.476769365,2.088977264,urban-centre +ES1248A,42.405378523,1.129930371,rural-regional +ES1262A,41.561264,2.101288,urban-centre +ES1275A,41.689049442,2.495746675,urban-suburban +ES1310A,42.311957,2.213146,rural-regional +ES1311A,41.958676874,3.212854412,rural +ES1312A,41.103677887,1.200765062,urban-suburban +ES1339A,41.219036461,1.721248774,urban-suburban +ES1347A,42.143260622,2.510206967,rural-regional +ES1348A,42.36839,1.776814,rural-regional +ES1362A,41.383226589,2.044453466,urban-centre +ES1379A,41.058203798,0.439711691,rural-regional +ES1390A,41.530418632,2.422048428,urban-suburban +ES1396A,41.378802802,2.133098078,urban-centre +ES1397A,42.003359222,2.287072698,urban-suburban +ES1408A,41.26805333,1.595034746,urban-suburban +ES1413A,41.531750031,2.432573332,urban-suburban +ES1438A,41.385366,2.15403,urban-centre +ES1447A,41.23429,1.72692,urban-suburban +ES1453A,41.447350254,2.209511187,urban-centre +ES1480A,41.398762,2.153472,urban-centre +ES1506A,41.122896353,1.246696221,urban-centre 
+ES1551A,41.512675808,2.125383709,urban-centre +ES1555A,41.224091559,1.726292388,urban-centre +ES1559A,41.959328421,3.037425681,urban-suburban +ES1588A,41.906647654,1.192974755,rural +ES1642A,41.93501,2.239901,urban-suburban +ES1663A,41.39881978,2.002128908,urban-suburban +ES1665A,41.479955339,2.187268328,urban-suburban +ES1666A,41.117387934,1.241649688,urban-centre +ES1679A,41.386413934,2.187416735,urban-centre +ES1680A,41.634656545,2.162349596,urban-suburban +ES1684A,41.492082863,2.042497085,urban-centre +ES1754A,40.643004752,0.288445976,rural-near_city +ES1773A,41.20223184,1.672200024,urban-suburban +ES1775A,41.608655219,2.135874806,urban-suburban +ES1776A,41.452115161,2.208196282,urban-centre +ES1778A,41.779343452,2.358018901,rural-remote +ES1812A,41.278381095,1.179916582,rural-near_city +ES1813A,41.100692784,0.755100288,rural-regional +ES1814A,41.54924,2.212144,urban-suburban +ES1815A,41.346823038,1.686575492,urban-suburban +ES1816A,41.547159712,2.443253787,urban-centre +ES1817A,41.526705931,2.183795315,urban-suburban +ES1839A,41.617943017,2.087078604,urban-suburban +ES1841A,41.551859569,2.43729,urban-suburban +ES1842A,41.481513156,2.26915353,urban-suburban +ES1843A,42.102313372,1.858062466,urban-suburban +ES1851A,42.097931,1.848338,urban-suburban +ES1852A,41.42068447,2.170571748,urban-suburban +ES1853A,41.244890143,1.617704646,rural-near_city +ES1854A,41.00821168,0.831084709,rural-regional +ES1855A,41.009505689,0.912875859,rural-near_city +ES1856A,41.4260772,2.147992032,urban-centre +ES1861A,41.722992004,1.826905859,urban-suburban +ES1870A,41.374943279,2.186841902,urban-suburban +ES1871A,41.436016001,2.007867622,urban-suburban +ES1872A,41.548225196,2.105157341,urban-centre +ES1874A,41.919593952,2.257149921,urban-suburban +ES1887A,41.848441044,2.224108021,rural-near_city +ES1891A,41.598666901,2.287117688,urban-centre +ES1892A,41.443983561,2.237875306,urban-centre +ES1895A,41.494082865,2.029636956,urban-suburban +ES1896A,41.320516155,1.664199192,urban-suburban +ES1899A,41.308402497,1.647339954,rural-near_city +ES1900A,41.387237826,2.186736695,urban-centre +ES1903A,41.313475208,2.013824628,urban-suburban +ES1909A,40.880656635,0.799176536,rural-near_city +ES1910A,41.303112534,1.991523957,urban-suburban +ES1923A,41.846711554,2.217443537,rural-near_city +ES1928A,41.446265477,2.227517323,urban-centre +ES1929A,41.32149521,2.09772526,urban-suburban +ES1930A,40.902692904,0.809795431,rural-near_city +ES1931A,41.469800555,2.184233488,urban-suburban +ES1936A,41.746333141,2.556661265,rural-near_city +ES1948A,40.939553197,0.831336974,rural-near_city +ES1964A,41.386638989,2.057392493,urban-suburban +ES1965A,41.38193008,2.066347853,urban-suburban +ES1982A,42.051340687,0.729555647,rural-remote +ES1983A,41.321768405,2.082140639,urban-suburban +ES1992A,41.387273,2.115661,urban-centre +ES1999A,41.976385751,2.816547298,urban-centre +ES2009A,40.577777716,0.546796795,rural-near_city +ES2011A,41.400770767,1.999634718,urban-suburban +ES2012A,41.415319291,1.990521216,urban-suburban +ES2017A,40.552819474,0.529982528,rural +ES2027A,41.45131069,2.248236084,urban-centre +ES2033A,41.242375047,1.859334489,rural +ES2034A,41.544104651,0.829933196,rural +ES2035A,41.567703441,1.637614343,urban-suburban +ES2071A,41.120064,1.254472,urban-centre +ES2079A,41.559607414,1.995963067,urban-suburban +ES2043A,41.230073,0.547183, +ES2090A,41.418413,2.123899, +ES0554A,40.2891666667,0.289166666667, +ES0977A,41.6047222222,1.60472222222, +ES1398A,41.53667,2.18361111111, +ES1200A,40.2813888889,0.281388888889, 
+ES2087A,41.929283,2.257302, +ES2091A,40.5799,0.5535, +ES2088A,41.77106,2.250647, +ES1908A,41.239068799,1.856563752, +ES9994A,42.358363,1.459455, diff --git a/Jupyter_notebooks/input/estaciones.csv b/Jupyter_notebooks/input/estaciones.csv new file mode 100644 index 0000000000000000000000000000000000000000..ea01b3a0489c8a1a0699114db579c675d3492f03 --- /dev/null +++ b/Jupyter_notebooks/input/estaciones.csv @@ -0,0 +1,4 @@ +station.code,lat,lon,standardised_network_provided_area_classification +Dàrsena sud,41.332889028571,2.1408072098364, +Unitat Mobil,41.3737770961323,2.18451410008966, +ZAL Prat,41.3172766511896,2.13450079021309, diff --git a/Jupyter_notebooks/input/mercator_grid_example.nc b/Jupyter_notebooks/input/mercator_grid_example.nc new file mode 100644 index 0000000000000000000000000000000000000000..f3c3b0866b358adc9bb15f72f10d31a3c56d96e4 Binary files /dev/null and b/Jupyter_notebooks/input/mercator_grid_example.nc differ diff --git a/README.md b/README.md index c49354d3c212bcb364b78046449d1ad191a2aa2f..95ad6c17febf3a5bd178894d65f3ee51bcaac208 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ # NES -NetCDF for Earth Science \ No newline at end of file +NetCDF for Earth Science + +test local \ No newline at end of file diff --git a/environment.yml b/environment.yml new file mode 100755 index 0000000000000000000000000000000000000000..810b716ad0272f873492d22d3f8b0cceb4b3f5b6 --- /dev/null +++ b/environment.yml @@ -0,0 +1,14 @@ +--- + +name: nes + +channels: + - conda-forge + - anaconda + +dependencies: + - python = 3 + # Testing + - pytest + - pytest-cov + - pycodestyle \ No newline at end of file diff --git a/nes/__init__.py b/nes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a597aeec8aea8391855767a4cc79976e867c3ec5 --- /dev/null +++ b/nes/__init__.py @@ -0,0 +1,6 @@ +__date__ = "2022-08-12" +__version__ = "0.9.0" + +from .load_nes import open_netcdf, concatenate_netcdfs +from .create_nes import create_nes +from .nc_projections import * diff --git a/nes/create_nes.py b/nes/create_nes.py new file mode 100644 index 0000000000000000000000000000000000000000..6c321a36a074179925118e9df300f39147db19de --- /dev/null +++ b/nes/create_nes.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python + +import warnings +from netCDF4 import num2date +from mpi4py import MPI +from .nc_projections import * + + +def create_nes(comm=None, info=False, projection=None, parallel_method='Y', balanced=False, + strlen=75, times=None, avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, **kwargs): + + if comm is None: + comm = MPI.COMM_WORLD + else: + comm = comm + + # Create time array + if times is None: + units = 'days since 1996-12-31 00:00:00' + calendar = 'standard' + times = num2date([0], units=units, calendar=calendar) + times = [aux.replace(second=0, microsecond=0) for aux in times] + else: + if not isinstance(times, list): + times = times.tolist() + + # Check if the parameters that are required to create the object have been defined in kwargs + kwargs_list = [] + for name, value in kwargs.items(): + kwargs_list.append(name) + + if projection is None: + required_vars = ['lat', 'lon'] + elif projection == 'regular': + required_vars = ['lat_orig', 'lon_orig', 'inc_lat', 'inc_lon', 'n_lat', 'n_lon'] + elif projection == 'rotated': + required_vars = ['centre_lat', 'centre_lon', 'west_boundary', 'south_boundary', 'inc_rlat', 'inc_rlon'] + elif projection == 'lcc': + required_vars = ['lat_1', 'lat_2', 'lon_0', 'lat_0', 'nx', 'ny', 'inc_x', 'inc_y', 'x_0', 'y_0'] + 
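+ # Illustrative call with hypothetical values for these parameters: + # create_nes(projection='lcc', lat_1=37.0, lat_2=43.0, lon_0=-3.0, lat_0=40.0, + # nx=100, ny=100, inc_x=4000.0, inc_y=4000.0, x_0=-200000.0, y_0=-200000.0)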
elif projection == 'mercator': + required_vars = ['lat_ts', 'lon_0', 'nx', 'ny', 'inc_x', 'inc_y', 'x_0', 'y_0'] + else: + raise ValueError("Unknown projection: {0}".format(projection)) + + for var in required_vars: + if var not in kwargs_list: + msg = 'WARNING!!! ' + msg += 'Variable {0} has not been defined.'.format(var) + warnings.warn(msg) + + if projection is None: + if parallel_method == 'Y': + warnings.warn("Parallel method cannot be 'Y' to create points NES. Setting it to 'X'") + parallel_method = 'X' + elif parallel_method == 'T': + raise NotImplementedError("Parallel method T not implemented yet") + nessy = PointsNes(comm=comm, dataset=None, xarray=False, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, balanced=balanced, + create_nes=True, strlen=strlen, times=times, **kwargs) + elif projection == 'regular': + nessy = LatLonNes(comm=comm, dataset=None, xarray=False, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, balanced=balanced, + create_nes=True, times=times, **kwargs) + elif projection == 'rotated': + nessy = RotatedNes(comm=comm, dataset=None, xarray=False, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, balanced=balanced, + create_nes=True, times=times, **kwargs) + elif projection == 'lcc': + nessy = LCCNes(comm=comm, dataset=None, xarray=False, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, balanced=balanced, + create_nes=True, times=times, **kwargs) + elif projection == 'mercator': + nessy = MercatorNes(comm=comm, dataset=None, xarray=False, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, balanced=balanced, + create_nes=True, times=times, **kwargs) + else: + raise NotImplementedError(projection) + + return nessy + \ No newline at end of file diff --git a/nes/interpolation/horizontal_interpolation.py b/nes/interpolation/horizontal_interpolation.py new file mode 100644 index 0000000000000000000000000000000000000000..e2aff54c1589fa1d7e1fedae54ca68dba7bb11cf --- /dev/null +++ b/nes/interpolation/horizontal_interpolation.py @@ -0,0 +1,418 @@ +#!/usr/bin/env python + +import sys +import numpy as np +import os +import nes +from mpi4py import MPI +from scipy import spatial +from filelock import FileLock +from datetime import datetime +from warnings import warn + + +def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='NearestNeighbour', n_neighbours=4, + info=False): + """ + Horizontal interpolation from one grid to another one. + + Parameters + ---------- + self : nes.Nes + Source projection Nes Object + dst_grid : nes.Nes + Final projection Nes object + weight_matrix_path : str, None + Path to the weight matrix to read/create + kind : str + Kind of horizontal interpolation. choices = ['NearestNeighbour'] + n_neighbours : int + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4 + info: bool + Indicates if you want to print extra info during the interpolation process. 
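+ + Returns + ------- + nes.Nes + Interpolated Nes object on the destination grid. + + Examples + -------- + A sketch of the intended use, assuming the function is exposed as a Nes method + (as its 'self' parameter suggests); the paths are hypothetical: + + >>> from nes import open_netcdf + >>> src = open_netcdf('source_data.nc') + >>> dst = open_netcdf('destination_grid.nc') + >>> out = src.interpolate_horizontal(dst, weight_matrix_path='wm_nn.nc', n_neighbours=4)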
+ """ + # Obtain weight matrix + if self.parallel_method == 'T': + weights, idx = get_weights_idx_t_axis(self, dst_grid, weight_matrix_path, kind, n_neighbours) + elif self.parallel_method in ['Y', 'X']: + weights, idx = get_weights_idx_xy_axis(self, dst_grid, weight_matrix_path, kind, n_neighbours) + else: + raise NotImplemented("Parallel method {0} is not implemented yet for horizontal interpolations. Use 'T'".format( + self.parallel_method)) + + # Apply weights + final_dst = dst_grid.copy() + final_dst.set_communicator(dst_grid.comm) + # return final_dst + final_dst.lev = self.lev + final_dst._lev = self._lev + final_dst.time = self.time + final_dst._time = self._time + final_dst.hours_start = self.hours_start + final_dst.hours_end = self.hours_end + + for var_name, var_info in self.variables.items(): + if info and self.master: + print("\t{var} horizontal interpolation".format(var=var_name)) + sys.stdout.flush() + src_shape = var_info['data'].shape + if isinstance(dst_grid, nes.PointsNes): + dst_shape = (src_shape[0], src_shape[1], idx.shape[-1]) + else: + dst_shape = (src_shape[0], src_shape[1], idx.shape[-2], idx.shape[-1]) + # Creating new variable without data + final_dst.variables[var_name] = {attr_name: attr_value for attr_name, attr_value in var_info.items() + if attr_name != 'data'} + # Creating empty data + final_dst.variables[var_name]['data'] = np.empty(dst_shape) + + # src_data = var_info['data'].reshape((src_shape[0], src_shape[1], src_shape[2] * src_shape[3])) + for time in range(dst_shape[0]): + for lev in range(dst_shape[1]): + src_aux = get_src_data(self.comm, var_info['data'][time, lev], idx, self.parallel_method) + # src_aux = np.take(src_data[time, lev], idx) + final_dst.variables[var_name]['data'][time, lev] = np.sum(weights * src_aux, axis=1) + if isinstance(dst_grid, nes.PointsNes): + # Removing level axis + if src_shape[1] != 1: + raise IndexError("Data with vertical levels cannot be interpolated to points") + final_dst.variables[var_name]['data'] = final_dst.variables[var_name]['data'].reshape( + (src_shape[0], idx.shape[-1])) + if isinstance(dst_grid, nes.PointsNesGHOST): + final_dst.erase_flags() + print('pre final shape:', final_dst.variables[var_name]['data'].shape) + final_dst.variables[var_name]['data'] = final_dst.variables[var_name]['data'].T + # final_dst.variables[var_name]['dtype'] = final_dst.variables[var_name]['data'].dtype + final_dst.variables[var_name]['dimensions'] = ('station', 'time') + print('final shape:', final_dst.variables[var_name]['data'].shape) + + final_dst.global_attrs = self.global_attrs + + return final_dst + + +def get_src_data(comm, var_data, idx, parallel_method): + """ + To obtain the needed src data to interpolate. + + Parameters + ---------- + comm : MPI.Communicator + + var_data : np.array + Rank source data. 
+ idx : np.array + Index of the needed data in a 2D flatten way + parallel_method: str + Source parallel method + + Returns + ------- + np.array + Flatten source needed data + """ + if parallel_method == 'T': + var_data = var_data.flatten() + else: + var_data = comm.gather(var_data, root=0) + if comm.Get_rank() == 0: + if parallel_method == 'Y': + axis = 0 + elif parallel_method == 'X': + axis = 1 + else: + raise NotImplementedError(parallel_method) + var_data = np.concatenate(var_data, axis=axis) + var_data = var_data.flatten() + + var_data = comm.bcast(var_data) + + var_data = np.take(var_data, idx) + return var_data + + +# noinspection DuplicatedCode +def get_weights_idx_t_axis(self, dst_grid, weight_matrix_path, kind, n_neighbours): + """ + To obtain the weights and source data index through the T axis. + + Parameters + ---------- + self : nes.Nes + Source projection Nes Object + dst_grid : nes.Nes + Final projection Nes object + weight_matrix_path : str, None + Path to the weight matrix to read/create + kind : str + Kind of horizontal interpolation. choices = ['NearestNeighbour'] + n_neighbours : int + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4 + + Returns + ------- + tuple + weights and source data index + + """ + if weight_matrix_path is not None: + with FileLock(weight_matrix_path.replace('.nc', '.lock')): + if self.master: + if os.path.isfile(weight_matrix_path): + weight_matrix = read_weight_matrix(weight_matrix_path, comm=MPI.COMM_SELF) + if len(weight_matrix.lev['data']) != n_neighbours: + warn("The selected weight matrix does not have the same number of nearest neighbours." + + "Re-calculating again but not saving it.") + if kind in ['NearestNeighbour', 'NearestNeighbours', 'nn', 'NN']: + weight_matrix = create_nn_weight_matrix(self, dst_grid, n_neighbours=n_neighbours) + else: + raise NotImplementedError(kind) + else: + if kind in ['NearestNeighbour', 'NearestNeighbours', 'nn', 'NN']: + weight_matrix = create_nn_weight_matrix(self, dst_grid, n_neighbours=n_neighbours) + else: + raise NotImplementedError(kind) + if weight_matrix_path is not None: + weight_matrix.to_netcdf(weight_matrix_path) + else: + weight_matrix = None + else: + if kind in ['NearestNeighbour', 'NearestNeighbours', 'nn', 'NN']: + if self.master: + weight_matrix = create_nn_weight_matrix(self, dst_grid, n_neighbours=n_neighbours) + else: + weight_matrix = None + else: + raise NotImplementedError(kind) + + # Normalize to 1 + if self.master: + weights = np.array(np.array(weight_matrix.variables['inverse_dists']['data'], dtype=np.float64) / + np.array(weight_matrix.variables['inverse_dists']['data'], dtype=np.float64).sum(axis=1), + dtype=np.float64) + idx = np.array(weight_matrix.variables['idx']['data'][0], dtype=int) + else: + weights = None + idx = None + + weights = self.comm.bcast(weights, root=0) + idx = self.comm.bcast(idx, root=0) + + return weights, idx + + +# noinspection DuplicatedCode +def get_weights_idx_xy_axis(self, dst_grid, weight_matrix_path, kind, n_neighbours): + """ + To obtain the weights and source data index through the X or Y axis. + + Parameters + ---------- + self : nes.Nes + Source projection Nes Object + dst_grid : nes.Nes + Final projection Nes object + weight_matrix_path : str, None + Path to the weight matrix to read/create + kind : str + Kind of horizontal interpolation. choices = ['NearestNeighbour'] + n_neighbours : int + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. 
default = 4 + + Returns + ------- + tuple + weights and source data index + + """ + if isinstance(dst_grid, nes.PointsNes) and weight_matrix_path is not None: + if self.master: + warn("To point weight matrix cannot be saved.") + weight_matrix_path = None + + if weight_matrix_path is not None: + with FileLock(weight_matrix_path.replace('.nc', '.lock')): + if self.master: + if os.path.isfile(weight_matrix_path): + weight_matrix = read_weight_matrix(weight_matrix_path, comm=MPI.COMM_SELF) + if len(weight_matrix.lev['data']) != n_neighbours: + warn("The selected weight matrix does not have the same number of nearest neighbours." + + "Re-calculating again but not saving it.") + if kind in ['NearestNeighbour', 'NearestNeighbours', 'nn', 'NN']: + weight_matrix = create_nn_weight_matrix(self, dst_grid, n_neighbours=n_neighbours) + else: + raise NotImplementedError(kind) + else: + if kind in ['NearestNeighbour', 'NearestNeighbours', 'nn', 'NN']: + weight_matrix = create_nn_weight_matrix(self, dst_grid, n_neighbours=n_neighbours) + else: + raise NotImplementedError(kind) + if weight_matrix_path is not None: + weight_matrix.to_netcdf(weight_matrix_path) + else: + weight_matrix = None + else: + if kind in ['NearestNeighbour', 'NearestNeighbours', 'nn', 'NN']: + if self.master: + weight_matrix = create_nn_weight_matrix(self, dst_grid, n_neighbours=n_neighbours) + else: + weight_matrix = None + else: + raise NotImplementedError(kind) + + # Normalize to 1 + if self.master: + weights = np.array(np.array(weight_matrix.variables['inverse_dists']['data'], dtype=np.float64) / + np.array(weight_matrix.variables['inverse_dists']['data'], dtype=np.float64).sum(axis=1), + dtype=np.float64) + idx = np.array(weight_matrix.variables['idx']['data'][0], dtype=int) + else: + weights = None + idx = None + + weights = self.comm.bcast(weights, root=0) + idx = self.comm.bcast(idx, root=0) + # if isinstance(dst_grid, nes.PointsNes): + # print("weights 1 ->", weights.shape) + # print("idx 1 ->", idx.shape) + # weights = weights[:, dst_grid.write_axis_limits['x_min']:dst_grid.write_axis_limits['x_max']] + # idx = idx[dst_grid.write_axis_limits['x_min']:dst_grid.write_axis_limits['x_max']] + # else: + weights = weights[:, :, dst_grid.write_axis_limits['y_min']:dst_grid.write_axis_limits['y_max'], + dst_grid.write_axis_limits['x_min']:dst_grid.write_axis_limits['x_max']] + idx = idx[:, dst_grid.write_axis_limits['y_min']:dst_grid.write_axis_limits['y_max'], + dst_grid.write_axis_limits['x_min']:dst_grid.write_axis_limits['x_max']] + # print("weights 2 ->", weights.shape) + # print("idx 2 ->", idx.shape) + + return weights, idx + + +def read_weight_matrix(weight_matrix_path, comm=None, parallel_method='T'): + """ + + Parameters + ---------- + weight_matrix_path : str + Path of the weight matrix + comm : MPI.Communicator + Communicator to read the weight matrix + parallel_method : str + Nes parallel method to read the weight matrix. + + Returns + ------- + nes.Nes + Weight matrix + """ + weight_matrix = nes.open_netcdf(path=weight_matrix_path, comm=comm, parallel_method=parallel_method, balanced=True) + weight_matrix.load() + + return weight_matrix + + +def create_nn_weight_matrix(self, dst_grid, n_neighbours=4, info=False): + """ + To create the weight matrix with the nearest neighbours method. + + Parameters + ---------- + self : nes.Nes + Source projection Nes Object + dst_grid : nes.Nes + Final projection Nes object + n_neighbours : int + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. 
default = 4 + info : bool + Indicates if you want to print extra info during the interpolation process. + + Returns + ------- + nes.Nes + Weight Matrix + + """ + if info and self.master: + print("\tCreating Nearest Neighbour Weight Matrix with {0} neighbours".format(n_neighbours)) + sys.stdout.flush() + # Source + src_lat = np.array(self._lat['data'], dtype=np.float32) + src_lon = np.array(self._lon['data'], dtype=np.float32) + + # 1D to 2D coordinates + if len(src_lon.shape) == 1: + src_lon, src_lat = np.meshgrid(src_lon, src_lat) + + # Destination + dst_lat = np.array(dst_grid._lat['data'], dtype=np.float32) + dst_lon = np.array(dst_grid._lon['data'], dtype=np.float32) + + if isinstance(dst_grid, nes.PointsNes): + dst_lat = np.expand_dims(dst_grid._lat['data'], axis=0) + dst_lon = np.expand_dims(dst_grid._lon['data'], axis=0) + else: + # 1D to 2D coordinates + if len(dst_lon.shape) == 1: + dst_lon, dst_lat = np.meshgrid(dst_lon, dst_lat) + + # Calculate the N nearest neighbour inverse distance weights (and indices) + # from the grid cell centres of the source grid to each grid cell centre of + # the destination grid. The geographic longitude/latitude coordinates are + # first converted to Cartesian ECEF (Earth Centred, Earth Fixed) coordinates + # before calculating the distances. + src_mod_xy = lon_lat_to_cartesian(src_lon.flatten(), src_lat.flatten()) + dst_mod_xy = lon_lat_to_cartesian(dst_lon.flatten(), dst_lat.flatten()) + + # Generate the KD-tree using the source coordinates (i.e. the grid you are + # interpolating from) + src_tree = spatial.cKDTree(src_mod_xy) + + # Get the n-neighbour nearest distances/indices (ravel form) of the source + # grid cell centres from each destination grid cell centre + dists, idx = src_tree.query(dst_mod_xy, k=n_neighbours) + + weight_matrix = dst_grid.copy() + weight_matrix.time = [datetime(year=2000, month=1, day=1, hour=0, second=0, microsecond=0)] + weight_matrix._time = [datetime(year=2000, month=1, day=1, hour=0, second=0, microsecond=0)] + weight_matrix.last_level = None + weight_matrix.first_level = 0 + weight_matrix.hours_start = 0 + weight_matrix.hours_end = 0 + + weight_matrix.set_communicator(MPI.COMM_SELF) + # Take the reciprocals of the nearest neighbour distances + inverse_dists = np.reciprocal(dists) + inverse_dists_transf = inverse_dists.T.reshape((1, n_neighbours, dst_lon.shape[0], dst_lon.shape[1])) + weight_matrix.variables['inverse_dists'] = {'data': inverse_dists_transf, 'units': 'm'} + idx_transf = idx.T.reshape((1, n_neighbours, dst_lon.shape[0], dst_lon.shape[1])) + weight_matrix.variables['idx'] = {'data': idx_transf, 'units': ''} + weight_matrix.lev = {'data': np.arange(inverse_dists_transf.shape[1]), 'units': ''} + weight_matrix._lev = {'data': np.arange(inverse_dists_transf.shape[1]), 'units': ''} + return weight_matrix + + +def lon_lat_to_cartesian(lon, lat, radius=1.0): + """ + Convert longitude/latitude coordinates into Cartesian (x, y, z) coordinates on a sphere. + + Parameters + ---------- + lon : np.array + Longitude values + lat : np.array + Latitude values + radius : float + Radius of the sphere to get the distances.
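+ + Returns + ------- + np.array + Array of shape (npoints, 3) with the Cartesian x, y, z coordinates.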
+ """ + lon_r = np.radians(lon) + lat_r = np.radians(lat) + + x = radius * np.cos(lat_r) * np.cos(lon_r) + y = radius * np.cos(lat_r) * np.sin(lon_r) + z = radius * np.sin(lat_r) + + return np.column_stack([x, y, z]) diff --git a/nes/interpolation/vertical_interpolation.py b/nes/interpolation/vertical_interpolation.py new file mode 100644 index 0000000000000000000000000000000000000000..30eff96dd75ecd92ba2a2f6279a668ff04a72b8c --- /dev/null +++ b/nes/interpolation/vertical_interpolation.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python + +import sys +import nes +from scipy.interpolate import interp1d +import numpy as np +from copy import copy + + +def add_4d_vertical_info(self, info_to_add): + """ + To add the vertical information from other source. + + Parameters + ---------- + self : nes.Nes + Source Nes object + info_to_add : nes.Nes, str + Nes object with the vertical information as variable or str with the path to the NetCDF file that contains + the vertical data. + """ + vertical_var = list(self.concatenate(info_to_add)) + self.vertical_var_name = vertical_var[0] + + return None + + +def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', extrapolate=None, info=None): + """ + Vertical interpolation method + + Parameters + ---------- + self : Nes + Source Nes object + new_levels : list + List of new vertical levels + new_src_vertical + kind : str + Vertical interpolation type. + extrapolate : None, tuple, str + Extrapolate method (for non linear operations) + info: None, bool + Indicates if you want to print extra information + """ + if info is None: + info = self.print_info + + if new_src_vertical is not None: + self.add_4d_vertical_info(new_src_vertical) + + nz_new = len(new_levels) + + if self.vertical_var_name is None: + # To use current level data + for var_name, var_info in self.variables.items(): + if var_info['data'] is None: + self.load(var_name) + if var_name != self.vertical_var_name: + if info and self.master: + print("\t{var} vertical interpolation".format(var=var_name)) + sys.stdout.flush() + src_data = np.flip(var_info['data'], axis=1) + nt, nz, ny, nx = src_data.shape + dst_data = np.ma.masked_all((nt, nz_new, ny, nx)) + + for t in range(nt): + if info and self.master: + print('\t\tTime step: {0} ({1}/{2}))'.format(self.time[t], t + 1, nt)) + sys.stdout.flush() + for j in range(ny): + for i in range(nx): + if extrapolate is None: + fill_value = (np.float64(src_data[t, 0, j, i]), np.float64(src_data[t, -1, j, i])) + else: + fill_value = extrapolate + try: + # f = interp1d(np.array(self.lev['data'], dtype=np.float64), + # np.array(src_data[t, :, j, i], dtype=np.float64), + # kind=kind, + # bounds_error=False, + # fill_value=fill_value) + # dst_data[t, :, j, i] = np.array(f(new_levels), dtype=np.float32) + if kind == 'linear': + dst_data[t, :, j, i] = np.array( + np.interp(new_levels, + np.array(self.lev['data'], dtype=np.float64), + np.array(src_data[t, :, j, i], dtype=np.float64)), + dtype=src_data.dtype) + else: + dst_data[t, :, j, i] = np.array( + interp1d(np.array(self.lev['data'], dtype=np.float64), + np.array(src_data[t, :, j, i], dtype=np.float64), + kind=kind, + bounds_error=False, + fill_value=fill_value)(new_levels), + dtype=src_data.dtype) + except Exception as e: + print("time lat lon", t, j, i) + print("***********************") + print("LEVELS", self.lev['data']) + print("VAR", src_data[t, :, j, i]) + print("+++++++++++++++++++++++") + raise Exception(str(e)) + self.variables[var_name]['data'] = copy(dst_data) + # Updating level 
information + self.lev['data'] = new_levels + self._lev['data'] = new_levels + # raise NotImplementedError('Vertical information with no 4D vertical data is not implemented') + else: + src_levels = self.variables[self.vertical_var_name]['data'] + if self.vertical_var_name == 'layer_thickness': + src_levels = np.cumsum(np.flip(src_levels, axis=1), axis=1) + else: + src_levels = np.flip(src_levels, axis=1) + for var_name, var_info in self.variables.items(): + if var_info['data'] is None: + self.load(var_name) + if var_name != self.vertical_var_name: + if info and self.master: + print("\t{var} vertical interpolation".format(var=var_name)) + sys.stdout.flush() + src_data = np.flip(var_info['data'], axis=1) + nt, nz, ny, nx = src_data.shape + dst_data = np.empty((nt, nz_new, ny, nx), dtype=np.float32) + + for t in range(nt): + # if info and self.rank == self.size - 1: + if self.print_info and self.master: + print('\t\t{3} time step {0} ({1}/{2}))'.format(self.time[t], t+1, nt, var_name)) + sys.stdout.flush() + for j in range(ny): + for i in range(nx): + # level_array = None + # nl = src_levels[t, 0, j, i] - src_levels[t, 1, j, i] + # if nl > 0: + # level_max = np.max(src_levels[t, :, j, i]) + # level_array = np.asarray(new_levels) + # level_array = level_array.astype('float32') + # level_array = np.where(level_array > level_max, level_max, level_array) + # if nl < 0: + # level_min = np.min(src_levels[t, :, j, i]) + # level_array = np.asarray(new_levels) + # level_array = level_array.astype('float32') + # level_array = np.where(level_array < level_min, level_min, level_array) + curr_level_values = src_levels[t, :, j, i] + try: + # check if all values are identical or masked + if ((isinstance(curr_level_values, np.ndarray) and + (curr_level_values == curr_level_values[0]).all()) or + (isinstance(curr_level_values, np.ma.core.MaskedArray) and + curr_level_values.mask.all())): + kind = 'slinear' + else: + kind = kind # 'cubic' + if extrapolate is None: + fill_value = (np.float64(src_data[t, 0, j, i]), np.float64(src_data[t, -1, j, i])) + else: + fill_value = extrapolate + + if kind == 'linear': + dst_data[t, :, j, i] = np.array( + np.interp(new_levels, + np.array(src_levels[t, :, j, i], dtype=np.float64), + np.array(src_data[t, :, j, i], dtype=np.float64)), + dtype=src_data.dtype) + else: + dst_data[t, :, j, i] = np.array( + interp1d(np.array(src_levels[t, :, j, i], dtype=np.float64), + np.array(src_data[t, :, j, i], dtype=np.float64), + kind=kind, + bounds_error=False, + fill_value=fill_value)(new_levels), + dtype=src_data.dtype) + except Exception as e: + print("time lat lon", t, j, i) + print("***********************") + print("LEVELS", np.array(src_levels[t, :, j, i], dtype=np.float64)) + print("DATA", np.array(src_data[t, :, j, i], dtype=np.float64)) + print("METHOD", kind) + print("FILL_VALUE", fill_value) + print("+++++++++++++++++++++++") + raise Exception(str(e)) + # if level_array is not None: + # dst_data[t, :, j, i] = np.array(f(level_array), dtype=np.float32) + + self.variables[var_name]['data'] = copy(dst_data) + # print(self.variables[var_name]['data']) + + # Updating level information + new_lev_info = {'data': new_levels} + for var_attr, attr_info in self.variables[self.vertical_var_name].items(): + if var_attr not in ['data', 'dimensions', 'crs', 'grid_mapping']: + new_lev_info[var_attr] = copy(attr_info) + self.lev = new_lev_info + self._lev = new_lev_info + self.free_vars(self.vertical_var_name) + self.vertical_var_name = None + + return None diff --git a/nes/load_nes.py 
b/nes/load_nes.py new file mode 100644 index 0000000000000000000000000000000000000000..6d43b4d0841581d3d602a52274d86d37cd636c10 --- /dev/null +++ b/nes/load_nes.py @@ -0,0 +1,259 @@ +#!/usr/bin/env python + +import os +from mpi4py import MPI +from netCDF4 import Dataset +import warnings + +from .nc_projections import * + + +def open_netcdf(path, comm=None, xarray=False, info=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, + first_level=0, last_level=None, balanced=False): + """ + Open a netCDF file + + Parameters + ---------- + path : str + Path to the NetCDF file to read + comm : MPI.COMM + MPI communicator to use in that netCDF. Default: MPI.COMM_WORLD + xarray : bool + (Not working) Indicates if you want to use xarray. Default: False + info : bool + Indicates if you want to print (stdout) the reading/writing steps + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default: 'Y' (over Y axis) + accepted values: ['X', 'Y', 'T'] + balanced : bool + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + first_level : int + Index of the first level to use + last_level : int, None + Index of the last level to use. None if it is the last. + + Returns + ------- + Nes + Nes object. Variables read in lazy mode (only metadata) + """ + if comm is None: + comm = MPI.COMM_WORLD + else: + comm = comm + + if not os.path.exists(path): + raise FileNotFoundError(path) + if xarray: + dataset = None + else: + # dataset = Dataset(path, format="NETCDF4", mode='r', parallel=False) + if comm.Get_size() == 1: + dataset = Dataset(path, format="NETCDF4", mode='r', parallel=False) + else: + dataset = Dataset(path, format="NETCDF4", mode='r', parallel=True, comm=comm, info=MPI.Info()) + if __is_rotated(dataset): + # Rotated grids + nessy = RotatedNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + elif __is_points(dataset): + if parallel_method == 'Y': + warnings.warn("Parallel method cannot be 'Y' to create points NES. 
Setting it to 'X'") + parallel_method = 'X' + if __is_points_ghost(dataset): + # Points - GHOST + nessy = PointsNesGHOST(comm=comm, dataset=dataset, xarray=xarray, info=info, + parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + else: + # Points - non-GHOST + nessy = PointsNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + elif __is_lcc(dataset): + # Lambert conformal conic grids + nessy = LCCNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + elif __is_mercator(dataset): + # Mercator grids + nessy = MercatorNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + else: + # Regular grids + nessy = LatLonNes(comm=comm, dataset=dataset, xarray=xarray, info=info, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=False, balanced=balanced,) + + return nessy + + +def __is_rotated(dataset): + """ + Check if the netCDF is in rotated pole projection or not. + + Parameters + ---------- + dataset : Dataset + netcdf4-python opened dataset object + + Returns + ------- + value : bool + Indicated if the netCDF is a rotated one + """ + if 'rotated_pole' in dataset.variables.keys(): + return True + else: + return False + + +def __is_points(dataset): + """ + Check if the netCDF is a points dataset in non-GHOST format or not. + + Parameters + ---------- + dataset : Dataset + netcdf4-python opened dataset object + + Returns + ------- + value : bool + Indicated if the netCDF is a points non-GHOST one + """ + if 'station' in dataset.dimensions: + return True + else: + return False + + +def __is_points_ghost(dataset): + """ + Check if the netCDF is a points dataset in GHOST format or not. + + Parameters + ---------- + dataset : Dataset + netcdf4-python opened dataset object + + Returns + ------- + value : bool + Indicated if the netCDF is a points GHOST one + """ + if 'N_flag_codes' in dataset.dimensions and 'N_qa_codes' in dataset.dimensions: + return True + else: + return False + + +def __is_lcc(dataset): + """ + Check if the netCDF is in Lambert Conformal Conic (LCC) projection or not. + + Parameters + ---------- + dataset : Dataset + netcdf4-python opened dataset object + + Returns + ------- + value : bool + Indicated if the netCDF is a LCC one + """ + if 'Lambert_conformal' in dataset.variables.keys(): + return True + else: + return False + + +def __is_mercator(dataset): + """ + Check if the netCDF is in Mercator projection or not. 
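+ + The check looks for a 'mercator' variable in the dataset.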
+
+    Parameters
+    ----------
+    dataset : Dataset
+        netcdf4-python opened dataset object
+
+    Returns
+    -------
+    value : bool
+        Indicates if the netCDF is a Mercator one
+    """
+    if 'mercator' in dataset.variables.keys():
+        return True
+    else:
+        return False
+
+
+def concatenate_netcdfs(nessy_list, comm=None, info=False, parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0,
+                        first_level=0, last_level=None, balanced=False):
+    """
+    Concatenate variables from different sources.
+
+    Parameters
+    ----------
+    nessy_list : list
+        List of Nes objects or list of paths to concatenate
+    comm : MPI.Communicator
+        MPI communicator to use. Default: MPI.COMM_WORLD
+    info : bool
+        Indicates if you want to print (stdout) the reading/writing steps
+    avoid_first_hours : int
+        Number of hours to remove from the first time steps.
+    avoid_last_hours : int
+        Number of hours to remove from the last time steps.
+    parallel_method : str
+        Indicates the parallelization method that you want. Default: 'Y' (over the Y axis)
+        accepted values: ['X', 'Y', 'T']
+    balanced : bool
+        Indicates if you want a balanced parallelization or not. Balanced datasets cannot be written in chunking mode
+    first_level : int
+        Index of the first level to use
+    last_level : int, None
+        Index of the last level to use. None if it is the last.
+
+    Returns
+    -------
+    Nes
+        Nes object with all the variables
+    """
+    if not isinstance(nessy_list, list):
+        raise AttributeError("You must pass a list of NES objects or paths.")
+
+    if isinstance(nessy_list[0], str):
+        nessy_first = open_netcdf(nessy_list[0],
+                                  comm=comm,
+                                  parallel_method=parallel_method,
+                                  info=info,
+                                  avoid_first_hours=avoid_first_hours,
+                                  avoid_last_hours=avoid_last_hours,
+                                  first_level=first_level,
+                                  last_level=last_level,
+                                  balanced=balanced
+                                  )
+        nessy_first.load()
+    else:
+        nessy_first = nessy_list[0]
+    for i, aux_nessy in enumerate(nessy_list[1:]):
+        if isinstance(aux_nessy, str):
+            aux_nessy = open_netcdf(aux_nessy,
+                                    comm=comm,
+                                    parallel_method=parallel_method,
+                                    avoid_first_hours=avoid_first_hours,
+                                    avoid_last_hours=avoid_last_hours,
+                                    first_level=first_level,
+                                    last_level=last_level,
+                                    balanced=balanced
+                                    )
+        nessy_first.concatenate(aux_nessy)
+    return nessy_first
diff --git a/nes/nc_projections/__init__.py b/nes/nc_projections/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..384265791dea5ec6fb4dcada082558f858e5628c
--- /dev/null
+++ b/nes/nc_projections/__init__.py
@@ -0,0 +1,7 @@
+from .default_nes import Nes
+from .latlon_nes import LatLonNes
+from .rotated_nes import RotatedNes
+from .points_nes import PointsNes
+from .points_nes_ghost import PointsNesGHOST
+from .lcc_nes import LCCNes
+from .mercator_nes import MercatorNes
diff --git a/nes/nc_projections/default_nes.py b/nes/nc_projections/default_nes.py
new file mode 100644
index 0000000000000000000000000000000000000000..25280237464d9ede43933dddff8f7dcb5a01599e
--- /dev/null
+++ b/nes/nc_projections/default_nes.py
@@ -0,0 +1,2077 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import warnings
+import numpy as np
+import datetime
+from xarray import open_dataset
+from netCDF4 import Dataset, num2date, date2num
+from mpi4py import MPI
+from cfunits import Units
+from numpy.ma.core import MaskError
+from copy import deepcopy
+from ..interpolation import vertical_interpolation
+from ..interpolation import horizontal_interpolation
+
+
+class Nes(object):
+    """
+
+    Attributes
+    ----------
+    comm : MPI.Communicator
+        MPI communicator
+    rank : int
+        MPI rank
+    master : bool
+        True when rank == 0
+    size : int
+        Size of the communicator
+
print_info : bool + Indicates if you want to print reading/writing info + is_xarray : bool + (Not working) Indicates if you want to use xarray as default + __ini_path : str + Path to the original file to read when open_netcdf is called + hours_start : int + Number of hours to avoid from the first original values + hours_end : int + Number of hours to avoid from the last original values + dataset : xr.Dataset + (not working) xArray Dataset + netcdf : Dataset + netcdf4-python Dataset + variables : dict + Variables information. + The variables are stored in a dictionary with the var_name as key and another dictionary with the information. + The information dictionary contains the 'data' key with None (if the variable is not loaded) or the array values + and the other keys are the variable attributes or description. + _time : list + Complete list of original time step values. + _lev : dict + Vertical level dictionary with the complete 'data' key for all the values and the rest of the attributes. + _lat : dict + Latitudes dictionary with the complete 'data' key for all the values and the rest of the attributes. + _lon _ dict + Longitudes dictionary with the complete 'data' key for all the values and the rest of the attributes. + parallel_method : str + Parallel method to read/write. + Can be chosen any of the following axis to parallelize: 'T', 'Y' or 'X' + read_axis_limits : dict + Dictionary with the 4D limits of the rank data to read. + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + write_axis_limits : dict + Dictionary with the 4D limits of the rank data to write. + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + time : list + List of time steps of the rank data. + lev : dict + Vertical levels dictionary with the portion of 'data' corresponding to the rank values. + lat : dict + Latitudes dictionary with the portion of 'data' corresponding to the rank values. + lon : dict + Longitudes dictionary with the portion of 'data' corresponding to the rank values. + global_attrs : dict + Global attributes with the attribute name as key and data as values. + _var_dim : None, tuple + Tuple with the name of the Y and X dimensions for the variables + _lat_dim : None, tuple + Tuple with the name of the dimensions of the Latitude values + _lon_dim : None, tuple + Tuple with the name of the dimensions of the Longitude values + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + balanced=False, times=None, **kwargs): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + balanced : bool + Indicates if you want a balanced parallelization or not. Balanced dataset cannot be written in chunking mode + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + first_level : int + Index of the first level to use + last_level : int, None + Index of the last level to use. None if it is the last. 
+ create_nes : bool + Indicates if ypu want to create the object from scratch (True) or trough an existen file. + times : list, None + List of times to substitute the current ones while creation. + kwargs : + Projection dependent parameters to create it from scratch + """ + + # MPI Initialization + if comm is None: + self.comm = MPI.COMM_WORLD + else: + self.comm = comm + self.rank = self.comm.Get_rank() + self.master = self.rank == 0 + self.size = self.comm.Get_size() + + # General info + self.print_info = info + self.is_xarray = xarray + self.__ini_path = path + + # Selecting info + self.hours_start = avoid_first_hours + self.hours_end = avoid_last_hours + self.first_level = first_level + self.last_level = last_level + self.balanced = balanced + + # Define parallel method + self.parallel_method = parallel_method + + # NetCDF object + if create_nes: + + self.netcdf = None + self.dataset = None + + # Initialize variables + self.variables = {} + + # Complete dimensions + self._time = times + self._time_bnds = self.__get_time_bnds(create_nes) + self._lev = {'data': np.array([0]), + 'units': '', + 'positive': 'up'} + self._lat, self._lon = self._create_centroids(**kwargs) + + # Set axis limits for parallel reading + self.read_axis_limits = self.get_read_axis_limits() + + # Dimensions screening + self.time = self._time[self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + self.time_bnds = self._time_bnds + self.lev = deepcopy(self._lev) + + # Set NetCDF attributes + self.global_attrs = self.__get_global_attributes(create_nes) + + else: + + if dataset is not None: + if self.is_xarray: + self.dataset = dataset + self.netcdf = None + else: + self.dataset = None + self.netcdf = dataset + elif self.__ini_path is not None: + if self.is_xarray: + self.dataset = self.__open_dataset() + self.netcdf = None + else: + self.dataset = None + self.netcdf = self.__open_netcdf4() + + # Lazy variables + self.variables = self._get_lazy_variables() + + # Complete dimensions + self._time = self.__get_time() + self._time_bnds = self.__get_time_bnds() + self._lev = self._get_coordinate_dimension(['lev', 'level', 'lm', 'plev']) + self._lat = self._get_coordinate_dimension(['lat', 'latitude']) + self._lon = self._get_coordinate_dimension(['lon', 'longitude']) + + # Set axis limits for parallel reading + self.read_axis_limits = self.get_read_axis_limits() + + # Dimensions screening + self.time = self._time[self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + self.time_bnds = self._time_bnds + self.lev = self._get_coordinate_values(self._lev, 'Z') + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + + # Set axis limits for parallel writing + self.write_axis_limits = self.get_write_axis_limits() + + # Set NetCDF attributes + self.global_attrs = self.__get_global_attributes() + + # Writing options + self.zip_lvl = 0 + + # Dimensions information + self._var_dim = None + self._lat_dim = None + self._lon_dim = None + + self.vertical_var_name = None + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False, + parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize 
the class
+        xarray: bool:
+            (Not working) Indicates if you want to use xarray as default
+        avoid_first_hours : int
+            Number of hours to remove from the first time steps.
+        avoid_last_hours : int
+            Number of hours to remove from the last time steps.
+        parallel_method : str
+            Indicates the parallelization method that you want. Default over the Y axis
+            accepted values: ['X', 'Y', 'T']
+        balanced : bool
+            Indicates if you want a balanced parallelization or not. Balanced datasets cannot be written in chunking mode
+        first_level : int
+            Index of the first level to use
+        last_level : int, None
+            Index of the last level to use. None if it is the last.
+        create_nes : bool
+            Indicates if you want to create the object from scratch (True) or through an existing file (False).
+        """
+        new = Nes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, parallel_method=parallel_method,
+                  avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, first_level=first_level,
+                  last_level=last_level, create_nes=create_nes, balanced=balanced)
+        return new
+
+    def __del__(self):
+        """
+        Delete the Nes object and close all the opened Datasets.
+        """
+        self.close()
+        for var_name, var_info in self.variables.items():
+            del var_info['data']
+        del self.variables
+        try:
+            del self.time
+            del self._time
+            del self.time_bnds
+            del self._time_bnds
+            del self.lev
+            del self._lev
+            del self.lat
+            del self._lat
+            del self.lon
+            del self._lon
+        except AttributeError:
+            pass
+
+        del self
+
+        return None
+
+    def __getstate__(self):
+        """
+        Return the state of the object, excluding the attributes that cannot be pickled
+        (communicator, variables and open dataset).
+
+        Returns
+        -------
+        state : dict
+            Dictionary with the class parameters
+        """
+        d = self.__dict__
+        state = {k: d[k] for k in d if k not in ['comm', 'variables', 'netcdf']}
+
+        return state
+
+    def __setstate__(self, state):
+        """
+        Set the state of the class.
+
+        Parameters
+        ----------
+        state: dict
+            Dictionary with the class parameters
+        """
+        self.__dict__ = state
+
+        return None
+
+    def copy(self, copy_vars=False):
+        """
+        Copy the Nes object.
+
+        By default, the copy does not include the communicator, the dataset or the variables.
+
+        Parameters
+        ----------
+        copy_vars: bool
+            Indicates if you want to copy the variables (in lazy mode)
+
+        Returns
+        -------
+        nessy : Nes
+            Copy of the Nes object
+        """
+        nessy = deepcopy(self)
+        nessy.netcdf = None
+        if copy_vars:
+            nessy.variables = nessy._get_lazy_variables()
+        else:
+            nessy.variables = {}
+
+        return nessy
+
+    def get_full_times(self):
+        """
+        Return the complete list of original time steps.
+        """
+        return self._time
+
+    def get_full_levels(self):
+        """
+        Return the complete vertical level dictionary.
+        """
+        return self._lev
+
+    def clear_communicator(self):
+        """
+        Erase the communicator and the parallelization indexes.
+        """
+        self.comm = None
+        self.rank = 0
+        self.master = 0
+        self.size = 0
+
+        return None
+
+    def set_communicator(self, comm):
+        """
+        Set a new communicator and the corresponding parallelization indexes.
+
+        Parameters
+        ----------
+        comm: MPI.COMM
+            Communicator to be set
+        """
+        self.comm = comm
+        self.rank = self.comm.Get_rank()
+        self.master = self.rank == 0
+        self.size = self.comm.Get_size()
+
+        self.read_axis_limits = self.get_read_axis_limits()
+        self.write_axis_limits = self.get_write_axis_limits()
+        return None
+
+    def set_levels(self, levels):
+        """
+        Modify the original level values with new ones.
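+
+        A minimal sketch of the expected structure (the attribute names besides
+        'data' are an assumption taken from the default level definition; any
+        extra CF attribute is simply carried along):
+
+            new_levels = {'data': np.array([0., 50., 100.]),
+                          'units': 'm',
+                          'positive': 'up'}
+            nessy.set_levels(new_levels)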
+
+        Parameters
+        ----------
+        levels : dict
+            Dictionary with the new level information to be set
+        """
+        self._lev = deepcopy(levels)
+        self.lev = deepcopy(levels)
+
+        return None
+
+    def set_time_bnds(self, time_bnds):
+        """
+        Modify the original time bounds values with new ones.
+
+        Parameters
+        ----------
+        time_bnds : list
+            List with the new time bounds information to be set
+        """
+        correct_format = True
+        for time_bnd in np.array(time_bnds).flatten():
+            if not isinstance(time_bnd, datetime.datetime):
+                print("{0} is not a datetime object".format(time_bnd))
+                correct_format = False
+        if correct_format:
+            if len(self._time) == len(time_bnds):
+                self._time_bnds = deepcopy(time_bnds)
+                self.time_bnds = deepcopy(time_bnds)
+            else:
+                msg = "WARNING!!! "
+                msg += "The given time bounds list has a different length than the time array. "
+                msg += "(time:{0}, bnds:{1}). Time bounds will not be set.".format(len(self._time), len(time_bnds))
+                warnings.warn(msg)
+        else:
+            msg = 'WARNING!!! '
+            msg += 'There is at least one element in the time bounds to be set that is not a datetime object. '
+            msg += 'Time bounds will not be set.'
+            warnings.warn(msg)
+
+        return None
+
+    def free_vars(self, var_list):
+        """
+        Erase the selected variables from the variables information.
+
+        Parameters
+        ----------
+        var_list : list, str
+            List (or single string) of the variables to be erased
+        """
+        if isinstance(var_list, str):
+            var_list = [var_list]
+
+        if self.is_xarray:
+            self.dataset = self.dataset.drop_vars(var_list)
+            self.variables = self._get_lazy_variables()
+        else:
+            for var_name in var_list:
+                if self.variables is not None:
+                    if var_name in self.variables:
+                        del self.variables[var_name]
+
+        return None
+
+    def keep_vars(self, var_list):
+        """
+        Keep the selected variables and erase the rest.
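+
+        For instance, to drop everything except ozone (the variable name here
+        is hypothetical):
+
+            nessy.keep_vars('O3')  # same as nessy.keep_vars(['O3'])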
+
+        Parameters
+        ----------
+        var_list : list, str
+            List (or single string) of the variables to be kept
+        """
+        if isinstance(var_list, str):
+            var_list = [var_list]
+
+        to_remove = list(set(self.variables.keys()).difference(set(var_list)))
+
+        self.free_vars(to_remove)
+
+        return None
+
+    def get_time_interval(self):
+        """
+        Calculate the interval of hours between time steps.
+
+        Returns
+        -------
+        int
+            Number of hours between time steps
+        """
+        time_interval = self._time[1] - self._time[0]
+        time_interval = int(time_interval.seconds // 3600)
+
+        return time_interval
+
+    def sel_time(self, time, copy=False):
+        """
+        Select only one time step.
+
+        Parameters
+        ----------
+        time : datetime.datetime
+            Time stamp to select
+        copy : bool
+            Indicates if you want a copy with the selected time step (True) or to modify the existing one (False)
+
+        Returns
+        -------
+        Nes
+            Nes object with the data (and metadata) of the selected time step
+        """
+        if copy:
+            aux_nessy = self.copy(copy_vars=False)
+            aux_nessy.comm = self.comm
+        else:
+            aux_nessy = self
+
+        aux_nessy.hours_start = 0
+        aux_nessy.hours_end = 0
+
+        idx_time = aux_nessy.time.index(time)
+
+        aux_nessy.time = [self.time[idx_time]]
+        aux_nessy._time = aux_nessy.time
+        for var_name, var_info in self.variables.items():
+            if copy:
+                aux_nessy.variables[var_name] = {}
+                for att_name, att_value in var_info.items():
+                    if att_name == 'data':
+                        if att_value is None:
+                            raise ValueError("{} data not loaded".format(var_name))
+                        aux_nessy.variables[var_name][att_name] = att_value[[idx_time]]
+                    else:
+                        aux_nessy.variables[var_name][att_name] = att_value
+            else:
+                aux_nessy.variables[var_name]['data'] = aux_nessy.variables[var_name]['data'][[idx_time]]
+        return aux_nessy
+
+    # ==================================================================================================================
+    # Statistics
+    # ==================================================================================================================
+
+    def last_time_step(self):
+        """
+        Modify the variables to keep only the last time step.
+        """
+        if self.parallel_method == 'T':
+            raise NotImplementedError("Statistics are not implemented on the time axis parallelization method.")
+        aux_time = self._time[0].replace(hour=0, minute=0, second=0, microsecond=0)
+        self._time = [aux_time]
+        self.time = [aux_time]
+
+        for var_name, var_info in self.variables.items():
+            if var_info['data'] is None:
+                self.load(var_name)
+            aux_data = var_info['data'][-1, :]
+            if len(aux_data.shape) == 3:
+                aux_data = aux_data.reshape((1, aux_data.shape[0], aux_data.shape[1], aux_data.shape[2]))
+            self.variables[var_name]['data'] = aux_data
+        self.hours_start = 0
+        self.hours_end = 0
+
+        return None
+
+    def daily_statistic(self, op, type_op='calendar'):
+        """
+        Calculate daily statistics.
+
+        Parameters
+        ----------
+        op : str
+            Statistic to perform. Accepted values: "max", "mean" and "min"
+        type_op : str
+            Type of statistic to perform. Accepted values: "calendar", "alltsteps", and "withoutt0"
+            - "calendar": Calculate the statistic using the time metadata.
+              It will avoid single time step by day calculations
+            - "alltsteps": Calculate a single time statistic with all the time steps.
+            - "withoutt0": Calculate a single time statistic with all the time steps avoiding the first one.
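+
+            As an illustrative call (a sketch, assuming hourly variables that
+            are loaded or can be loaded lazily):
+
+                nessy.daily_statistic(op='mean', type_op='calendar')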
+ """ + if self.parallel_method == 'T': + raise NotImplementedError("Statistics are not implemented on time axis paralelitation method.") + time_interval = self.get_time_interval() + if type_op == 'calendar': + aux_time_bounds = [] + aux_time = [] + day_list = [date_aux.day for date_aux in self.time] + for var_name, var_info in self.variables.items(): + if var_info['data'] is None: + self.load(var_name) + stat_data = None + for day in np.unique(day_list): + idx_first = next(i for i, val in enumerate(day_list, 0) if val == day) + idx_last = len(day_list) - next(i for i, val in enumerate(reversed(day_list), 1) if val == day) + if idx_first != idx_last: # To avoid single time step statistic + if idx_last != len(day_list): + if op == 'mean': + data_aux = var_info['data'][idx_first:idx_last + 1, :, :, :].mean(axis=0) + elif op == 'max': + data_aux = var_info['data'][idx_first:idx_last + 1, :, :, :].max(axis=0) + elif op == 'min': + data_aux = var_info['data'][idx_first:idx_last + 1, :, :, :].min(axis=0) + else: + raise NotImplementedError("Statistic operation '{0}' is not implemented.".format(op)) + aux_time_bounds.append([self.time[idx_first], self.time[idx_last]]) + else: + if op == 'mean': + data_aux = var_info['data'][idx_first:, :, :, :].mean(axis=0) + elif op == 'max': + data_aux = var_info['data'][idx_first:, :, :, :].max(axis=0) + elif op == 'min': + data_aux = var_info['data'][idx_first:, :, :, :].min(axis=0) + else: + raise NotImplementedError("Statistic operation '{0}' is not implemented.".format(op)) + aux_time_bounds.append([self.time[idx_first], self.time[-1]]) + + data_aux = data_aux.reshape((1, data_aux.shape[0], data_aux.shape[1], data_aux.shape[2])) + aux_time.append(self.time[idx_first].replace(hour=0, minute=0, second=0)) + # Append over time dimension + if stat_data is None: + stat_data = data_aux.copy() + else: + stat_data = np.vstack([stat_data, data_aux]) + self.variables[var_name]['data'] = stat_data + self.variables[var_name]['cell_methods'] = "time: {0} (interval: {1}hr)".format(op, time_interval) + self.time = aux_time + self._time = self.time + + self.set_time_bnds(aux_time_bounds) + + elif type_op == 'alltsteps': + for var_name, var_info in self.variables.items(): + if var_info['data'] is None: + self.load(var_name) + if op == 'mean': + aux_data = var_info['data'].mean(axis=0) + elif op == 'max': + aux_data = var_info['data'].max(axis=0) + elif op == 'min': + aux_data = var_info['data'].min(axis=0) + else: + raise NotImplementedError("Statistic operation '{0}' is not implemented.".format(op)) + if len(aux_data.shape) == 3: + aux_data = aux_data.reshape((1, aux_data.shape[0], aux_data.shape[1], aux_data.shape[2])) + self.variables[var_name]['data'] = aux_data + self.variables[var_name]['cell_methods'] = "time: {0} (interval: {1}hr)".format(op, time_interval) + + aux_time = self.time[0].replace(hour=0, minute=0, second=0, microsecond=0) + aux_time_bounds = [[self.time[0], self.time[-1]]] + self.time = [aux_time] + self._time = self.time + + self.set_time_bnds(aux_time_bounds) + + elif type_op == 'withoutt0': + for var_name, var_info in self.variables.items (): + if var_info['data'] is None: + self.load(var_name) + if op == 'mean': + aux_data = var_info['data'][1:, :].mean(axis=0) + elif op == 'max': + aux_data = var_info['data'][1:, :].max(axis=0) + elif op == 'min': + aux_data = var_info['data'][1:, :].min(axis=0) + else: + raise NotImplementedError("Statistic operation '{0}' is not implemented.".format(op)) + if len(aux_data.shape) == 3: + aux_data = 
aux_data.reshape((1, aux_data.shape[0], aux_data.shape[1], aux_data.shape[2])) + self.variables[var_name]['data'] = aux_data + self.variables[var_name]['cell_methods'] = "time: {0} (interval: {1}hr)".format(op, time_interval) + aux_time = self._time[1].replace(hour=0, minute=0, second=0, microsecond=0) + aux_time_bounds = [[self._time[1], self._time[-1]]] + self.time = [aux_time] + self._time = self.time + + self.set_time_bnds(aux_time_bounds) + else: + raise NotImplementedError("Statistic operation type '{0}' is not implemented.".format(type_op)) + self.hours_start = 0 + self.hours_end = 0 + + return None + + # ================================================================================================================== + # Reading + # ================================================================================================================== + def get_read_axis_limits(self): + """ + Calculate the 4D reading axis limits depending on if them have to balanced or not. + + Returns + ------- + dict + Dictionary with the 4D limits of the rank data to read. + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + """ + if self.balanced: + return self.get_read_axis_limits_balanced() + else: + return self.get_read_axis_limits_unbalanced() + + def get_read_axis_limits_unbalanced(self): + """ + Calculate the 4D reading axis limits + + Returns + ------- + dict + Dictionary with the 4D limits of the rank data to read. + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + """ + axis_limits = {'x_min': None, 'x_max': None, + 'y_min': None, 'y_max': None, + 'z_min': None, 'z_max': None, + 't_min': None, 't_max': None} + + if self.parallel_method == 'Y': + y_len = self._lat['data'].shape[0] + if y_len < self.size: + raise IndexError('More processors (size={0}) selected than Y elements (size={1})'.format(self.size, y_len)) + axis_limits['y_min'] = (y_len // self.size) * self.rank + if self.rank + 1 < self.size: + axis_limits['y_max'] = (y_len // self.size) * (self.rank + 1) + # Spin up + axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) + axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + elif self.parallel_method == 'X': + x_len = self._lon['data'].shape[-1] + if x_len < self.size: + raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format(self.size, x_len)) + axis_limits['x_min'] = (x_len // self.size) * self.rank + if self.rank + 1 < self.size: + axis_limits['x_max'] = (x_len // self.size) * (self.rank + 1) + # Spin up + axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) + axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + elif self.parallel_method == 'T': + first_time_idx = self.get_time_id(self.hours_start, first=True) + last_time_idx = self.get_time_id(self.hours_end, first=False) + t_len = last_time_idx - first_time_idx + if t_len < self.size: + raise IndexError('More processors (size={0}) selected than T elements (size={1})'.format(self.size, t_len)) + axis_limits['t_min'] = ((t_len // self.size) * self.rank) + first_time_idx + if self.rank + 1 < self.size: + axis_limits['t_max'] = ((t_len // self.size) * (self.rank + 1)) + first_time_idx + + else: + raise NotImplementedError("Parallel method '{meth}' is not implemented. 
Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'Y', 'T'])) + + # Vertical levels selection: + axis_limits['z_min'] = self.first_level + if self.last_level == -1 or self.last_level is None: + self.last_level = None + elif self.last_level +1 == len(self._lev['data']): + self.last_level = None + else: + self.last_level += 1 + axis_limits['z_max'] = self.last_level + + return axis_limits + + def get_read_axis_limits_balanced(self): + """ + Calculate the 4D reading balanced axis limits + + Returns + ------- + dict + Dictionary with the 4D limits of the rank data to read. + t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + """ + fid_dist = {} + to_add = None + if self.parallel_method == 'Y': + len_to_split = self._lat['data'].shape[0] + if len_to_split < self.size: + raise IndexError('More processors (size={0}) selected than Y elements (size={1})'.format( + self.size, len_to_split)) + min_axis = 'y_min' + max_axis = 'y_max' + elif self.parallel_method == 'X': + len_to_split = self._lon['data'].shape[-1] + if len_to_split < self.size: + raise IndexError('More processors (size={0}) selected than X elements (size={1})'.format( + self.size, len_to_split)) + min_axis = 'x_min' + max_axis = 'x_max' + elif self.parallel_method == 'T': + first_time_idx = self.get_time_id(self.hours_start, first=True) + last_time_idx = self.get_time_id(self.hours_end, first=False) + len_to_split = last_time_idx - first_time_idx + if len_to_split < self.size: + raise IndexError('More processors (size={0}) selected than T elements (size={1})'.format( + self.size, len_to_split)) + min_axis = 't_min' + max_axis = 't_max' + to_add = first_time_idx + else: + raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'Y', 'T'])) + + procs_len = len_to_split // self.size + procs_rows_extended = len_to_split - (procs_len * self.size) + + rows_sum = 0 + for proc in range(self.size): + fid_dist[proc] = {'x_min': None, 'x_max': None, + 'y_min': None, 'y_max': None, + 'z_min': None, 'z_max': None, + 't_min': None, 't_max': None} + if proc < procs_rows_extended: + aux_rows = procs_len + 1 + else: + aux_rows = procs_len + + len_to_split -= aux_rows + if len_to_split < 0: + rows = len_to_split + aux_rows + else: + rows = aux_rows + + fid_dist[proc][min_axis] = rows_sum + fid_dist[proc][max_axis] = rows_sum + rows + + if to_add is not None: + fid_dist[proc][min_axis] += to_add + fid_dist[proc][max_axis] += to_add + + # # Last element + # if len_to_split == 0 and to_add == 0: + # fid_dist[proc][max_axis] = None + + rows_sum += rows + + axis_limits = fid_dist[self.rank] + + if self.parallel_method != 'T': + # Spin up + axis_limits['t_min'] = self.get_time_id(self.hours_start, first=True) + axis_limits['t_max'] = self.get_time_id(self.hours_end, first=False) + + # Vertical levels selection: + axis_limits['z_min'] = self.first_level + if self.last_level == -1 or self.last_level is None: + self.last_level = None + elif self.last_level + 1 == len(self._lev['data']): + self.last_level = None + else: + self.last_level += 1 + axis_limits['z_max'] = self.last_level + + return axis_limits + + def get_time_id(self, hours, first=True): + """ + Get the index of the corresponding time value. 
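+
+        As a worked example (assuming hourly, evenly spaced time steps): for a
+        file starting at 2022-05-03 12:00, get_time_id(2, first=True) returns
+        index 2 (the 14:00 step), while get_time_id(2, first=False) returns the
+        exclusive end index len(self._time) - 2.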
+ + Parameters + ---------- + hours : int + Number of hours to avoid + first : bool + Indicates if you want to avoid from the first hours (True) or from the last (False) + Default: True + + Returns + ------- + int + Index of the time array + """ + from datetime import timedelta + + if first: + idx = self._time.index(self._time[0] + timedelta(hours=hours)) + else: + idx = self._time.index(self._time[-1] - timedelta(hours=hours)) + 1 + + return idx + + def open(self): + """ + Open the NetCDF + """ + if self.is_xarray: + self.dataset = self.__open_dataset() + self.netcdf = None + else: + self.dataset = None + self.netcdf = self.__open_netcdf4() + + return None + + def __open_dataset(self): + """ + Open the NetCDF with xarray + + Returns + ------- + dataset : xr.Dataset + Opened dataset + """ + if self.master: + warnings.filterwarnings('ignore') # Disabling warnings while reading MONARCH original file + dataset = open_dataset(self.__ini_path, decode_coords='all') + warnings.filterwarnings('default') # Re-activating warnings + else: + dataset = None + dataset = self.comm.bcast(dataset, root=0) + self.dataset = dataset + + return dataset + + def __open_netcdf4(self, mode='r'): + """ + Open the NetCDF with netcdf4-python + + Parameters + ---------- + mode : str + Inheritance from mode parameter from https://unidata.github.io/netcdf4-python/#Dataset.__init__ + Default: 'r' (read-only) + Returns + ------- + netcdf : Dataset + Opened dataset + """ + if self.size == 1: + netcdf = Dataset(self.__ini_path, format="NETCDF4", mode=mode, parallel=False) + else: + netcdf = Dataset(self.__ini_path, format="NETCDF4", mode=mode, parallel=True, comm=self.comm, + info=MPI.Info()) + self.netcdf = netcdf + + return netcdf + + def close(self): + """ + Close the NetCDF with netcdf4-python + """ + if self.netcdf is not None: + self.netcdf.close() + self.netcdf = None + + return None + + def __get_dates_from_months(self, time, units, calendar): + """ + Calculates the number of days since the first date + in the 'time' list and store in new list: + This is useful when the units are 'months since', + which cannot be transformed to dates using num2date + + Parameter + --------- + time: list + Original time + units: str + CF compliant time units + calendar: str + Original calendar + + Returns + ------- + time: list + CF compliant time + """ + + start_date_str = time.units.split('since')[1].lstrip() + start_date = datetime.datetime(int(start_date_str[0:4]), + int(start_date_str[5:7]), + int(start_date_str[8:10])) + new_time = [] + + for current_date in time: + + # Transform current_date into number of days since base date + current_date = num2date(current_date, self.__parse_time_unit(units), calendar=calendar) + + # Calculate number of days between base date and the other dates + n_days = (current_date - start_date).days + + # Store in list + new_time.append(n_days) + + return new_time + + def __parse_time(self, time): + """ + Parses the time to be CF compliant + + Parameters + ---------- + time: str + Original time + + Returns + ------- + time : str + CF compliant time + """ + + units = time.units + if not hasattr(time, 'calendar'): + calendar = 'standard' + else: + calendar = time.calendar + + if 'months since' in time.units: + units = 'days since ' + time.units.split('since')[1].lstrip() + time = self.__get_dates_from_months(time, units, calendar) + + return time, units, calendar + + @staticmethod + def __parse_time_unit(t_units): + """ + Parses the time units to be CF compliant + + Parameters + ---------- + t_units : 
str + Original time units + + Returns + ------- + t_units : str + CF compliant time units + """ + if 'h @' in t_units: + t_units = 'hour since {0}-{1}-{2} {3}:{4}:{5} UTC'.format( + t_units[4:8], t_units[8:10], t_units[10:12], t_units[13:15], t_units[15:17], t_units[17:-4]) + + return t_units + + def __get_time(self): + """ + Get the NetCDF file time values + + Returns + ------- + time : list + List of times (datetime.datetime) of the NetCDF data + """ + if self.is_xarray: + time = self.variables['time'] + else: + if self.master: + nc_var = self.netcdf.variables['time'] + nc_var, units, calendar = self.__parse_time(nc_var) + time = num2date(nc_var[:], self.__parse_time_unit(units), calendar=calendar) + time = [aux.replace(second=0, microsecond=0) for aux in time] + else: + time = None + time = self.comm.bcast(time, root=0) + self.free_vars('time') + + return time + + def __get_time_bnds(self, create_nes=False): + """ + Get the NetCDF time bounds values + + Parameters + ---------- + create_nes : bool + Indicated if the object is created from scratch or from an existing file + + Returns + ------- + time : list + List of time bounds (datetime) of the NetCDF data + """ + if self.is_xarray: + time_bnds = self.variables['time_bnds'] + else: + if self.master: + if not create_nes: + if 'time_bnds' in self.netcdf.variables.keys(): + time = self.netcdf.variables['time'] + nc_var = self.netcdf.variables['time_bnds'] + time_bnds = num2date(nc_var[:], self.__parse_time_unit(time.units), calendar=time.calendar).tolist() + else: + time_bnds = None + else: + time_bnds = None + else: + time_bnds = None + time_bnds = self.comm.bcast(time_bnds, root=0) + self.free_vars('time_bnds') + return time_bnds + + def _get_coordinate_dimension(self, possible_names): + """ + Read the coordinate dimension data. + + This will read the complete data of the coordinate + + Parameters + ---------- + possible_names: list, str + List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']) + + Returns + ------- + nc_var : dict + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + """ + if isinstance(possible_names, str): + possible_names = [possible_names] + + try: + dimension_name = set(possible_names).intersection(set(self.variables.keys())).pop() + if self.is_xarray: + nc_var = self.dataset[dimension_name] + else: + nc_var = self.variables[dimension_name].copy() + nc_var['data'] = self.netcdf.variables[dimension_name][:] + if hasattr(nc_var, 'units'): + if nc_var['units'] in ['unitless', '-']: + nc_var['units'] = '' + self.free_vars(dimension_name) + except KeyError: + nc_var = {'data': np.array([0]), + 'units': '' + } + + return nc_var + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. 
Accepted values: ['Z', 'Y', 'X'] + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank + """ + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'Y': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['y_min']:self.read_axis_limits['y_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + elif coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + elif coordinate_axis == 'Z': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['z_min']:self.read_axis_limits['z_max']] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + + return values + + def _get_lazy_variables(self): + """ + Get all the variables information. + + Returns + ------- + variables : dict + Dictionary with the variable name as key and another dictionary as value. + De value dictionary will have the 'data' key with None as value and all the variable attributes as the + other keys. + e.g. + {'var_name_1': {'data': None, 'attr_1': value_1_1, 'attr_2': value_1_2, ...}, + 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, + ...} + """ + if self.is_xarray: + variables = self.dataset.variables + else: + if self.master: + variables = {} + for var_name, var_info in self.netcdf.variables.items(): + variables[var_name] = {} + variables[var_name]['data'] = None + variables[var_name]['dimensions'] = var_info.dimensions + + for attrname in var_info.ncattrs(): + # Avoiding some attributes + if attrname not in ['missing_value', '_FillValue']: + value = getattr(var_info, attrname) + if value in ['unitless', '-']: + value = '' + variables[var_name][attrname] = value + else: + variables = None + variables = self.comm.bcast(variables, root=0) + + return variables + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank. 
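+
+            Whatever the on-disk rank, 2D and 3D variables are reshaped so that
+            the returned array is always 4D (time, lev, y, x); only variables
+            with fewer than 2 dimensions are returned unchanged.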
+ """ + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + # Read data in 4 dimensions + if len(var_dims) < 2: + data = nc_var[:] + elif len(var_dims) == 2: + data = nc_var[self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + data = data.reshape(1, 1, data.shape[-2], data.shape[-1]) + elif len(var_dims) == 3: + data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + data = data.reshape(data.shape[-3], 1, data.shape[-2], data.shape[-1]) + elif len(var_dims) == 4: + data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + self.read_axis_limits['z_min']:self.read_axis_limits['z_max'], + self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + # elif len(var_dims) == 5: + # data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + # :, + # self.read_axis_limits['z_min']:self.read_axis_limits['z_max'], + # self.read_axis_limits['y_min']:self.read_axis_limits['y_max'], + # self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError('Error with {0}. Only can be read netCDF with 4 dimensions or less'.format( + var_name)) + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def load(self, var_list=None): + """ + Load of the selected variables. + + That function will fill the variable 'data' key with the corresponding values. + + Parameters + ---------- + var_list : list, str + List (or single string) of the variables to be loaded + """ + if self.netcdf is None: + self.__open_dataset() + close = True + else: + close = False + + if isinstance(var_list, str): + var_list = [var_list] + elif var_list is None: + var_list = list(self.variables.keys()) + + for i, var_name in enumerate(var_list): + if self.print_info: + print("Rank {0:03d}: Loading {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(var_list))) + if self.variables[var_name]['data'] is None: + self.variables[var_name]['data'] = self._read_variable(var_name) + if self.print_info: + print("Rank {0:03d}: Loaded {1} var ({2})".format( + self.rank, var_name, self.variables[var_name]['data'].shape)) + + if close: + self.close() + + return None + + def to_dtype(self, data_type='float32'): + for var_name, var_info in self.variables.items(): + if var_info['data'] is not None: + self.variables[var_name]['data'] = self.variables[var_name]['data'].astype(data_type) + return None + + def concatenate(self, aux_nessy): + """ + Concatenate different variables into the same nes object + + Parameters + ---------- + aux_nessy : Nes, str + Nes object or str with the path to the NetCDF file that contains the variables to add. 
+
+        Returns
+        -------
+        list
+            List of var names added
+        """
+        if isinstance(aux_nessy, str):
+            aux_nessy = self.new(path=aux_nessy, comm=self.comm, parallel_method=self.parallel_method,
+                                 xarray=self.is_xarray,
+                                 avoid_first_hours=self.hours_start, avoid_last_hours=self.hours_end,
+                                 first_level=self.first_level, last_level=self.last_level)
+            new = True
+        else:
+            new = False
+        for var_name, var_info in aux_nessy.variables.items():
+            if var_info['data'] is None:
+                aux_nessy.load(var_name)
+
+        new_vars_added = []
+        for new_var_name, new_var_data in aux_nessy.variables.items():
+            if new_var_name not in self.variables.keys():
+                self.variables[new_var_name] = deepcopy(new_var_data)
+                new_vars_added.append(new_var_name)
+
+        if new:
+            del aux_nessy
+
+        return new_vars_added
+
+    def __get_global_attributes(self, create_nes=False):
+        """
+        Read the netCDF global attributes.
+
+        Parameters
+        ----------
+        create_nes : bool
+            Indicates if the object is created from scratch or from an existing file
+
+        Returns
+        -------
+        gl_attrs : dict
+            Dictionary with the netCDF global attributes
+        """
+        gl_attrs = {}
+        if self.is_xarray:
+            gl_attrs = self.dataset.attrs
+        else:
+            if not create_nes:
+                for attrname in self.netcdf.ncattrs():
+                    gl_attrs[attrname] = getattr(self.netcdf, attrname)
+
+        return gl_attrs
+
+    # ==================================================================================================================
+    # Writing
+    # ==================================================================================================================
+    def get_write_axis_limits(self):
+        """
+        Calculate the 4D writing axis limits depending on whether they have to be balanced or not.
+
+        Returns
+        -------
+        dict
+            Dictionary with the 4D limits of the rank data to write.
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+        """
+        if self.balanced:
+            return self.get_write_axis_limits_balanced()
+        else:
+            return self.get_write_axis_limits_unbalanced()
+
+    def get_write_axis_limits_unbalanced(self):
+        """
+        Calculate the 4D writing axis limits.
+
+        Returns
+        -------
+        dict
+            Dictionary with the 4D limits of the rank data to write.
+            t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max
+        """
+        axis_limits = {'x_min': None, 'x_max': None,
+                       'y_min': None, 'y_max': None,
+                       'z_min': None, 'z_max': None,
+                       't_min': None, 't_max': None}
+
+        if self.parallel_method == 'Y':
+            y_len = self._lat['data'].shape[0]
+            axis_limits['y_min'] = (y_len // self.size) * self.rank
+            if self.rank + 1 < self.size:
+                axis_limits['y_max'] = (y_len // self.size) * (self.rank + 1)
+        elif self.parallel_method == 'X':
+            x_len = self._lon['data'].shape[-1]
+            axis_limits['x_min'] = (x_len // self.size) * self.rank
+            if self.rank + 1 < self.size:
+                axis_limits['x_max'] = (x_len // self.size) * (self.rank + 1)
+        elif self.parallel_method == 'T':
+            t_len = len(self._time)
+            axis_limits['t_min'] = ((t_len // self.size) * self.rank)
+            if self.rank + 1 < self.size:
+                axis_limits['t_max'] = (t_len // self.size) * (self.rank + 1)
+        else:
+            raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format(
+                meth=self.parallel_method, accept=['X', 'Y', 'T']))
+
+        return axis_limits
+
+    def get_write_axis_limits_balanced(self):
+        """
+        Calculate the 4D writing balanced axis limits.
+
+        Returns
+        -------
+        dict
+            Dictionary with the 4D limits of the rank data to write.
+ t_min, t_max, z_min, z_max, y_min, y_max, x_min and x_max + """ + fid_dist = {} + if self.parallel_method == 'Y': + len_to_split = self._lat['data'].shape[0] + min_axis = 'y_min' + max_axis = 'y_max' + elif self.parallel_method == 'X': + len_to_split = self._lon['data'].shape[-1] + min_axis = 'x_min' + max_axis = 'x_max' + elif self.parallel_method == 'T': + len_to_split = len(self._time) + min_axis = 't_min' + max_axis = 't_max' + else: + raise NotImplementedError("Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'Y', 'T'])) + + procs_len = len_to_split // self.size + procs_rows_extended = len_to_split - (procs_len * self.size) + + rows_sum = 0 + for proc in range(self.size): + fid_dist[proc] = {'x_min': None, 'x_max': None, + 'y_min': None, 'y_max': None, + 'z_min': None, 'z_max': None, + 't_min': None, 't_max': None} + if proc < procs_rows_extended: + aux_rows = procs_len + 1 + else: + aux_rows = procs_len + + len_to_split -= aux_rows + if len_to_split < 0: + rows = len_to_split + aux_rows + else: + rows = aux_rows + + fid_dist[proc][min_axis] = rows_sum + fid_dist[proc][max_axis] = rows_sum + rows + + # Last element + if len_to_split == 0: + fid_dist[proc][max_axis] = None + + rows_sum += rows + + axis_limits = fid_dist[self.rank] + + return axis_limits + + def _create_dimensions(self, netcdf): + """ + Create the 'lev' and 'time' dimension. + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + """ + netcdf.createDimension('time', None) + if self._time_bnds is not None: + netcdf.createDimension('time_nv', 2) + netcdf.createDimension('lev', len(self.lev['data'])) + netcdf.createDimension('lon', len(self._lon['data'])) + netcdf.createDimension('lat', len(self._lat['data'])) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'lev' and 'time' variables. 
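+
+        Besides 'time' and 'lev', this also writes the 'time_bnds' variable
+        (when time bounds are set) and the 'lat' and 'lon' variables, switching
+        them to collective I/O mode when more than one MPI process is writing.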
+ + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + """ + # TIMES + time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + time_var.units = 'hours since {0}'.format( + self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S")) + time_var.standard_name = "time" + time_var.calendar = 'standard' + time_var.long_name = "time" + if self._time_bnds is not None: + time_var.bounds = 'time_bnds' + if self.size > 1: + time_var.set_collective(True) + time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True): + self.get_time_id(self.hours_end, first=False)], + time_var.units, time_var.calendar) + + # TIME BOUNDS + if self._time_bnds is not None: + time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, + complevel=self.zip_lvl) + if self.size > 1: + time_bnds_var.set_collective(True) + time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') + + # LEVELS + lev = netcdf.createVariable('lev', np.float64, ('lev',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + if 'units' in self._lev.keys(): + lev.units = Units(self._lev['units'], formatted=True).units + else: + lev.units = '' + lev.positive = 'up' + if self.size > 1: + lev.set_collective(True) + lev[:] = self._lev['data'] + + # LATITUDES + lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if self.size > 1: + lats.set_collective(True) + lats[:] = self._lat['data'] + + # LONGITUDES + lons = netcdf.createVariable('lon', np.float64, self._lon_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if self.size > 1: + lons.set_collective(True) + lons[:] = self._lon['data'] + + return None + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + chunking : bool + Indicates if you want to chunk the output netCDF + """ + for i, (var_name, var_dict) in enumerate(self.variables.items()): + if var_dict['data'] is not None: + if self.print_info: + print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, len(self.variables))) + try: + if not chunking: + var = netcdf.createVariable(var_name, var_dict['data'].dtype, ('time', 'lev',) + self._var_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + else: + if self.balanced: + raise NotImplementedError("A balanced data cannot be chunked.") + if self.master: + chunk_size = var_dict['data'].shape + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dict['data'].dtype, ('time', 'lev',) + self._var_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl, chunksizes=chunk_size) + if self.print_info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.print_info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + if self.print_info: + print("Rank 
{0:03d}: Filling {1})".format(self.rank, var_name)) + try: + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['z_min']:self.write_axis_limits['z_max'], + self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except ValueError: + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + 0, + self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + # msg = "*WARNING* '{0}' variable is a 3D field. Setting it on first (0) layer.".format( + # var_name) + # warn(msg) + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['z_min']:self.write_axis_limits['z_max'], + self.write_axis_limits['y_min']:self.write_axis_limits['y_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + if self.print_info: + print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + elif att_name not in ['chunk_size', 'var_dims', 'dimensions']: + var.setncattr(att_name, att_value) + self._set_var_crs(var) + if self.print_info: + print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + except Exception as e: + print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) + # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # file=sys.stderr) + raise e + else: + msg = 'WARNING!!! ' + msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name) + warnings.warn(msg) + + def _create_centroids(self): + """ + Must be implemented on inner class + """ + return None + + def _create_metadata(self, netcdf): + """ + Must be implemented on inner class + """ + return None + + def _set_crs(self, netcdf): + """ + Must be implemented on inner class + + Parameters + ---------- + netcdf : Dataset + netcdf4-python Dataset + """ + return None + + @staticmethod + def _set_var_crs(var): + """ + Must be implemented on inner class + + Parameters + ---------- + var : Variable + netCDF4-python variable object. + """ + return None + + def __to_netcdf_py(self, path, chunking=False): + """ + Create the NetCDF using netcdf4-python methods + + Parameters + ---------- + path : str + Path to the output netCDF file. 
+ chunking: bool + Indicates if you want to chunk the output netCDF + """ + # Open NetCDF + if self.print_info: + print("Rank {0:03d}: Creating {1}".format(self.rank, path)) + if self.size > 1: + netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=True, comm=self.comm, info=MPI.Info()) + else: + netcdf = Dataset(path, format="NETCDF4", mode='w', parallel=False) + if self.print_info: + print("Rank {0:03d}: NetCDF ready to write".format(self.rank)) + # Create Dimensions + self._create_dimensions(netcdf) + # Create dimension variables + self._create_dimension_variables(netcdf) + if self.print_info: + print("Rank {0:03d}: Dimensions done".format(self.rank)) + + # Create variables + self._create_variables(netcdf, chunking=chunking) + + # Create metadata + self._create_metadata(netcdf) + + # Close NetCDF + if self.global_attrs is not None: + for att_name, att_value in self.global_attrs.items(): + netcdf.setncattr(att_name, att_value) + netcdf.setncattr('Conventions', 'CF-1.7') + + netcdf.close() + + return None + + def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False): + """ + Write the netCDF output file + + Parameters + ---------- + path : str + Path to the output netCDF file + compression_level : int + Level of compression (0 to 9) Default: 0 (no compression) + serial : bool + Indicates if you want to write in serial or not. Default: False + info : bool + Indicates if you want to print the information of each writing step by stdout Default: False + chunking : bool + Indicates if you want a chunked netCDF output. Only available with non serial writes. Default: False + """ + old_info = self.print_info + self.print_info = info + + self.zip_lvl = compression_level + if self.is_xarray: + raise NotImplementedError("Writing with xarray not implemented") + else: + # if serial: + if serial and self.size > 1: + data = self._gather_data() + if self.master: + new_nc = self.copy(copy_vars=False) + new_nc.set_communicator(MPI.COMM_SELF) + new_nc.variables = data + new_nc.__to_netcdf_py(path) + + else: + self.__to_netcdf_py(path, chunking=chunking) + + self.print_info = old_info + + return None + + def __to_grib2(self, path, grib_keys, grib_template_path, info=False): + """ + Private method to write output file with grib2 format. + + Parameters + ---------- + path : str + Path to the output file. + grib_keys : dict + Dictionary with the grib2 keys + grib_template_path : str + Path to the grib2 file to use as template + info : bool + Indicates if you want to print extra information during the process. 
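+
+        All grib_keys entries except 'typeOfFirstFixedSurface' and 'level' are
+        copied verbatim to every message; those two are set per vertical level.
+        A minimal sketch of the mapping (ecCodes key names; the values shown
+        are purely illustrative):
+
+            grib_keys = {'shortName': 'tp', 'decimalPrecision': 2}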
+ """ + from eccodes import codes_grib_new_from_file + from eccodes import codes_keys_iterator_new + from eccodes import codes_keys_iterator_next + from eccodes import codes_keys_iterator_get_name + from eccodes import codes_get_string + from eccodes import codes_keys_iterator_delete + from eccodes import codes_clone + from eccodes import codes_set + from eccodes import codes_set_values + from eccodes import codes_write + from eccodes import codes_release + + fout = open(path, 'wb') + + # read template + fin = open(grib_template_path, 'rb') + + gid = codes_grib_new_from_file(fin) + if gid is None: + sys.exit(1) + + iterid = codes_keys_iterator_new(gid, 'ls') + while codes_keys_iterator_next(iterid): + keyname = codes_keys_iterator_get_name(iterid) + keyval = codes_get_string(gid, keyname) + if info: + print("%s = %s" % (keyname, keyval)) + + codes_keys_iterator_delete(iterid) + for var_name, var_info in self.variables.items(): + for i_time, time in enumerate(self.time): + for i_lev, lev in enumerate(self.lev['data']): + clone_id = codes_clone(gid) + + # Adding grib2 keys to file + for key, value in grib_keys.items(): + if key not in ['typeOfFirstFixedSurface', 'level']: + if info: + print('key:', key, 'val:', value, 'type:', type(value)) + codes_set(clone_id, key, value) + # codes_set(clone_id, key, value) + + # Level dependent keys + if 'typeOfFirstFixedSurface' in grib_keys.keys(): + if float(lev) == 0: + codes_set(clone_id, 'typeOfFirstFixedSurface', 1) + # grib_keys['typeOfFirstFixedSurface'] = 1 + else: + codes_set(clone_id, 'typeOfFirstFixedSurface', 103) + # grib_keys['typeOfFirstFixedSurface'] = 103 + if 'level' in grib_keys.keys(): + codes_set(clone_id, 'level', float(lev)) + # grib_keys['level'] = float(lev) + + # # Adding grib2 keys to file + # for key, value in grib_keys.items(): + # print('key:', key, 'val:', value, 'type:', type(value)) + # codes_set(clone_id, key, value) + + # newval = vardata[step, nlev].round(int(keys['decimalPrecision'])) + newval = var_info['data'][i_time, i_lev] + newval = np.flipud(newval) + # newval = newval.reshape(newval.shape[-1], newval.shape[-2])[::-1, :] + # print(newval.dtype, newval) + codes_set_values(clone_id, newval.ravel()) + # print('write') + codes_write(clone_id, fout) + codes_release(gid) + fout.close() + fin.close() + return None + + def to_grib2(self, path, grib_keys, grib_template_path, info=False): + """ + Write output file with grib2 format. + + Parameters + ---------- + path : str + Path to the output file. + grib_keys : dict + Dictionary with the grib2 keys + grib_template_path : str + Path to the grib2 file to use as template + info : bool + Indicates if you want to print extra information during the process. + """ + + # if serial: + if self.parallel_method in ['X', 'Y'] and self.size > 1: + data = self._gather_data() + if self.master: + new_nc = self.copy(copy_vars=False) + new_nc.set_communicator(MPI.COMM_SELF) + new_nc.variables = data + new_nc.__to_grib2(path, grib_keys, grib_template_path, info=info) + else: + self.__to_grib2(path, grib_keys, grib_template_path, info=info) + return None + + def __gather_data_py_object(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. 
+ """ + data_list = deepcopy(self.variables) + for var_name in data_list.keys(): + try: + # noinspection PyArgumentList + data_aux = self.comm.gather(data_list[var_name]['data'], root=0) + if self.rank == 0: + shp_len = len(data_list[var_name]['data'].shape) + add_dimension = False # to Add a dimension + if self.parallel_method == 'Y': + if shp_len == 2: + # if is a 2D concatenate over first axis + axis = 0 + elif shp_len == 3: + # if is a 3D concatenate over second axis + axis = 1 + else: + # if is a 4D concatenate over third axis + axis = 2 + elif self.parallel_method == 'X': + if shp_len == 2: + # if is a 2D concatenate over second axis + axis = 1 + elif shp_len == 3: + # if is a 3D concatenate over third axis + axis = 2 + else: + # if is a 4D concatenate over forth axis + axis = 3 + elif self.parallel_method == 'T': + if shp_len == 2: + # if is a 2D add dimension + add_dimension = True + axis = None # Not used + elif shp_len == 3: + # if is a 3D concatenate over first axis + axis = 0 + else: + # if is a 4D concatenate over second axis + axis = 0 + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'Y', 'T'])) + if add_dimension: + data_list[var_name]['data'] = np.stack(data_aux) + else: + data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) + except Exception as e: + print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list + + def _gather_data(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. 
+ """ + data_list = deepcopy(self.variables) + for var_name in data_list.keys(): + if self.print_info and self.master: + print("Gathering {0}".format(var_name)) + shp_len = len(data_list[var_name]['data'].shape) + try: + # Collect local array sizes using the high-level mpi4py gather + rank_shapes = np.array(self.comm.gather(data_list[var_name]['data'].shape, root=0)) + sendbuf = data_list[var_name]['data'].flatten() + sendcounts = np.array(self.comm.gather(len(sendbuf), root=0)) + if self.master: + recvbuf = np.empty(sum(sendcounts), dtype=type(sendbuf[0])) + else: + recvbuf = None + self.comm.Gatherv(sendbuf=sendbuf, recvbuf=(recvbuf, sendcounts), root=0) + if self.master: + recvbuf = np.split(recvbuf, np.cumsum(sendcounts)) + # TODO ask + # I don't understand why it is giving one more split + if len(recvbuf) > len(sendcounts): + recvbuf = recvbuf[:-1] + for i, shape in enumerate(rank_shapes): + recvbuf[i] = recvbuf[i].reshape(shape) + add_dimension = False # to Add a dimension + if self.parallel_method == 'Y': + if shp_len == 2: + # if is a 2D concatenate over first axis + axis = 0 + elif shp_len == 3: + # if is a 3D concatenate over second axis + axis = 1 + else: + # if is a 4D concatenate over third axis + axis = 2 + elif self.parallel_method == 'X': + if shp_len == 2: + # if is a 2D concatenate over second axis + axis = 1 + elif shp_len == 3: + # if is a 3D concatenate over third axis + axis = 2 + else: + # if is a 4D concatenate over forth axis + axis = 3 + elif self.parallel_method == 'T': + if shp_len == 2: + # if is a 2D add dimension + add_dimension = True + axis = None # Not used + elif shp_len == 3: + # if is a 3D concatenate over first axis + axis = 0 + else: + # if is a 4D concatenate over second axis + axis = 0 + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'Y', 'T'])) + if add_dimension: + data_list[var_name]['data'] = np.stack(recvbuf) + else: + data_list[var_name]['data'] = np.concatenate(recvbuf, axis=axis) + except Exception as e: + print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list + + # ================================================================================================================== + # Extra Methods + # ================================================================================================================== + def add_4d_vertical_info(self, info_to_add): + """ + To add the vertical information from other source. + + Parameters + ---------- + self : nes.Nes + + info_to_add : nes.Nes, str + Nes object with the vertical information as variable or str with the path to the NetCDF file that contains + the vertical data. + """ + return vertical_interpolation.add_4d_vertical_info(self, info_to_add) + + def interpolate_vertical(self, new_levels, new_src_vertical=None, kind='linear', extrapolate=None, info=None): + """ + Vertical interpolation method + + Parameters + ---------- + self : Nes + Source Nes object + + new_levels : list + List of new vertical levels + + new_src_vertical + + kind : str + Vertical interpolation type. 
+ + extrapolate : None, tuple, str + Extrapolate method (for non linear operations) + + info: None, bool + Indicates if you want to print extra information + + """ + return vertical_interpolation.interpolate_vertical( + self, new_levels, new_src_vertical=new_src_vertical, kind=kind, extrapolate=extrapolate, info=info) + + def interpolate_horizontal(self, dst_grid, weight_matrix_path=None, kind='NearestNeighbour', n_neighbours=4, + info=False): + """ + Horizontal interpolation from the current grid to another one. + + Parameters + ---------- + dst_grid : nes.Nes + Final projection Nes object + weight_matrix_path : str, None + Path to the weight matrix to read/create + kind : str + Kind of horizontal interpolation. choices = ['NearestNeighbour'] + n_neighbours: int + Used if kind == NearestNeighbour. Number of nearest neighbours to interpolate. default = 4 + info: bool + Indicates if you want to print extra info during the interpolation process. + """ + return horizontal_interpolation.interpolate_horizontal( + self, dst_grid, weight_matrix_path=weight_matrix_path, kind=kind, n_neighbours=n_neighbours, info=info) diff --git a/nes/nc_projections/latlon_nes.py b/nes/nc_projections/latlon_nes.py new file mode 100644 index 0000000000000000000000000000000000000000..b24796e66c648dbd8878bd73f6da69a86e5325a7 --- /dev/null +++ b/nes/nc_projections/latlon_nes.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python + +import numpy as np +from .default_nes import Nes + + +class LatLonNes(Nes): + """ + + Attributes + ---------- + _var_dim : tuple + Tuple with the name of the Y and X dimensions for the variables. + ('lat', 'lon') for a regular latitude-longitude projection. + _lat_dim : tuple + Tuple with the name of the dimensions of the Latitude values. + ('lat',) for a regular latitude-longitude projection. + _lon_dim : tuple + Tuple with the name of the dimensions of the Longitude values. + ('lon',) for a regular latitude-longitude projection. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + balanced=False, times=None, **kwargs): + """ + Initialize the LatLonNes class + + Parameters + ---------- + comm: MPI.COMM + Path to the CSV file that contains all the information. + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. 
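+        kwargs :
+            Projection dependent parameters to create it from scratch.
+
+        Notes
+        -----
+        A hedged creation sketch: the keyword names below match the ones consumed
+        by ``_create_centroids``, but the values are purely illustrative:
+
+        >>> nessy = LatLonNes(comm=None, create_nes=True, parallel_method='Y',
+        ...                   lat_orig=30.0, lon_orig=-10.0, inc_lat=0.1,
+        ...                   inc_lon=0.1, n_lat=100, n_lon=200)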
+ """ + super(LatLonNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, balanced=balanced, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes, + times=times, **kwargs) + + if create_nes: + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + + # Set axis limits for parallel writing + self.write_axis_limits = self.get_write_axis_limits() + + self._var_dim = ('lat', 'lon') + self._lat_dim = ('lat',) + self._lon_dim = ('lon',) + + self.free_vars('crs') + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, + parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + """ + new = LatLonNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, + parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new + + def _create_centroids(self, **kwargs): + """ + Calculate center latitudes and longitudes from grid details. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # Calculate center latitudes + lat_c_orig = kwargs['lat_orig'] + (kwargs['inc_lat'] / 2) + self.center_lats = np.linspace( + lat_c_orig, lat_c_orig + (kwargs['inc_lat'] * (kwargs['n_lat'] - 1)), kwargs['n_lat']) + + # Calculate center longitudes + lon_c_orig = kwargs['lon_orig'] + (kwargs['inc_lon'] / 2) + self.center_lons = np.linspace( + lon_c_orig, lon_c_orig + (kwargs['inc_lon'] * (kwargs['n_lon'] - 1)), kwargs['n_lon']) + + return {'data': self.center_lats}, {'data': self.center_lons} + + def _create_bounds(self, **kwargs): + + # This function is not being used + spatial_nv = 2 + boundary_lats = self.create_bounds(self.center_lats, kwargs['inc_lat'], spatial_nv) + boundary_lons = self.create_bounds(self.center_lons, kwargs['inc_lon'], spatial_nv) + + return boundary_lats, boundary_lons + + @staticmethod + def _set_var_crs(var): + """ + Set the grid_mapping to 'crs'. + + Parameters + ---------- + var : Variable + netCDF4-python variable object. + """ + var.grid_mapping = 'crs' + + return None + + def _create_metadata(self, netcdf): + """ + Create the 'crs' variable for the rotated latitude longitude grid_mapping. 
+ + Parameters + ---------- + netcdf : Dataset + netcdf4-python Dataset + """ + + mapping = netcdf.createVariable('crs', 'c') + # mapping = netcdf.createVariable('crs', 'i') + mapping.grid_mapping_name = "latitude_longitude" + mapping.semi_major_axis = 6371000.0 + mapping.inverse_flattening = 0 + + return None + + def to_grib2(self, path, grib_keys, grib_template_path, info=False): + """ + Write output file with grib2 format. + + Parameters + ---------- + path : str + Path to the output file. + grib_keys : dict + Dictionary with the grib2 keys + grib_template_path : str + Path to the grib2 file to use as template + info : bool + Indicates if you want to print extra information during the process. + """ + return super(LatLonNes, self).to_grib2(path, grib_keys, grib_template_path, info=info) diff --git a/nes/nc_projections/lcc_nes.py b/nes/nc_projections/lcc_nes.py new file mode 100644 index 0000000000000000000000000000000000000000..6bf09f4c6de858bfeb89cfc430b876577edf9626 --- /dev/null +++ b/nes/nc_projections/lcc_nes.py @@ -0,0 +1,289 @@ +#!/usr/bin/env python + +import numpy as np +from cfunits import Units +from pyproj import Proj +from .default_nes import Nes + + +class LCCNes(Nes): + """ + + Attributes + ---------- + _y : dict + Y coordinates dictionary with the complete 'data' key for all the values and the rest of the attributes. + _x : dict + X coordinates dictionary with the complete 'data' key for all the values and the rest of the attributes. + y : dict + Y coordinates dictionary with the portion of 'data' corresponding to the rank values. + x : dict + X coordinates dictionary with the portion of 'data' corresponding to the rank values. + _var_dim : tuple + Tuple with the name of the Y and X dimensions for the variables. + ('y', 'x',) for a LCC projection. + _lat_dim : tuple + Tuple with the name of the dimensions of the Latitude values. + ('y', 'x',) for a LCC projection. + _lon_dim : tuple + Tuple with the name of the dimensions of the Longitude values. + ('y', 'x') for a LCC projection. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + balanced=False, times=None, **kwargs): + """ + Initialize the LCCNes class + + Parameters + ---------- + comm: MPI.COMM + Path to the CSV file that contains all the information. + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. 
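+        kwargs :
+            Projection dependent parameters to create it from scratch.
+
+        Notes
+        -----
+        A hedged creation sketch: keyword names follow ``get_projection_data`` and
+        ``_create_centroids``; the values are purely illustrative:
+
+        >>> nessy = LCCNes(comm=None, create_nes=True, parallel_method='Y',
+        ...                lat_1=37.0, lat_2=43.0, lon_0=-3.0, lat_0=40.0,
+        ...                x_0=-800000.0, y_0=-800000.0,
+        ...                inc_x=4000.0, inc_y=4000.0, nx=400, ny=400)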
+ """ + super(LCCNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, balanced=balanced, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes, + times=times, **kwargs) + + if create_nes: + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + else: + # Complete dimensions + self._y = self._get_coordinate_dimension('y') + self._x = self._get_coordinate_dimension('x') + + # Dimensions screening + self.y = self._get_coordinate_values(self._y, 'Y') + self.x = self._get_coordinate_values(self._x, 'X') + + # Get projection details + self.projection_data = self.get_projection_data(create_nes, **kwargs) + + # Set axis limits for parallel writing + self.write_axis_limits = self.get_write_axis_limits() + + self._var_dim = ('y', 'x') + self._lat_dim = ('y', 'x') + self._lon_dim = ('y', 'x') + + self.free_vars('crs') + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, + parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + """ + new = LCCNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, + parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new + + def get_projection_data(self, create_nes, **kwargs): + """ + Read the projection data + + Returns + ------- + projection : dict + Dictionary with the projection data + """ + + if create_nes: + projection = {'data': None, + 'dimensions': (), + 'grid_mapping_name': 'lambert_conformal_conic', + 'standard_parallel': [kwargs['lat_2'], kwargs['lat_1']], + 'longitude_of_central_meridian': kwargs['lon_0'], + 'latitude_of_projection_origin': kwargs['lat_0'], + } + + else: + projection = self.variables['Lambert_conformal'] + self.free_vars('Lambert_conformal') + + return projection + + def _create_dimensions(self, netcdf): + """ + Create the 'y', 'x' dimensions and the super dimensions ('lev', 'time'). + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + super(LCCNes, self)._create_dimensions(netcdf) + + netcdf.createDimension('y', len(self._y['data'])) + netcdf.createDimension('x', len(self._x['data'])) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'y' and 'x' variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. 
+ """ + + super(LCCNes, self)._create_dimension_variables(netcdf) + + # LCC Y COORDINATES + y = netcdf.createVariable('y', self._y['data'].dtype, ('y',)) + y.long_name = "y coordinate of projection" + if 'units' in self._y.keys(): + y.units = Units(self._y['units'], formatted=True).units + else: + y.units = 'm' + y.standard_name = "projection_y_coordinate" + if self.size > 1: + y.set_collective(True) + y[:] = self._y['data'] + + # LCC X COORDINATES + x = netcdf.createVariable('x', self._x['data'].dtype, ('x',)) + x.long_name = "x coordinate of projection" + if 'units' in self._x.keys(): + x.units = Units(self._x['units'], formatted=True).units + else: + x.units = 'm' + x.standard_name = "projection_x_coordinate" + if self.size > 1: + x.set_collective(True) + x[:] = self._x['data'] + + return None + + def _create_centroids(self, **kwargs): + """ + Calculate center latitudes and longitudes from grid details. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # Create a regular grid in metres (1D) + self._x = {'data': np.linspace(kwargs['x_0'] + (kwargs['inc_x'] / 2), + kwargs['x_0'] + (kwargs['inc_x'] / 2) + + (kwargs['inc_x'] * (kwargs['nx'] - 1)), kwargs['nx'], + dtype=np.float)} + self._y = {'data': np.linspace(kwargs['y_0'] + (kwargs['inc_y'] / 2), + kwargs['y_0'] + (kwargs['inc_y'] / 2) + + (kwargs['inc_y'] * (kwargs['ny'] - 1)), kwargs['ny'], + dtype=np.float)} + + # Create a regular grid in metres (1D to 2D) + x = np.array([self._x['data']] * len(self._y['data'])) + y = np.array([self._y['data']] * len(self._x['data'])).T + + projection = Proj( + proj='lcc', + ellps='WGS84', + R=6370000.00, + lat_1=kwargs['lat_1'], + lat_2=kwargs['lat_2'], + lon_0=kwargs['lon_0'], + lat_0=kwargs['lat_0'], + to_meter=1, + x_0=0, + y_0=0, + a=6370000.00, + k_0=1.0) + + # Calculate center latitudes and longitudes (UTM to LCC) + self.center_lons, self.center_lats = projection(x, y, inverse=True) + + return {'data': self.center_lats}, {'data': self.center_lons} + + @staticmethod + def _set_var_crs(var): + """ + Set the grid_mapping to 'Lambert_conformal'. + + Parameters + ---------- + var : Variable + netCDF4-python variable object. + """ + var.grid_mapping = 'Lambert_conformal' + + return None + + def _create_metadata(self, netcdf): + """ + Create the 'crs' variable for the lamber conformal grid_mapping. + + Parameters + ---------- + netcdf : Dataset + netcdf4-python Dataset + """ + + mapping = netcdf.createVariable('Lambert_conformal', 'c') + mapping.grid_mapping_name = self.projection_data['grid_mapping_name'] + mapping.standard_parallel = self.projection_data['standard_parallel'] + mapping.longitude_of_central_meridian = self.projection_data['longitude_of_central_meridian'] + mapping.latitude_of_projection_origin = self.projection_data['latitude_of_projection_origin'] + + return None + + def to_grib2(self, path, grib_keys, grib_template_path, info=False): + """ + Write output file with grib2 format. + + Parameters + ---------- + path : str + Path to the output file. + grib_keys : dict + Dictionary with the grib2 keys + grib_template_path : str + Path to the grib2 file to use as template + info : bool + Indicates if you want to print extra information during the process. 
+ """ + raise NotImplementedError("Grib2 format cannot be write in a Lambert Conformal Conic projection.") diff --git a/nes/nc_projections/mercator_nes.py b/nes/nc_projections/mercator_nes.py new file mode 100644 index 0000000000000000000000000000000000000000..37dd6fdb166091204a09b2799a4e33508c4805ec --- /dev/null +++ b/nes/nc_projections/mercator_nes.py @@ -0,0 +1,284 @@ +#!/usr/bin/env python + +import numpy as np +from cfunits import Units +from pyproj import Proj +from nes.nc_projections.default_nes import Nes + + +class MercatorNes(Nes): + """ + + Attributes + ---------- + _y : dict + Y coordinates dictionary with the complete 'data' key for all the values and the rest of the attributes. + _x : dict + X coordinates dictionary with the complete 'data' key for all the values and the rest of the attributes. + y : dict + Y coordinates dictionary with the portion of 'data' corresponding to the rank values. + x : dict + X coordinates dictionary with the portion of 'data' corresponding to the rank values. + _var_dim : tuple + Tuple with the name of the Y and X dimensions for the variables. + ('y', 'x') for a Mercator projection. + _lat_dim : tuple + Tuple with the name of the dimensions of the Latitude values. + ('y', 'x') for a Mercator projection. + _lon_dim : tuple + Tuple with the name of the dimensions of the Longitude values. + ('y', 'x') for a Mercator projection. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + balanced=False, times=None, **kwargs): + """ + Initialize the MercatorNes class + + Parameters + ---------- + comm: MPI.COMM + Path to the CSV file that contains all the information. + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. 
+ """ + super(MercatorNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, balanced=balanced, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes, + times=times, **kwargs) + + if create_nes: + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + else: + # Complete dimensions + self._y = self._get_coordinate_dimension('y') + self._x = self._get_coordinate_dimension('x') + + # Dimensions screening + self.y = self._get_coordinate_values(self._y, 'Y') + self.x = self._get_coordinate_values(self._x, 'X') + + # Get projection details + self.projection_data = self.get_projection_data(create_nes, **kwargs) + + # Set axis limits for parallel writing + self.write_axis_limits = self.get_write_axis_limits() + + self._var_dim = ('y', 'x') + self._lat_dim = ('y', 'x') + self._lon_dim = ('y', 'x') + + self.free_vars('crs') + + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, + parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + """ + new = MercatorNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, + parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new + + def get_projection_data(self, create_nes, **kwargs): + """ + Read the projection data + + Returns + ------- + projection : dict + Dictionary with the projection data + """ + + if create_nes: + projection = {'data': None, + 'dimensions': (), + 'grid_mapping_name': 'mercator', + 'standard_parallel': [kwargs['lat_ts']], # TODO: Check if True + 'longitude_of_projection_origin': kwargs['lon_0'], + } + + else: + projection = self.variables['mercator'] + self.free_vars('mercator') + + return projection + + def _create_dimensions(self, netcdf): + """ + Create the 'y', 'x' dimensions and the super dimensions ('lev', 'time'). + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + super(MercatorNes, self)._create_dimensions(netcdf) + + netcdf.createDimension('y', len(self._y['data'])) + netcdf.createDimension('x', len(self._x['data'])) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'y' and 'x' variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. 
+ """ + + super(MercatorNes, self)._create_dimension_variables(netcdf) + + # MERCATOR Y COORDINATES + y = netcdf.createVariable('y', self._y['data'].dtype, ('y',)) + y.long_name = "y coordinate of projection" + if 'units' in self._y.keys(): + y.units = Units(self._y['units'], formatted=True).units + else: + y.units = 'm' + y.standard_name = "projection_y_coordinate" + if self.size > 1: + y.set_collective(True) + y[:] = self._y['data'] + + # MERCATOR X COORDINATES + x = netcdf.createVariable('x', self._x['data'].dtype, ('x',)) + x.long_name = "x coordinate of projection" + if 'units' in self._x.keys(): + x.units = Units(self._x['units'], formatted=True).units + else: + x.units = 'm' + x.standard_name = "projection_x_coordinate" + if self.size > 1: + x.set_collective(True) + x[:] = self._x['data'] + + return None + + def _create_centroids(self, **kwargs): + """ + Calculate center latitudes and longitudes from grid details. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # Create a regular grid in metres (1D) + self._x = {'data': np.linspace(kwargs['x_0'] + (kwargs['inc_x'] / 2), + kwargs['x_0'] + (kwargs['inc_x'] / 2) + + (kwargs['inc_x'] * (kwargs['nx'] - 1)), kwargs['nx'], + dtype=np.float)} + + + self._y = {'data': np.linspace(kwargs['y_0'] + (kwargs['inc_y'] / 2), + kwargs['y_0'] + (kwargs['inc_y'] / 2) + + (kwargs['inc_y'] * (kwargs['ny'] - 1)), kwargs['ny'], + dtype=np.float)} + + # Create a regular grid in metres (1D to 2D) + x = np.array([self._x['data']] * len(self._y['data'])) + y = np.array([self._y['data']] * len(self._x['data'])).T + + projection = Proj( + proj='merc', + a=6370000.00, + b=6370000.00, + lat_ts=kwargs['lat_ts'], + lon_0=kwargs['lon_0'], + ) + + # Calculate center latitudes and longitudes (UTM to Mercator) + self.center_lons, self.center_lats = projection(x, y, inverse=True) + + return {'data': self.center_lats}, {'data': self.center_lons} + + @staticmethod + def _set_var_crs(var): + """ + Set the grid_mapping to 'mercator'. + + Parameters + ---------- + var : Variable + netCDF4-python variable object. + """ + var.grid_mapping = 'mercator' + + return None + + def _create_metadata(self, netcdf): + """ + Create the 'crs' variable for the Mercator grid_mapping. + + Parameters + ---------- + netcdf : Dataset + netcdf4-python Dataset + """ + + mapping = netcdf.createVariable('mercator', 'c') + mapping.grid_mapping_name = self.projection_data['grid_mapping_name'] + mapping.standard_parallel = self.projection_data['standard_parallel'] + mapping.longitude_of_projection_origin = self.projection_data['longitude_of_projection_origin'] + + return None + + def to_grib2(self, path, grib_keys, grib_template_path, info=False): + """ + Write output file with grib2 format. + + Parameters + ---------- + path : str + Path to the output file. + grib_keys : dict + Dictionary with the grib2 keys + grib_template_path : str + Path to the grib2 file to use as template + info : bool + Indicates if you want to print extra information during the process. 
+ """ + raise NotImplementedError("Grib2 format cannot be write in a Mercator projection.") diff --git a/nes/nc_projections/points_nes.py b/nes/nc_projections/points_nes.py new file mode 100644 index 0000000000000000000000000000000000000000..3dd99455534488d692299f711afba6fcedb68d9e --- /dev/null +++ b/nes/nc_projections/points_nes.py @@ -0,0 +1,635 @@ +#!/usr/bin/env python + +import sys +import warnings +from copy import deepcopy + +import numpy as np +from netCDF4 import Dataset, date2num, stringtochar +from numpy.ma.core import MaskError + +from .default_nes import Nes + + +class PointsNes(Nes): + """ + + Attributes + ---------- + _var_dim : tuple + Tuple with the name of the Y and X dimensions for the variables. + ('lat', 'lon') for a points grid. + _lat_dim : tuple + Tuple with the name of the dimensions of the Latitude values. + ('lat',) for a points grid. + _lon_dim : tuple + Tuple with the name of the dimensions of the Longitude values. + ('lon',) for a points grid. + _station : tuple + Tuple with the name of the dimensions of the station values. + ('station',) for a points grid. + """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + times=None, strlen=75, **kwargs): + """ + Initialize the PointsNes class + + Parameters + ---------- + comm: MPI.Communicator + Path to the CSV file that contains all the information. + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset, None + NetCDF4-python Dataset to initialize the class + xarray: bool + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'T'] + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. 
+ """ + + super(PointsNes, self).__init__(comm=comm, path=path, info=info, dataset=dataset, + xarray=xarray, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes, + times=times, **kwargs) + + if create_nes: + # Complete dimensions + self._station = {'data': np.arange(len(self._lon['data']))} + + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'X') + self.lon = self._get_coordinate_values(self._lon, 'X') + self.station = deepcopy(self._station) + self.strlen = strlen + else: + self._station = self._get_coordinate_dimension(['station']) + self.station = self._get_coordinate_values(self._station, 'X') + + self.strlen = self._get_strlen() + + # Set axis limits for parallel writing + self.write_axis_limits = self.get_write_axis_limits() + + self._var_dim = ('station',) + self._lat_dim = ('station',) + self._lon_dim = ('station',) + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, + parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'T'] + """ + new = PointsNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced, + parallel_method=parallel_method, avoid_first_hours=avoid_first_hours, + avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level) + return new + + def _create_dimensions(self, netcdf): + """ + Create the 'lev', 'time_nv', 'station' dimensions. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + netcdf.createDimension('time', None) + if self._time_bnds is not None: + netcdf.createDimension('time_nv', 2) + + # The number of longitudes is equal to the number of stations + netcdf.createDimension('station', len(self._lon['data'])) + + if hasattr(self, 'strlen'): + if self.strlen is not None: + netcdf.createDimension('strlen', self.strlen) + + return None + + def _create_dimension_variables(self, netcdf): + """ + Create the 'time', 'time_bnds' and 'station' variables. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. 
+ """ + + # TIMES + time_var = netcdf.createVariable('time', np.float64, ('time',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + time_var.units = 'hours since {0}'.format( + self._time[self.get_time_id(self.hours_start, first=True)].strftime("%Y-%m-%d %H:%M:%S")) + time_var.standard_name = "time" + time_var.calendar = 'standard' + time_var.long_name = "time" + if self._time_bnds is not None: + time_var.bounds = 'time_bnds' + if self.size > 1: + time_var.set_collective(True) + time_var[:] = date2num(self._time[self.get_time_id(self.hours_start, first=True): + self.get_time_id(self.hours_end, first=False)], + time_var.units, time_var.calendar) + + # TIME BOUNDS + if self._time_bnds is not None: + time_bnds_var = netcdf.createVariable('time_bnds', np.float64, ('time', 'time_nv',), zlib=self.zip_lvl, + complevel=self.zip_lvl) + if self.size > 1: + time_bnds_var.set_collective(True) + time_bnds_var[:] = date2num(self._time_bnds, time_var.units, calendar='standard') + + # STATIONS + stations = netcdf.createVariable('station', np.float64, ('station',), zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + stations.units = "" + stations.axis = "X" + stations.long_name = "" + stations.standard_name = "station" + if self.size > 1: + stations.set_collective(True) + stations[:] = self._station['data'] + + return None + + def _get_coordinate_dimension(self, possible_names): + """ + Read the coordinate dimension data. + + This will read the complete data of the coordinate + + Parameters + ---------- + possible_names: list, str + List (or single string) of the possible names of the coordinate (e.g. ['lat', 'latitude']) + + Returns + ------- + nc_var : dict + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + """ + + nc_var = super(PointsNes, self)._get_coordinate_dimension(possible_names) + + if isinstance(possible_names, str): + possible_names = [possible_names] + + if 'station' in possible_names: + nc_var['data'] = np.arange(len(self._lon['data'])) + + return nc_var + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. Accepted values: ['X'] + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank + """ + + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + + return values + + def _get_lazy_variables_not_used(self): + """ + Get all the variables information. + + Returns + ------- + variables : dict + Dictionary with the variable name as key and another dictionary as value. + De value dictionary will have the 'data' key with None as value and all the variable attributes as the + other keys. + e.g. 
+ {'var_name_1': {'data': None, 'attr_1': value_1_1, 'attr_2': value_1_2, ...}, + 'var_name_2': {'data': None, 'attr_1': value_2_1, 'attr_2': value_2_2, ...}, + ...} + """ + + if self.is_xarray: + variables = self.dataset.variables + else: + if self.master: + variables = {} + for var_name, var_info in self.netcdf.variables.items(): + variables[var_name] = {} + variables[var_name]['data'] = None + # Remove strlen as a dimension + if 'strlen' in var_info.dimensions: + variables[var_name]['dimensions'] = tuple([dim for dim in var_info.dimensions + if dim != 'strlen']) + else: + variables[var_name]['dimensions'] = var_info.dimensions + + for attrname in var_info.ncattrs(): + # Avoiding some attributes + if attrname not in ['missing_value', '_FillValue']: + value = getattr(var_info, attrname) + if value in ['unitless', '-']: + value = '' + variables[var_name][attrname] = value + else: + variables = None + variables = self.comm.bcast(variables, root=0) + + return variables + + def _get_strlen(self): + """ + Read the string length dimension of some variables. + + Returns + ------- + int, None + String length. None means no string data + """ + + if 'strlen' in self.netcdf.dimensions: + strlen = self.netcdf.dimensions['strlen'].size + else: + strlen = None + + return strlen + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank. + """ + + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + # Read data in 1 or 2 dimensions + if len(var_dims) < 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif len(var_dims) == 2: + if 'strlen' in nc_var.dimensions: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], :] + data = np.array([''.join(i) for i in np.char.decode(data)]) + else: + data = nc_var[self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + else: + raise NotImplementedError('Error with {0}. Only can be read netCDF with 2 dimensions or less'.format( + var_name)) + + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + chunking : bool + Indicates if you want to chunk the output netCDF + """ + + if self.variables is not None: + for i, (var_name, var_dict) in enumerate(self.variables.items()): + + if var_dict['data'] is not None: + + # Get dimensions when reading datasets + if 'dimensions' in var_dict.keys(): + # Get dimensions + var_dims = var_dict['dimensions'] + # Get dimensions when creating new datasets + else: + if len(var_dict['data'].shape) == 1: + # For data that depends only on station (e.g. station_code) + var_dims = self._var_dim + else: + # For data that is dependent on time and station (e.g. 
PM10) + var_dims = ('time',) + self._var_dim + + if var_dict['data'].dtype == np.str: + # Add strlen as a dimension if needed + var_dims += ('strlen',) + + # Convert list of strings to chars + try: + unicode_type = len(max(var_dict['data'], key=len)) + if ((var_dict['data'].dtype == np.dtype(' 0, complevel=self.zip_lvl) + else: + if self.master: + chunk_size = var_dict['data'].shape + # TODO: Change chunk size (add strlen) as tuple + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl, + chunksizes=chunk_size) + + if self.print_info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.print_info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + if len(att_value.shape) == 1: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + elif len(att_value.shape) == 2: + if 'strlen' in var_dict['dimensions']: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'], :].shape, + att_value.shape)) + else: + try: + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + except ValueError: + raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format( + var[self.write_axis_limits['t_min']:self.write_axis_limits['t_max'], + self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape, + att_value.shape)) + if self.print_info: + print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + elif att_name not in ['chunk_size', 'var_dims', 'dimensions', 'dtype']: + var.setncattr(att_name, att_value) + self._set_var_crs(var) + if self.print_info: + print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + except Exception as e: + print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name)) + # print("**ERROR** an error hase occurred while writing the '{0}' variable".format(var_name), + # file=sys.stderr) + raise e + else: + msg = 'WARNING!!! ' + msg += 'Variable {0} was not loaded. 
It will not be written.'.format(var_name) + warnings.warn(msg) + + def _gather_data(self): + """ + Gather all the variable data into the MPI rank 0 to perform a serial write. + + Returns + ------- + data_list: dict + Variables dictionary with all the data from all the ranks. + """ + data_list = deepcopy(self.variables) + for var_name, var_info in data_list.items(): + try: + # noinspection PyArgumentList + data_aux = self.comm.gather(data_list[var_name]['data'], root=0) + if self.rank == 0: + shp_len = len(data_list[var_name]['data'].shape) + if self.parallel_method == 'X': + # concatenate over station + if shp_len == 1: + # dimensions = (station) + axis = 0 + elif shp_len == 2: + if 'strlen' in var_info['dimensions']: + # dimensions = (station, strlen) + axis = 0 + else: + # dimensions = (time, station) + axis = 1 + else: + msg = 'The points NetCDF must have ' + msg += 'surface values (without levels).' + raise NotImplementedError(msg) + elif self.parallel_method == 'T': + # concatenate over time + if shp_len == 1: + # dimensions = (station) + axis = None + continue + elif shp_len == 2: + if 'strlen' in var_info['dimensions']: + # dimensions = (station, strlen) + axis = None + continue + else: + # dimensions = (time, station) + axis = 0 + else: + raise NotImplementedError('The points NetCDF must only have surface values (without levels).') + else: + raise NotImplementedError( + "Parallel method '{meth}' is not implemented. Use one of these: {accept}".format( + meth=self.parallel_method, accept=['X', 'T'])) + data_list[var_name]['data'] = np.concatenate(data_aux, axis=axis) + except Exception as e: + print("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + sys.stderr.write("**ERROR** an error hase occur while gathering the '{0}' variable.\n".format(var_name)) + print(e) + sys.stderr.write(str(e)) + # print(e, file=sys.stderr) + sys.stderr.flush() + self.comm.Abort(1) + raise e + + return data_list + + def _create_centroids(self, **kwargs): + """ + Calculate center latitudes and longitudes from points. + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # Calculate center latitudes + self.center_lats = kwargs['lat'] + + # Calculate center longitudes + self.center_lons = kwargs['lon'] + + return {'data': self.center_lats}, {'data': self.center_lons} + + def _create_metadata(self, netcdf): + """ + Create metadata variables + + Parameters + ---------- + netcdf : Dataset + NetCDF object. + """ + + # LATITUDES + lats = netcdf.createVariable('lat', np.float64, self._lat_dim, zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lats.units = "degrees_north" + lats.axis = "Y" + lats.long_name = "latitude coordinate" + lats.standard_name = "latitude" + if self.size > 1: + lats.set_collective(True) + lats[:] = self._lat['data'] + + # LONGITUDES + lons = netcdf.createVariable('lon', np.float64, self._lon_dim, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + lons.units = "degrees_east" + lons.axis = "X" + lons.long_name = "longitude coordinate" + lons.standard_name = "longitude" + if self.size > 1: + lons.set_collective(True) + lons[:] = self._lon['data'] + + return None + + def to_grib2(self, path, grib_keys, grib_template_path, info=False): + """ + Write output file with grib2 format. + + Parameters + ---------- + path : str + Path to the output file. 
+        grib_keys : dict
+            Dictionary with the grib2 keys
+        grib_template_path : str
+            Path to the grib2 file to use as template
+        info : bool
+            Indicates if you want to print extra information during the process.
+        """
+        raise NotImplementedError("Grib2 format cannot be written with point data.")
diff --git a/nes/nc_projections/points_nes_ghost.py b/nes/nc_projections/points_nes_ghost.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b8ab4c75d4af6660b6fcc2eaf2dcf40b95c7f39
--- /dev/null
+++ b/nes/nc_projections/points_nes_ghost.py
@@ -0,0 +1,481 @@
+#!/usr/bin/env python
+
+import sys
+import warnings
+import numpy as np
+from numpy.ma.core import MaskError
+from copy import deepcopy
+from .points_nes import PointsNes
+
+
+class PointsNesGHOST(PointsNes):
+    """
+
+    Attributes
+    ----------
+    _qa : dict
+        Quality assurance (qa) flags dictionary with the complete 'data' key for all the values.
+    _flag : dict
+        Data flags dictionary with the complete 'data' key for all the values.
+    """
+
+    def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='X',
+                 avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False,
+                 times=None, **kwargs):
+        """
+        Initialize the PointsNesGHOST class.
+
+        Parameters
+        ----------
+        comm: MPI.COMM
+            MPI communicator.
+        path: str
+            Path to the NetCDF to initialize the object.
+        info: bool
+            Indicates if you want to get reading/writing info.
+        dataset: Dataset
+            NetCDF4-python Dataset to initialize the class.
+        xarray: bool
+            (Not working) Indicates if you want to use xarray as default.
+        parallel_method : str
+            Indicates the parallelization method that you want. Default: 'X'.
+            Accepted values: ['X'].
+        avoid_first_hours : int
+            Number of hours to remove from first time steps.
+        avoid_last_hours : int
+            Number of hours to remove from last time steps.
+        balanced : bool
+            Indicates if you want a balanced parallelization or not. A balanced dataset cannot be written in
+            chunking mode.
+        first_level : int
+            Index of the first level to use.
+        last_level : int, None
+            Index of the last level to use. None if it is the last.
+        create_nes : bool
+            Indicates if you want to create the object from scratch (True) or through an existing file.
+        times : list, None
+            List of times to substitute the current ones at creation time.
+        kwargs :
+            Projection dependent parameters to create it from scratch.
+        """
+
+        super(PointsNesGHOST, self).__init__(comm=comm, path=path, info=info, dataset=dataset,
+                                             xarray=xarray, parallel_method=parallel_method,
+                                             avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours,
+                                             first_level=first_level, last_level=last_level, create_nes=create_nes,
+                                             times=times, **kwargs)
+
+        self._flag = self._get_coordinate_dimension(['flag'])
+        self.flag = self._get_coordinate_values(self._flag, 'X')
+
+        self._qa = self._get_coordinate_dimension(['qa'])
+        self.qa = self._get_coordinate_values(self._qa, 'X')
+
+    @staticmethod
+    def new(comm=None, path=None, info=False, dataset=None, xarray=False, create_nes=False, balanced=False,
+            parallel_method='X', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None):
+        """
+        Initialize the PointsNesGHOST class.
+
+        Parameters
+        ----------
+        comm: MPI.COMM
+            MPI Communicator.
+        path: str
+            Path to the NetCDF to initialize the object.
+        info: bool
+            Indicates if you want to get reading/writing info.
+        dataset: Dataset
+            NetCDF4-python Dataset to initialize the class.
+        xarray: bool
+            (Not working) Indicates if you want to use xarray as default.
+        parallel_method : str
+            Indicates the parallelization method that you want. Default: 'X'.
+            Accepted values: ['X'].
+        balanced : bool
+            Indicates if you want a balanced parallelization or not. A balanced dataset cannot be written in
+            chunking mode.
+        avoid_first_hours : int
+            Number of hours to remove from first time steps.
+        avoid_last_hours : int
+            Number of hours to remove from last time steps.
+        first_level : int
+            Index of the first level to use.
+        last_level : int, None
+            Index of the last level to use. None if it is the last.
+        create_nes : bool
+            Indicates if you want to create the object from scratch (True) or through an existing file.
+        """
+        new = PointsNesGHOST(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced,
+                             parallel_method=parallel_method, avoid_first_hours=avoid_first_hours,
+                             avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level)
+        return new
+
+    def _create_dimensions(self, netcdf):
+        """
+        Create the 'N_flag_codes' and 'N_qa_codes' dimensions and the super dimensions ('time', 'station').
+
+        Parameters
+        ----------
+        netcdf : Dataset
+            NetCDF object.
+        """
+
+        super(PointsNesGHOST, self)._create_dimensions(netcdf)
+
+        netcdf.createDimension('N_flag_codes', self._flag['data'].shape[2])
+        netcdf.createDimension('N_qa_codes', self._qa['data'].shape[2])
+
+        return None
+
+    def _create_dimension_variables(self, netcdf):
+        """
+        Create the 'station' variables.
+
+        Parameters
+        ----------
+        netcdf : Dataset
+            NetCDF object.
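+
+        Notes
+        -----
+        In addition to the parent variables, this writes the GHOST 'flag' and 'qa'
+        arrays, which are laid out station-first:
+
+        >>> netcdf['flag'].dimensions
+        ('station', 'time', 'N_flag_codes')
+        >>> netcdf['qa'].dimensions
+        ('station', 'time', 'N_qa_codes')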
+ """ + + super(PointsNesGHOST, self)._create_dimension_variables(netcdf) + + # N FLAG CODES + flag = netcdf.createVariable('flag', np.int64, ('station', 'time', 'N_flag_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + flag.units = "" + flag.axis = "" + flag.long_name = "" + flag.standard_name = "flag" + if self.size > 1: + flag.set_collective(True) + flag[:] = self._flag['data'] + + # N QA CODES + qa = netcdf.createVariable('qa', np.int64, ('station', 'time', 'N_qa_codes',), + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + qa.units = "" + qa.axis = "" + qa.long_name = "" + qa.standard_name = "N_qa_codes" + if self.size > 1: + qa.set_collective(True) + qa[:] = self._qa['data'] + + self.free_vars(('flag', 'qa')) + + def erase_flags(self): + first_time_idx = self.get_time_id(self.hours_start, first=True) + last_time_idx = self.get_time_id(self.hours_end, first=False) + t_len = last_time_idx - first_time_idx + + self._qa['data'] = np.empty((len(self._lon['data']), t_len, 0)) + self._flag['data'] = np.empty((len(self._lon['data']), t_len, 0)) + return None + + def _get_coordinate_values(self, coordinate_info, coordinate_axis): + """ + Get the coordinate data of the current portion + + Parameters + ---------- + coordinate_info : dict, list + Dictionary with the 'data' key with the coordinate variable values. and the attributes as other keys. + coordinate_axis : str + Name of the coordinate to extract. Accepted values: ['X'] + Returns + ------- + values : dict + Dictionary with the portion of data corresponding to the rank + """ + + values = deepcopy(coordinate_info) + if isinstance(coordinate_info, list): + values = {'data': deepcopy(coordinate_info)} + coordinate_len = len(values['data'].shape) + + if coordinate_axis == 'X': + if coordinate_len == 1: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif coordinate_len == 2: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif coordinate_len == 3: + values['data'] = values['data'][self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], :] + else: + raise NotImplementedError("The coordinate has wrong dimensions: {dim}".format( + dim=values['data'].shape)) + + return values + + def _read_variable(self, var_name): + """ + Read the corresponding variable data according to the current rank. + + Parameters + ---------- + var_name : str + Name of the variable to read + + Returns + ------- + data: np.array + Portion of the variable data corresponding to the rank. 
+ """ + + nc_var = self.netcdf.variables[var_name] + var_dims = nc_var.dimensions + + # Remove strlen (maximum number of characters that a string can have) from dimensions and join characters + if 'strlen' in nc_var.dimensions: + nc_var = np.array([''.join(i) for i in np.char.decode(nc_var[:].data)]) + var_dims = tuple([', '.join(dim for dim in var_dims if dim != 'strlen')]) + + # Read data in 1 or 2 dimensions + # TODO: Ask Dene why x, t instead of t, x + if len(var_dims) < 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max']] + elif len(var_dims) == 2: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max']] + elif len(var_dims) == 3: + data = nc_var[self.read_axis_limits['x_min']:self.read_axis_limits['x_max'], + self.read_axis_limits['t_min']:self.read_axis_limits['t_max'], + :] + else: + raise NotImplementedError('Error with {0}. Only can be read netCDF with 3 dimensions or less'.format( + var_name)) + + # Missing to nan + try: + data[data.mask == True] = np.nan + except (AttributeError, MaskError, ValueError): + pass + + return data + + def _create_variables(self, netcdf, chunking=False): + """ + Create the netCDF file variables + + Parameters + ---------- + netcdf : Dataset + netcdf4-python opened Dataset + chunking : bool + Indicates if you want to chunk the output netCDF + """ + + if self.variables is not None: + for i, (var_name, var_dict) in enumerate(self.variables.items()): + if var_dict['data'] is not None: + + # Define dimensions depending on the type of variable + if len(var_dict['data'].shape) == 1: + # Metadata + var_dims = self._var_dim + elif len(var_dict['data'].shape) == 2: + # Different from metadata (e.g. concentrations of pm10) + var_dims = self._var_dim + ('time',) + else: + # Flags and qa variables + if var_name == 'flag': + var_dims = self._var_dim + ('time', 'N_flag_codes',) + elif var_name == 'qa': + var_dims = self._var_dim + ('time', 'N_qa_codes',) + + # ESDAC iwahashi landform and other vars are given as objects, transform to strings + if var_dict['data'].dtype == np.dtype(object): + var_dtype = np.dtype(str) + else: + var_dtype = var_dict['data'].dtype + + if self.print_info: + print("Rank {0:03d}: Writing {1} var ({2}/{3})".format(self.rank, var_name, i + 1, + len(self.variables))) + + try: + if not chunking: + var = netcdf.createVariable(var_name, var_dtype, var_dims, + zlib=self.zip_lvl > 0, complevel=self.zip_lvl) + else: + if self.master: + chunk_size = var_dict['data'].shape + else: + chunk_size = None + chunk_size = self.comm.bcast(chunk_size, root=0) + var = netcdf.createVariable(var_name, var_dtype, var_dims, zlib=self.zip_lvl > 0, + complevel=self.zip_lvl, chunksizes=chunk_size) + + if self.print_info: + print("Rank {0:03d}: Var {1} created ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + if self.size > 1: + var.set_collective(True) + if self.print_info: + print("Rank {0:03d}: Var {1} collective ({2}/{3})".format( + self.rank, var_name, i + 1, len(self.variables))) + + for att_name, att_value in var_dict.items(): + if att_name == 'data': + print(att_value) + print(att_value.shape) + if len(att_value.shape) == 1: + try: + var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value + except IndexError: + raise IndexError("Different shapes. 
+                        if self.print_info:
+                            print("Rank {0:03d}: Var {1} created ({2}/{3})".format(
+                                self.rank, var_name, i + 1, len(self.variables)))
+                        if self.size > 1:
+                            var.set_collective(True)
+                            if self.print_info:
+                                print("Rank {0:03d}: Var {1} collective ({2}/{3})".format(
+                                    self.rank, var_name, i + 1, len(self.variables)))
+
+                        for att_name, att_value in var_dict.items():
+                            if att_name == 'data':
+                                if len(att_value.shape) == 1:
+                                    try:
+                                        var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']] = att_value
+                                    except IndexError:
+                                        raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format(
+                                            var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape,
+                                            att_value.shape))
+                                    except ValueError:
+                                        raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format(
+                                            var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max']].shape,
+                                            att_value.shape))
+                                elif len(att_value.shape) == 2:
+                                    try:
+                                        var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'],
+                                            self.write_axis_limits['t_min']:self.write_axis_limits['t_max']] = att_value
+                                    except IndexError:
+                                        raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format(
+                                            var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'],
+                                                self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape,
+                                            att_value.shape))
+                                    except ValueError:
+                                        raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format(
+                                            var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'],
+                                                self.write_axis_limits['t_min']:self.write_axis_limits['t_max']].shape,
+                                            att_value.shape))
+                                elif len(att_value.shape) == 3:
+                                    try:
+                                        var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'],
+                                            self.write_axis_limits['t_min']:self.write_axis_limits['t_max'],
+                                            :] = att_value
+                                    except IndexError:
+                                        raise IndexError("Different shapes. out_shape={0}, data_shp={1}".format(
+                                            var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'],
+                                                self.write_axis_limits['t_min']:self.write_axis_limits['t_max'],
+                                                :].shape,
+                                            att_value.shape))
+                                    except ValueError:
+                                        raise ValueError("Axis limits cannot be accessed. out_shape={0}, data_shp={1}".format(
+                                            var[self.write_axis_limits['x_min']:self.write_axis_limits['x_max'],
+                                                self.write_axis_limits['t_min']:self.write_axis_limits['t_max'],
+                                                :].shape,
+                                            att_value.shape))
+
+                                if self.print_info:
+                                    print("Rank {0:03d}: Var {1} data ({2}/{3})".format(self.rank, var_name, i + 1,
+                                                                                        len(self.variables)))
+                            elif att_name not in ['chunk_size', 'var_dims', 'dimensions']:
+                                var.setncattr(att_name, att_value)
+                        self._set_var_crs(var)
+                        if self.print_info:
+                            print("Rank {0:03d}: Var {1} completed ({2}/{3})".format(self.rank, var_name, i + 1,
+                                                                                     len(self.variables)))
+                    except Exception as e:
+                        print("**ERROR** an error has occurred while writing the '{0}' variable".format(var_name))
+                        raise e
+                else:
+                    msg = 'WARNING!!! '
+                    msg += 'Variable {0} was not loaded. It will not be written.'.format(var_name)
+                    warnings.warn(msg)
+
+    def to_netcdf(self, path, compression_level=0, serial=False, info=False, chunking=False):
+        """
+        Write the netCDF output file.
+
+        Parameters
+        ----------
+        path : str
+            Path to the output netCDF file.
+        compression_level : int
+            Level of compression (0 to 9). Default: 0 (no compression).
+        serial : bool
+            Indicates if you want to write in serial or not. Default: False.
+        info : bool
+            Indicates if you want to print the information of each writing step by stdout. Default: False.
+        chunking : bool
+            Indicates if you want a chunked netCDF output. Only available with non-serial writes. Default: False.
+        """
+
+        if not serial:
+            msg = 'WARNING!!! '
+            msg += 'GHOST datasets cannot be written in parallel yet. '
+            msg += 'Changing to serial mode.'
+            warnings.warn(msg)
+        super(PointsNesGHOST, self).to_netcdf(path, compression_level=compression_level,
+                                              serial=True, info=info, chunking=chunking)
+
+        return None
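+    # Usage sketch (hypothetical paths): because GHOST files are forced to serial
+    # mode, a typical round trip looks like
+    #     nessy = open_netcdf('sconcso4_201911.nc', parallel_method='X')
+    #     nessy.load()
+    #     nessy.to_netcdf('sconcso4_201911_copy.nc', serial=True)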
+    def _gather_data(self):
+        """
+        Gather all the variable data into the MPI rank 0 to perform a serial write.
+
+        Returns
+        -------
+        data_list : dict
+            Variables dictionary with all the data from all the ranks.
+        """
+
+        data_list = deepcopy(self.variables)
+        for var_name, var_info in data_list.items():
+            try:
+                # noinspection PyArgumentList
+                data_aux = self.comm.gather(data_list[var_name]['data'], root=0)
+                if self.rank == 0:
+                    shp_len = len(data_list[var_name]['data'].shape)
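+                    # The gathered per-rank pieces are rejoined along the distributed
+                    # axis; e.g. (illustrative) four rank pieces of shape (25, 720)
+                    # concatenated on axis 0 rebuild the full (100, 720)
+                    # station-by-time array.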
+ """ + def __init__(self, comm=None, path=None, info=False, dataset=None, xarray=False, parallel_method='Y', + avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None, create_nes=False, + balanced=False, times=None, **kwargs): + """ + Initialize the RotatedNes class + + Parameters + ---------- + comm: MPI.COMM + Path to the CSV file that contains all the information. + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + parallel_method : str + Indicates the parallelization method that you want. Default over Y axis + accepted values: ['X', 'Y', 'T'] + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + """ + super(RotatedNes, self).__init__(comm=comm, path=path, + info=info, dataset=dataset, balanced=balanced, + xarray=xarray, parallel_method=parallel_method, + avoid_first_hours=avoid_first_hours, avoid_last_hours=avoid_last_hours, + first_level=first_level, last_level=last_level, create_nes=create_nes, + times=times, **kwargs) + + if create_nes: + # Dimensions screening + self.lat = self._get_coordinate_values(self._lat, 'Y') + self.lon = self._get_coordinate_values(self._lon, 'X') + else: + # Complete dimensions + self._rlat = self._get_coordinate_dimension('rlat') + self._rlon = self._get_coordinate_dimension('rlon') + + # Dimensions screening + self.rlat = self._get_coordinate_values(self._rlat, 'Y') + self.rlon = self._get_coordinate_values(self._rlon, 'X') + + # Get projection details + self.projection_data = self.get_projection_data(create_nes, **kwargs) + + # Set axis limits for parallel writing + self.write_axis_limits = self.get_write_axis_limits() + + self._var_dim = ('rlat', 'rlon') + self._lat_dim = ('rlat', 'rlon') + self._lon_dim = ('rlat', 'rlon') + + @staticmethod + def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False, + parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None): + """ + Initialize the Nes class + + Parameters + ---------- + comm: MPI.COMM + MPI Communicator + path: str + Path to the NetCDF to initialize the object + info: bool + Indicates if you want to get reading/writing info + dataset: Dataset + NetCDF4-python Dataset to initialize the class + xarray: bool: + (Not working) Indicates if you want to use xarray as default + avoid_first_hours : int + Number of hours to remove from first time steps. + avoid_last_hours : int + Number of hours to remove from last time steps. + parallel_method : str + Indicates the parallelization method that you want. 
+    @staticmethod
+    def new(comm=None, path=None, info=False, dataset=None, xarray=False, create=False, balanced=False,
+            parallel_method='Y', avoid_first_hours=0, avoid_last_hours=0, first_level=0, last_level=None):
+        """
+        Initialize the Nes class.
+
+        Parameters
+        ----------
+        comm : MPI.COMM
+            MPI Communicator.
+        path : str
+            Path to the NetCDF to initialize the object.
+        info : bool
+            Indicates if you want to get reading/writing info.
+        dataset : Dataset
+            NetCDF4-python Dataset to initialize the class.
+        xarray : bool
+            (Not working) Indicates if you want to use xarray as default.
+        avoid_first_hours : int
+            Number of hours to remove from first time steps.
+        avoid_last_hours : int
+            Number of hours to remove from last time steps.
+        parallel_method : str
+            Indicates the parallelization method that you want. Default: 'Y'.
+            Accepted values: ['X', 'Y', 'T'].
+        """
+        new = RotatedNes(comm=comm, path=path, info=info, dataset=dataset, xarray=xarray, balanced=balanced,
+                         parallel_method=parallel_method, avoid_first_hours=avoid_first_hours,
+                         avoid_last_hours=avoid_last_hours, first_level=first_level, last_level=last_level)
+        return new
+
+    def get_projection_data(self, create_nes, **kwargs):
+        """
+        Read or build the projection data.
+
+        Parameters
+        ----------
+        create_nes : bool
+            Indicates if the projection is being created from scratch (True) or read from the dataset (False).
+        kwargs : dict
+            Grid details: 'centre_lat' and 'centre_lon' (only used when create_nes is True).
+
+        Returns
+        -------
+        projection : dict
+            Dictionary with the projection data.
+        """
+
+        if create_nes:
+            projection = {'data': None,
+                          'dimensions': (),
+                          'grid_mapping_name': 'rotated_latitude_longitude',
+                          'grid_north_pole_latitude': 90 - kwargs['centre_lat'],
+                          'grid_north_pole_longitude': -180 + kwargs['centre_lon'],
+                          }
+        else:
+            projection = self.variables['rotated_pole']
+            self.free_vars('rotated_pole')
+
+        return projection
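+    # Worked example (illustrative): for a domain centred at 51.0N, 10.0E the
+    # rotated pole lands at grid_north_pole_latitude = 90 - 51.0 = 39.0 and
+    # grid_north_pole_longitude = -180 + 10.0 = -170.0.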
+    def _create_dimensions(self, netcdf):
+        """
+        Create the 'rlat', 'rlon' dimensions and the super dimensions ('lev', 'time').
+
+        Parameters
+        ----------
+        netcdf : Dataset
+            NetCDF object.
+        """
+        super(RotatedNes, self)._create_dimensions(netcdf)
+
+        netcdf.createDimension('rlon', len(self._rlon['data']))
+        netcdf.createDimension('rlat', len(self._rlat['data']))
+
+        return None
+
+    def _create_dimension_variables(self, netcdf):
+        """
+        Create the 'rlat' and 'rlon' variables.
+
+        Parameters
+        ----------
+        netcdf : Dataset
+            NetCDF object.
+        """
+
+        super(RotatedNes, self)._create_dimension_variables(netcdf)
+
+        # ROTATED LATITUDES
+        rlat = netcdf.createVariable('rlat', self._rlat['data'].dtype, ('rlat',))
+        rlat.long_name = "latitude in rotated pole grid"
+        if 'units' in self._rlat.keys():
+            rlat.units = Units(self._rlat['units'], formatted=True).units
+        else:
+            rlat.units = 'degrees'
+        rlat.standard_name = "grid_latitude"
+        if self.size > 1:
+            rlat.set_collective(True)
+        rlat[:] = self._rlat['data']
+
+        # ROTATED LONGITUDES
+        rlon = netcdf.createVariable('rlon', self._rlon['data'].dtype, ('rlon',))
+        rlon.long_name = "longitude in rotated pole grid"
+        if 'units' in self._rlon.keys():
+            rlon.units = Units(self._rlon['units'], formatted=True).units
+        else:
+            rlon.units = 'degrees'
+        rlon.standard_name = "grid_longitude"
+        if self.size > 1:
+            rlon.set_collective(True)
+        rlon[:] = self._rlon['data']
+
+        return None
+
+    def _create_rotated_coordinates(self, **kwargs):
+        """
+        Calculate rotated latitudes and longitudes from grid details.
+
+        Parameters
+        ----------
+        kwargs : dict
+            Grid details: 'south_boundary', 'west_boundary', 'inc_rlat' and 'inc_rlon'.
+
+        Returns
+        -------
+        tuple(dict, dict)
+            Dictionaries with the rotated latitude and longitude values.
+        """
+
+        # Calculate rotated latitudes
+        self.n_lat = int((abs(kwargs['south_boundary']) / kwargs['inc_rlat']) * 2 + 1)
+        self.rotated_lats = np.linspace(kwargs['south_boundary'], kwargs['south_boundary'] +
+                                        (kwargs['inc_rlat'] * (self.n_lat - 1)), self.n_lat)
+
+        # Calculate rotated longitudes
+        self.n_lon = int((abs(kwargs['west_boundary']) / kwargs['inc_rlon']) * 2 + 1)
+        self.rotated_lons = np.linspace(kwargs['west_boundary'], kwargs['west_boundary'] +
+                                        (kwargs['inc_rlon'] * (self.n_lon - 1)), self.n_lon)
+
+        return {'data': self.rotated_lats}, {'data': self.rotated_lons}
+
+    def rotated2latlon(self, lon_deg, lat_deg, lon_min=-180, **kwargs):
+        """
+        Calculate the unrotated coordinates using the rotated ones.
+
+        Parameters
+        ----------
+        lon_deg : numpy.array
+            Rotated longitude coordinates.
+        lat_deg : numpy.array
+            Rotated latitude coordinates.
+        lon_min : float
+            Minimum value for the longitudes: -180 (-180 to 180) or 0 (0 to 360).
+        kwargs : dict
+            Grid details: 'centre_lat' and 'centre_lon'.
+
+        Returns
+        -------
+        tuple(numpy.array, numpy.array)
+            Unrotated longitudes and latitudes.
+        """
+
+        degrees_to_radians = math.pi / 180.
+
+        tph0 = kwargs['centre_lat'] * degrees_to_radians
+        tlm = lon_deg * degrees_to_radians
+        tph = lat_deg * degrees_to_radians
+        tlm0d = -180 + kwargs['centre_lon']
+        ctph0 = np.cos(tph0)
+        stph0 = np.sin(tph0)
+
+        stlm = np.sin(tlm)
+        ctlm = np.cos(tlm)
+        stph = np.sin(tph)
+        ctph = np.cos(tph)
+
+        # Latitudes
+        sph = (ctph0 * stph) + (stph0 * ctph * ctlm)
+        sph[sph > 1.] = 1.
+        sph[sph < -1.] = -1.
+        aph = np.arcsin(sph)
+        aphd = aph / degrees_to_radians
+
+        # Longitudes
+        anum = ctph * stlm
+        denom = (ctlm * ctph - stph0 * sph) / ctph0
+        relm = np.arctan2(anum, denom) - math.pi
+        almd = relm / degrees_to_radians + tlm0d
+        almd[almd > (lon_min + 360)] -= 360
+        almd[almd < lon_min] += 360
+
+        return almd, aphd
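+    # Sanity check (follows from the formulas above): the rotated origin maps back
+    # to the domain centre, e.g. rotated2latlon(np.array([0.]), np.array([0.]),
+    # centre_lat=51.0, centre_lon=10.0) returns (array([10.]), array([51.])).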
+ """ + raise NotImplementedError("Grib2 format cannot be write in a Rotated pole projection.") diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..74a35d2b0177f006ea26eeb08b997d3cc0c4bc1f --- /dev/null +++ b/requirements.txt @@ -0,0 +1,14 @@ +pycodestyle~=2.8.0 +geopandas~=0.10.2 +pandas~=1.2.2 +netcdf4 +numpy~=1.21.5 +timezonefinder~=5.2.0 +pyproj~=3.2.1 +setuptools~=47.1.0 +pytest~=6.2.5 +Shapely~=1.8.0 +scipy +filelock +pyproj~=3.2.1 +eccodes-python~=0.9.5 \ No newline at end of file diff --git a/run_test.py b/run_test.py new file mode 100755 index 0000000000000000000000000000000000000000..03a4fe0da567d750aab7af7e0850e523a0890e78 --- /dev/null +++ b/run_test.py @@ -0,0 +1,24 @@ +# coding=utf-8 +"""Script to run the tests for goddess and generate the code coverage report""" + +import os +import sys +import pytest + + +work_path = os.path.abspath(os.path.join(os.path.dirname(__file__))) +os.chdir(work_path) +print(work_path) + + +version = sys.version_info[0] +report_dir = 'tests/report/python{}'.format(version) +errno = pytest.main([ + 'tests', + '--ignore=tests/report', + '--cov=nes', + '--cov-report=term', + '--cov-report=html:{}/coverage_html'.format(report_dir), + '--cov-report=xml:{}/coverage.xml'.format(report_dir), +]) +sys.exit(errno) diff --git a/setup.py b/setup.py new file mode 100755 index 0000000000000000000000000000000000000000..ed72895166b01942369734b1fc2e632c5a5e06c6 --- /dev/null +++ b/setup.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +from setuptools import find_packages +from setuptools import setup +from nes import __version__ + + +# Get the version number from the relevant file +version = __version__ + +with open("README.md", "r") as f: + long_description = f.read() + +setup( + name='NES', + # license='', + # platforms=['GNU/Linux Debian'], + version=version, + description='', + long_description=long_description, + long_description_content_type="text/markdown", + author="Carles Tena Medina, Alba Vilanova Cortezon", + author_email='carles.tena@bsc.es, alba.vilanova@bsc.es', + url='https://earth.bsc.es/gitlab/es/NES', + + keywords=['Python', 'NetCDF4', 'Grib2', 'Earth'], + install_requires=[ + 'configargparse', + ], + packages=find_packages(), + classifiers=[ + "Programming Language :: Python :: 3.7", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Operating System :: OS Independent", + "Topic :: Scientific/Engineering :: Atmospheric Science" + ], + package_data={'': [ + 'README.md', + 'CHANGELOG.md', + 'LICENSE', + ] + }, + + # entry_points={ + # 'console_scripts': [ + # 'NetCDF_mask = snes.netCDF_mask:run', + # ], + # }, +) diff --git a/tests/1-nes_tests_by_size.py b/tests/1-nes_tests_by_size.py new file mode 100644 index 0000000000000000000000000000000000000000..e545fadf190480843312c1797f9d0e10a1752d2e --- /dev/null +++ b/tests/1-nes_tests_by_size.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +import timeit +import sys +from nes import * +import pandas as pd +# from mpi4py import MPI + +# test_list = [{'in_file': "/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc", +# 'prefix': 'small'}, +# {'in_file': "/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2021080212.nc", +# 'prefix': 'medium'}, +# {'in_file': "/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2017123000.nc", +# 'prefix': 'large'}, +# ] +test_list = [{'in_file': 
"/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2017123000.nc", + 'prefix': 'large'}, + ] +# parallel_method = 'X' +# parallel_method = 'Y' +parallel_method = 'T' + + +times = pd.DataFrame(index=[0]) +# size = MPI.COMM_WORLD.Get_size() + +for test in test_list: + # ====================================================================================================================== + + st_time = timeit.default_timer() + # ===== Reading + nessy = open_netcdf(path=test['in_file'], parallel_method=parallel_method) + size = nessy.size + if nessy.master: + print("\n=====", test['prefix'], "=====") + + nessy.keep_vars(['O3', 'T', 'U', 'V', 'layer_thickness']) + # nessy.keep_vars(['O3']) + # ===== END Reading + spent_time = timeit.default_timer() - st_time + if nessy.master: + print("Init time {0:02d}:{1:02.3f} (tot: {2:.3f} s)".format( + int(spent_time // 60), spent_time - (int(spent_time // 60) * 60), spent_time)) + sys.stdout.flush() + times["{0}_Init".format(test['prefix'])] = spent_time + + # ====================================================================================================================== + + st_time = timeit.default_timer() + # ===== Loading + nessy.load() + # ===== END Loading + spent_time = timeit.default_timer() - st_time + if nessy.master: + print("Loading time {min:02d}:{sec:02.3f} (tot: {tot:.3f} s ; var: {var:.3f}s/var)".format( + min=int(spent_time // 60), sec=spent_time - (int(spent_time // 60) * 60), + var=spent_time / len(nessy.variables.keys()), tot=spent_time)) + sys.stdout.flush() + times["{0}_Load".format(test['prefix'])] = spent_time + + # ====================================================================================================================== + + if test['prefix'] not in ['large']: + st_time = timeit.default_timer() + # ===== Serial Write + nessy.to_netcdf('nc_test_{0}_{1}_serial.nc'.format(test['prefix'], nessy.size), serial=True) + # ===== END Serial Write + spent_time = timeit.default_timer() - st_time + if nessy.master: + print("Writing in serial time {min:02d}:{sec:02.3f} (tot: {tot:.3f} s ; var: {var:.3f}s/var)".format( + min=int(spent_time // 60), sec=spent_time - (int(spent_time // 60) * 60), + var=spent_time / len(nessy.variables.keys()), tot=spent_time)) + sys.stdout.flush() + times["{0}_Serial".format(test['prefix'])] = spent_time + + # ====================================================================================================================== + + st_time = timeit.default_timer() + # ===== Parallel Chunk Write + nessy.to_netcdf('nc_test_{0}_{1}_parallel_chunked.nc'.format(test['prefix'], nessy.size), chunking=True) + # ===== END Parallel Chunk Write + spent_time = timeit.default_timer() - st_time + if nessy.master: + print("Writing in chunked parallel time {min:02d}:{sec:02.3f} (tot: {tot:.3f} s ; var: {var:.3f}s/var)".format( + min=int(spent_time // 60), sec=spent_time - (int(spent_time // 60) * 60), + var=spent_time / len(nessy.variables.keys()), tot=spent_time)) + sys.stdout.flush() + times["{0}_Chunk".format(test['prefix'])] = spent_time + + # ====================================================================================================================== + + st_time = timeit.default_timer() + # ===== Parallel Write + nessy.to_netcdf('nc_test_{0}_{1}_parallel.nc'.format(test['prefix'], nessy.size)) + # ===== END Parallel Write + spent_time = timeit.default_timer() - st_time + if nessy.master: + print("Writing in parallel time {min:02d}:{sec:02.3f} (tot: {tot:.3f} s ; var: 
diff --git a/tests/2-nes_tests_by_projection.py b/tests/2-nes_tests_by_projection.py
new file mode 100644
index 0000000000000000000000000000000000000000..93b4b4734b92f93f74313317e6f12c12aff0613d
--- /dev/null
+++ b/tests/2-nes_tests_by_projection.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+import sys
+import timeit
+import pandas as pd
+from mpi4py import MPI
+from nes import *
+
+paths = {'regular_file': {'path': '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/original_file/MONARCH_d01_2008123100.nc',
+                          'projection': 'regular',
+                          'variables': ['O3'],
+                          'parallel_methods': ['X', 'Y', 'T']},
+         'rotated_file': {'path': '/gpfs/scratch/bsc32/bsc32538/mr_multiplyby/OUT/stats_bnds/monarch/a45g/regional/daily_max/O3_all/O3_all-000_2021080300.nc',
+                          'projection': 'rotated',
+                          'variables': ['O3_all'],
+                          'parallel_methods': ['X', 'Y', 'T']},
+         'points_file': {'path': '/esarchive/obs/eea/eionet/hourly/pm10/pm10_202107.nc',
+                         'projection': 'points',
+                         'variables': [],  # all
+                         'parallel_methods': ['X', 'T']},
+         'points_ghost_file': {'path': '/gpfs/projects/bsc32/AC_cache/obs/ghost/EANET/1.4/daily/sconcso4/sconcso4_201911.nc',
+                               'projection': 'points_ghost',
+                               'variables': [],  # all
+                               'parallel_methods': ['X']},
+         'lcc_file': {'path': '/esarchive/exp/wrf-hermes-cmaq/b075/eu/hourly/pm10/pm10_2022062600.nc',
+                      'projection': 'lcc',
+                      'variables': [],  # all
+                      'parallel_methods': ['X', 'Y', 'T']},
+         'mercator_file': {'path': '/esarchive/scratch/avilanova/software/NES/Jupyter_notebooks/input/mercator_grid_example.nc',
+                           'projection': 'mercator',
+                           'variables': [],  # all
+                           'parallel_methods': ['X', 'Y', 'T']}
+         }
+
+results = []
+
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+size = comm.Get_size()
+
+for name, test_info in paths.items():
+
+    path = test_info['path']
+    projection = test_info['projection']
+    variables = test_info['variables']
+    parallel_methods = test_info['parallel_methods']
+
+    for parallel_method in parallel_methods:
+
+        if rank == 0:
+            print('TEST TO USE {0} GRID IN {1} FOR {2} USING {3} NODES'.format(
+                projection.upper(), parallel_method, path, size))
+            sys.stdout.flush()
+
+        try:
+
+            # Read
+            start_time = timeit.default_timer()
+            nessy_1 = open_netcdf(path=path, comm=comm, info=True, parallel_method=parallel_method)
+            open_time = timeit.default_timer() - start_time
+
+            # Select variables and load
+            start_time = timeit.default_timer()
+            if len(variables) > 0:
+                nessy_1.keep_vars(variables)
+            nessy_1.load()
+            load_time = timeit.default_timer() - start_time
+            comm.Barrier()
+
+            # Write in serial
+            if rank == 0:
+                print('WRITE IN SERIAL')
+                sys.stdout.flush()
+            start_time = timeit.default_timer()
+            nessy_1.to_netcdf('{0}_{1}_file_{2}_serial.nc'.format(size, projection, parallel_method),
+                              info=True, serial=True)
+            serial_time = timeit.default_timer() - start_time
+            comm.Barrier()
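+            # comm.Barrier() keeps the ranks in lockstep so that each timed phase
+            # starts only after every rank has finished the previous one; without it
+            # the per-phase timings of the different ranks would overlap.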
+            # Write in parallel
+            if rank == 0:
+                print('WRITE IN PARALLEL')
+                sys.stdout.flush()
+            start_time = timeit.default_timer()
+            nessy_1.to_netcdf('{0}_{1}_file_{2}_parallel.nc'.format(size, projection, parallel_method),
+                              info=True)
+            parallel_time = timeit.default_timer() - start_time
+            comm.Barrier()
+
+            # Write in chunks
+            if rank == 0:
+                print('WRITE IN CHUNKS')
+                sys.stdout.flush()
+            start_time = timeit.default_timer()
+            nessy_1.to_netcdf('{0}_{1}_file_{2}_chunking.nc'.format(size, projection, parallel_method),
+                              info=True, chunking=True)
+            chunking_time = timeit.default_timer() - start_time
+            comm.Barrier()
+
+            # Close everything
+            del nessy_1
+
+            if rank == 0:
+                print('Test was successful for {0} projection in {1}'.format(projection, parallel_method))
+                sys.stdout.flush()
+
+            # End timer and save results
+            results.append({'Projection': projection,
+                            'Method': parallel_method,
+                            'Open': '{min:02d}:{sec:02.3f}'.format(
+                                min=int(open_time // 60), sec=open_time - (int(open_time // 60) * 60)),
+                            'Load': '{min:02d}:{sec:02.3f}'.format(
+                                min=int(load_time // 60), sec=load_time - (int(load_time // 60) * 60)),
+                            'Serial': '{min:02d}:{sec:02.3f}'.format(
+                                min=int(serial_time // 60), sec=serial_time - (int(serial_time // 60) * 60)),
+                            'Chunking': '{min:02d}:{sec:02.3f}'.format(
+                                min=int(chunking_time // 60), sec=chunking_time - (int(chunking_time // 60) * 60)),
+                            'Parallel': '{min:02d}:{sec:02.3f}'.format(
+                                min=int(parallel_time // 60), sec=parallel_time - (int(parallel_time // 60) * 60))
+                            })
+
+            comm.Barrier()
+
+        except Exception as e:
+            print(e)
+
+        sys.stdout.flush()
+
+comm.Barrier()
+
+if rank == 0:
+    table = pd.DataFrame(results)
+    print('RESULTS TABLE')
+    print(table)
+    table.to_csv('{0}_results.csv'.format(size))
+    sys.stdout.flush()
diff --git a/tests/scalability_test_nord3v2.bash b/tests/scalability_test_nord3v2.bash
new file mode 100644
index 0000000000000000000000000000000000000000..7e398100060d08b481af1de54d51ae3525350023
--- /dev/null
+++ b/tests/scalability_test_nord3v2.bash
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+#EXPORTPATH="/esarchive/scratch/avilanova/software/NES"
+EXPORTPATH="/gpfs/projects/bsc32/models/NES"
+SRCPATH="/esarchive/scratch/avilanova/software/NES/tests"
+EXE="2-nes_tests_by_projection.py"
+
+module purge
+module load Python/3.7.4-GCCcore-8.3.0
+module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4
+module load cfunits/1.8-foss-2019b-Python-3.7.4
+module load xarray/0.17.0-foss-2019b-Python-3.7.4
+module load pandas/1.2.4-foss-2019b-Python-3.7.4
+module load mpi4py/3.0.3-foss-2019b-Python-3.7.4
+module load filelock/3.7.1-foss-2019b-Python-3.7.4
+module load pyproj/2.5.0-foss-2019b-Python-3.7.4
+module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4
+
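+# Each iteration submits one exclusive Slurm job that runs the projection test
+# under mpirun with 1, 2, 4 and 8 processes, so the per-size result CSVs can be
+# compared to measure scalability.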
+for nprocs in 1 2 4 8
+do
+  JOB_ID=`sbatch --ntasks=${nprocs} --exclusive --job-name=nes_${nprocs} --output=./log_nord3v2_NES_${nprocs}_%J.out --error=./log_nord3v2_NES_${nprocs}_%J.err -D . --time=02:00:00 --wrap="export PYTHONPATH=${EXPORTPATH}:${PYTHONPATH}; cd ${SRCPATH}; mpirun --mca mpi_warn_on_fork 0 -np ${nprocs} python ${SRCPATH}/${EXE}"`
+done
\ No newline at end of file
diff --git a/tests/test_bash_mn4.cmd b/tests/test_bash_mn4.cmd
new file mode 100644
index 0000000000000000000000000000000000000000..f3d2da7e3665ebc2cead9c78ff006bc10ed11701
--- /dev/null
+++ b/tests/test_bash_mn4.cmd
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+#SBATCH --qos=debug
+#SBATCH -A bsc32
+#SBATCH --cpus-per-task=1
+#SBATCH -n 4
+#SBATCH -t 00:30:00
+#SBATCH -J test_nes
+#SBATCH --output=log_mn4_NES_%j.out
+#SBATCH --error=log_mn4_NES_%j.err
+#SBATCH --exclusive
+
+### ulimit -s 128000
+
+module purge
+module use /gpfs/projects/bsc32/software/suselinux/11/modules/all
+
+module load Python/3.7.4-GCCcore-8.3.0
+module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4
+module load cfunits/1.8-foss-2019b-Python-3.7.4
+module load xarray/0.17.0-foss-2019b-Python-3.7.4
+module load OpenMPI/4.0.5-GCC-8.3.0-mn4
+module load filelock/3.7.1-foss-2019b-Python-3.7.4
+module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4
+module load pyproj/2.5.0-foss-2019b-Python-3.7.4
+
+
+export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH}
+cd /gpfs/scratch/bsc32/bsc32538/NES_tests/NES/tests
+
+mpirun --mca mpi_warn_on_fork 0 -np 4 python basic_nes_tests.py
diff --git a/tests/test_bash_nord3v2.cmd b/tests/test_bash_nord3v2.cmd
new file mode 100644
index 0000000000000000000000000000000000000000..8dd85bb8db5201407c6decd780c6644dce6fa3dc
--- /dev/null
+++ b/tests/test_bash_nord3v2.cmd
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+####SBATCH --qos=debug
+#SBATCH -A bsc32
+#SBATCH --cpus-per-task=1
+#SBATCH -n 4
+#SBATCH -t 00:10:00
+#SBATCH -J test_nes
+#SBATCH --output=log_nord3v2_NES_%j.out
+#SBATCH --error=log_nord3v2_NES_%j.err
+#SBATCH --exclusive
+
+### ulimit -s 128000
+
+module purge
+
+module load Python/3.7.4-GCCcore-8.3.0
+module load netcdf4-python/1.5.3-foss-2019b-Python-3.7.4
+module load cfunits/1.8-foss-2019b-Python-3.7.4
+module load xarray/0.17.0-foss-2019b-Python-3.7.4
+module load pandas/1.2.4-foss-2019b-Python-3.7.4
+module load mpi4py/3.0.3-foss-2019b-Python-3.7.4
+module load filelock/3.7.1-foss-2019b-Python-3.7.4
+module load eccodes-python/0.9.5-foss-2019b-Python-3.7.4
+module load pyproj/2.5.0-foss-2019b-Python-3.7.4
+
+export PYTHONPATH=/gpfs/projects/bsc32/models/NES:${PYTHONPATH}
+
+cd /esarchive/scratch/avilanova/software/NES/tests
+
+mpirun --mca mpi_warn_on_fork 0 -np 4 python 2-nes_tests_by_projection.py