diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index deac7bb068f6946df608939f7e706a3651c2fb0e..21633743b0bd8c249fde3d56ada16d8493677c2d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -30,7 +30,7 @@ lint1-test-job: # This job also runs in the test stage. - echo "Running venv..." - source $HOME/dust-dashboard/bin/activate - echo "Linting main code..." - - pylint ./*py | tail -3 + - pylint --fail-under=3.0 ./*py | tail -3 lint2-test-job: # This job also runs in the test stage. stage: test # It can run at the same time as unit-test-job (in parallel). @@ -38,7 +38,7 @@ lint2-test-job: # This job also runs in the test stage. - echo "Running venv..." - source $HOME/dust-dashboard/bin/activate - echo "Linting tabs code..." - - pylint ./tabs/*py | tail -3 + - pylint --fail-under=3.0 ./tabs/*py | tail -3 lint3-test-job: # This job also runs in the test stage. stage: test # It can run at the same time as unit-test-job (in parallel). @@ -46,4 +46,4 @@ lint3-test-job: # This job also runs in the test stage. - echo "Running venv..." - source $HOME/dust-dashboard/bin/activate - echo "Linting preproc code..." 
- - pylint ./preproc/*py | tail -3 + - pylint --fail-under=3.0 ./preproc/*py | tail -3 diff --git a/assets/style.css b/assets/style.css index 1c1370cfe7a3ced30d13f56f323badd8474a6e4b..561f42c5b9048faedbdbb5d63cda1940ba3afc32 100644 --- a/assets/style.css +++ b/assets/style.css @@ -320,7 +320,7 @@ div.SingleDatePickerInput { padding: 0.3rem 35px 25px !important; } -#slider-graph, #obs-slider-graph, #obs-aod-slider-graph { +#model-slider-graph, #obs-slider-graph, #obs-aod-slider-graph { padding: 0.2rem 25px 25px 1rem !important; width: 30rem; } diff --git a/conf/aliases.json b/conf/aliases.json index a7bcc756e83eb0da2d7dd9092382164280220198..052ac6b1cad51dbdc7eede927673c3b17e1c878e 100644 --- a/conf/aliases.json +++ b/conf/aliases.json @@ -7,7 +7,7 @@ "dry_deposition": "DUST_DEPD", "wet_deposition": "DUST_DEPW", "load": "DUST_LOAD", - "extinction":"DUST_EXT", + "extinction": "DUST_EXT_SFC", "visual_comparison": "nrt", "statistics": "scores", "eumetsat-rgb": "rgb", diff --git a/conf/dates_dev.json b/conf/dates_dev.json index 6fb752dcd678593497ec542ef91366c89b37ca1c..ca806b61d311888e8e9b3d451b885696f949735d 100644 --- a/conf/dates_dev.json +++ b/conf/dates_dev.json @@ -1,4 +1,8 @@ { - "start_date" : "20120120", - "end_date" : "20220831" + "start_date" : "20230330", + "end_date" : "20230409", + "delay" : { + "delayed": false, + "start_date": "20230401" + } } diff --git a/conf/dates_prod.json b/conf/dates_prod.json index 1da5c9f01987ce17832aa3013d4a696e9993cc1b..0ca588fdbb36ad4ff4dc563f840dc66e7f8738db 100644 --- a/conf/dates_prod.json +++ b/conf/dates_prod.json @@ -1,4 +1,5 @@ { "start_date" : "20120120", - "end_date" : "" + "end_date" : "", + "delay" : true } diff --git a/conf/models.json b/conf/models.json index bf79b459f7f565c7db025f5a49a2ee9862a2b1d1..3f8ca50eda2cdd99e8f11b620ddfda7542e11c54 100644 --- a/conf/models.json +++ b/conf/models.json @@ -4,111 +4,127 @@ "template": "_3H_MEDIAN", "path": "/data/products/median", "color": "#303030", - "start": 12 + "start": 
0, + "weight": 0 }, "monarch": { "name": "MONARCH", "template": "12_3H_SDSWAS_NMMB-BSC-v2_OPER", "path": "/data/products/NMMB-BSC", "color": "#2e3192", - "start": 12 + "start": 12, + "weight": 1 }, "cams": { "name": "CAMS-IFS", "template": "00_3H_MACC-ECMWF", "path": "/data/products/ECMWF", "color": "#70d8f6", - "start": 0 + "start": 0, + "weight": 1 }, "dream8-macc": { "name": "DREAM8-CAMS", "template": "00_3H_DREAM8-MACC", "path": "/data/products/DREAM8-MACC", "color": "#26b33f", - "start": 0 + "start": 0, + "weight": 1 }, "nasa-geos": { "name": "NASA-GEOS", "template": "_NASA-GEOS", "path": "/data/products/NASA-GEOS", "color": "#cb4ca6", - "start": 0 + "start": 0, + "weight": 1 }, "metoffice": { "name": "MetOffice-UM", "template": "00_3H_UKMET", "path": "/data/products/msg-metoffice", "color": "#6674f8", - "start": 0 + "start": 0, + "weight": 1 }, "ncep-gefs": { "name": "NCEP-GEFS", "template": "_NCEP-NGAC", "path": "/data/products/NCEP-NGAC", "color": "#ad7729", - "start": 0 + "start": 0, + "weight": 1 }, "ema-regcm4": { "name": "EMA-RegCM4", "template": "_EMA-RegCM4", "path": "/data/products/EMA-RegCM4", "color": "#f697db", - "start": 0 + "start": 0, + "weight": 0.1 }, "silam": { "name": "SILAM", "template": "00_SILAM", "path": "/data/products/SILAM", "color": "#47ecc4", - "start": 0 + "start": 0, + "weight": 1 }, "lotos-euros": { "name": "LOTOS-EUROS", "template": "00_3H_LOTOSEUROS", "path": "/data/products/LOTOS-EUROS", "color": "#155d9e", - "start": 0 + "start": 0, + "weight": 1 }, "icon-art": { "name": "ICON-ART", "template": "00_ICON-ART", "path": "/data/products/ICON-ART", "color": "#006837", - "start": 0 + "start": 0, + "weight": 1 }, "noa": { "name": "NOA-WRF-CHEM", "template": "12_NOA-WRF-CHEM", "path": "/data/products/NOA", "color": "#b4e76c", - "start": 12 + "start": 12, + "weight": 1 }, "wrf-nemo": { "name": "WRF-NEMO", "template": "00_1H_WRF-NEMO-CAMx", "path": "/data/products/WRF-NEMO", "color": "#28a0bc", - "start": 0 + "start": 0, + "weight": 0 
}, "aladin": { "name": "ALADIN", "template": "00_3H_ALADIN_DUST", "path": "/data/products/ALADIN", "color": "#d9e021", - "start": 0 + "start": 0, + "weight": 1 }, "zamg": { "name": "ZAMG-WRF-CHEM", "template": "00_3H_ZAMG-WRF-CHEM", "path": "/data/products/ZAMG-WRF-CHEM", "color": "#ff9355", - "start": 0 + "start": 0, + "weight": 1 }, "mocage": { "name": "MOCAGE", "template": "00_3H_MOCAGE", "path": "/data/products/MOCAGE", "color": "#AD001D", - "start": 0 + "start": 0, + "weight": 1 } } diff --git a/conf/obs.json b/conf/obs.json index 1e10f7b7c95c29441cb459afaad40d99929489cb..f9af4378935e72af4a99077804c3d39819405c77 100644 --- a/conf/obs.json +++ b/conf/obs.json @@ -6,7 +6,8 @@ "template" : "{}_{}", "sites": "aeronet_sites.txt", "path" : "/data/daily_dashboard/obs/aeronet/", - "name": "Aeronet v3 lev1.5" + "name": "Aeronet v3 lev1.5", + "start_date": "20120101" }, "modis": { "obs_var": "od550aero", @@ -14,6 +15,7 @@ "template" : "{}_{}", "path" : "/data/daily_dashboard/obs/modis/", "name": "MODIS", - "title": "Optical Depth (550nm)
Valid: %(shour)sh %(sday)s %(smonth)s %(syear)s" + "title": "Optical Depth (550nm)
Valid: %(shour)sh %(sday)s %(smonth)s %(syear)s", + "start_date": "20180101" } } diff --git a/conf/vars.json b/conf/vars.json index 39e669de2db1d90529b7c0584b43073494be3eca..da4323e2bc96e5d88dab13300d22c59a5c749ea4 100644 --- a/conf/vars.json +++ b/conf/vars.json @@ -5,6 +5,7 @@ "bounds": [0, 0.1, 0.2, 0.4, 0.8, 1.2, 1.6, 3.2, 6.4, 10], "mul": 1, "title": "Dust Optical Depth (550nm)
Valid: %(shour)sh %(sday)s %(smonth)s %(syear)s (H+%(step)s)", + "threshold_list": [0.1, 0.2, 0.5, 0.8], "models": "all" }, "SCONC_DUST": { @@ -13,6 +14,7 @@ "bounds": [0, 5, 20, 50, 200, 500, 2000, 5000, 20000, 100000], "mul": 1e9, "title": "Dust Surface Conc. (µg/m³)
Valid: %(shour)sh %(sday)s %(smonth)s %(syear)s (H+%(step)s)", + "threshold_list": [50, 100, 200, 500], "models": "all" }, "DUST_DEPD": { diff --git a/dash_server.py b/dash_server.py index 2f9587047831b1eb22cf2a64fe6d347ef3cfb12f..63ae92db801995543b0932fcbf3e2b8e25a0d866 100755 --- a/dash_server.py +++ b/dash_server.py @@ -1,49 +1,14 @@ """ Dash Server """ -#import gevent.monkey -#gevent.monkey.patch_all() - import dash import dash_bootstrap_components as dbc from dash import dcc from dash import html -from dash.dependencies import Output -from dash.dependencies import Input -from dash.dependencies import State -from dash.dependencies import ALL -from dash.dependencies import MATCH -from dash.dependencies import ClientsideFunction -from dash.exceptions import PreventUpdate -import flask -from flask import g, make_response, request -from flask_caching import Cache -#from pyinstrument import Profiler -from pathlib import Path -from datetime import datetime as dt -from datetime import timedelta -from urllib.parse import urlparse, parse_qs +from flask.app import Flask -from data_handler import DEFAULT_VAR -from data_handler import DEFAULT_MODEL -from data_handler import VARS -from data_handler import MODELS from data_handler import DEBUG -from data_handler import DATES -from data_handler import cache, cache_timeout -from data_handler import pathname -from data_handler import ALIASES -from data_handler import ROUTE_DEFAULTS - -from tabs.forecast import tab_forecast -from tabs.forecast import sidebar_forecast -from tabs.evaluation import tab_evaluation -from tabs.evaluation import sidebar_evaluation -from tabs.observations import tab_observations -from tabs.observations import sidebar_observations -from tabs.fullscreen import go_fullscreen - -import uuid -import socket +from data_handler import cache +from data_handler import PATHNAME from router import * @@ -52,7 +17,7 @@ TIMEOUT = 10 fontawesome = 
'https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css' leaflet = "https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.2.0/leaflet.css" -srv = flask.Flask(__name__) +srv = Flask(__name__) app = dash.Dash(__name__, external_scripts=['https://code.jquery.com/jquery-3.6.0.slim.min.js'], external_stylesheets=[dbc.themes.BOOTSTRAP, @@ -60,7 +25,7 @@ app = dash.Dash(__name__, fontawesome, leaflet ], - url_base_pathname=pathname, + url_base_pathname=PATHNAME, server=srv, prevent_initial_callbacks=True ) @@ -70,6 +35,7 @@ app.config.suppress_callback_exceptions = True server = app.server cache.init_app(server) +# cache.app = app try: cache.clear() diff --git a/data_handler.py b/data_handler.py index 01a07fec8f719752e813d448ee00037b091bffd9..9fac649d853a3e2a482df17d1e818ec9869fc031 100644 --- a/data_handler.py +++ b/data_handler.py @@ -11,15 +11,11 @@ from matplotlib.colors import ListedColormap import numpy as np from netCDF4 import Dataset as nc_file import pandas as pd -import geopandas as gpd -from shapely import geometry import json import orjson from datetime import datetime from dateutil.relativedelta import relativedelta from collections import OrderedDict -from PIL import Image -import requests import calendar import time import os @@ -32,19 +28,10 @@ from utils import retrieve_single_point from utils import get_colorscale from pathlib import Path -import flask from flask_caching import Cache import uuid import socket -#SETUP BASE URL -HOSTNAME = socket.gethostbyname_ex(socket.gethostname())[0] - -if HOSTNAME in ('bscesdust03.bsc.es', 'dust.hqads.aemet.es'): - pathname = '/daily_dashboard/' -else: - pathname = '/dashboard/' - #SETUP CACHE cache_dir = "/dev/shm/{}".format(str(uuid.uuid1())) Path(cache_dir).mkdir(parents=True, exist_ok=True) @@ -58,6 +45,14 @@ cache_config = { cache = Cache(config=cache_config) cache_timeout = 86400 +#SETUP BASE URL +HOSTNAME = socket.gethostbyname_ex(socket.gethostname())[0] + +if HOSTNAME in ('bscesdust03.bsc.es', 
'dust.hqads.aemet.es'): + PATHNAME = '/daily_dashboard/' +else: + PATHNAME = '/dashboard/' + DIR_PATH = os.path.dirname(os.path.realpath(__file__)) DEBUG = True # False @@ -87,6 +82,13 @@ COLORS_PROB = ALL_COLORS['prob'] COLORMAP = ListedColormap(COLORS_NORGB) COLORMAP_PROB = ListedColormap(COLORS_PROB) +START_DATE = DATES['start_date'] +DELAY = DATES['delay']['delayed'] +DELAY_DATE = DATES['delay']['start_date'] +END_DATE = DATES['end_date'] +END_DATE = END_DATE or (DELAY and (dt.now() - + timedelta(days=1)).strftime("%Y%m%d") or dt.now().strftime("%Y%m%d")) + ROUTE_DEFAULTS = { 'tab':['forecast'], 'var': ['OD550_DUST'], @@ -96,7 +98,7 @@ ROUTE_DEFAULTS = { 'obs_option': ['rgb'], 'country': ['burkinafaso'], 'download': [None], - 'date': [DATES['end_date'] or (dt.now() - timedelta(days=1)).strftime("%Y%m%d")] + 'date': [END_DATE] } STATS = OrderedDict({ @@ -207,7 +209,7 @@ class Observations1dHandler(object): class ObsTimeSeriesHandler(object): - """ Class to handle time series """ + """ Class to handle evaluation time series """ def __init__(self, obs, start_date, end_date, variable, models=None): self.obs = obs @@ -229,10 +231,10 @@ class ObsTimeSeriesHandler(object): '{}01'.format((datetime.strptime(months[-1], '%Y%m') + relativedelta(days=31)).strftime('%Y%m')), freq='3H') - if DEBUG: print('MONTHS', months) - if DEBUG: print('DATE_INDEX', self.date_index) -# if not months: -# months = [datetime.strptime(end_date, "%Y-%m-%d").strftime("%Y%m")] + if DEBUG: + print('MONTHS', months) + if DEBUG: + print('DATE_INDEX', self.date_index) fname_obs = fname_tpl.format(dat=obs) notnans, obs_df = concat_dataframes(fname_obs, months, variable, @@ -243,7 +245,6 @@ class ObsTimeSeriesHandler(object): fname_mod = fname_tpl.format(dat=mod) _, mod_df = concat_dataframes(fname_mod, months, variable, rename_from=None, notnans=notnans) - # if DEBUG: print("DATAFRAMES", mod, mod_df) self.dataframe.append(mod_df) @@ -352,7 +353,7 @@ class ObsTimeSeriesHandler(object): class 
TimeSeriesHandler(object): - """ Class to handle time series """ + """ Class to handle forecast time series """ def __init__(self, model, date, variable): if isinstance(model, str): @@ -367,21 +368,27 @@ class TimeSeriesHandler(object): self.month = datetime.strptime(date, "%Y-%m-%d").strftime("%Y%m") self.currdate = datetime.strptime(date, "%Y-%m-%d").strftime("%Y%m%d") - def retrieve_single_point(self, tstep, lat, lon, model=None, method='netcdf', forecast=False): + def retrieve_single_point(self, tstep, lat, lon, model=None): if not model: model = self.model[0] if DEBUG: print("----------", model) - if forecast: - method = 'netcdf' - path_template = '{}{}.nc'.format(self.currdate, MODELS[model]['template'], self.variable) + method = 'netcdf' + if not DELAY and MODELS[model]['start'] == 12: + mod_date = (datetime.strptime(self.currdate, "%Y%m%d") - + timedelta(days=1)).strftime("%Y%m%d") + else: + mod_date = self.currdate + path_template = '{}{}.nc'.format(mod_date, + MODELS[model]['template'], + self.variable) fpath = os.path.join(MODELS[model]['path'], method, path_template) - return retrieve_single_point( fpath, tstep, lat, lon, self.variable, - method=method, forecast=forecast) + return retrieve_single_point(fpath, tstep, lat, lon, + self.variable) def retrieve_timeseries(self, lat, lon, model=None, method='netcdf', forecast=False): @@ -411,7 +418,12 @@ class TimeSeriesHandler(object): if forecast: method = 'netcdf' - path_template = '{}{}.nc'.format(self.currdate, MODELS[mod]['template'], self.variable) + if not DELAY and MODELS[mod]['start'] == 12: + mod_date = (datetime.strptime(self.currdate, "%Y%m%d") - + timedelta(days=1)).strftime("%Y%m%d") + else: + mod_date = self.currdate + path_template = '{}{}.nc'.format(mod_date, MODELS[mod]['template'], self.variable) fpath = os.path.join(filedir, method, @@ -1422,10 +1434,6 @@ class ProbFigureHandler(object): varlist = [var for var in self.input_file.variables if var in VARS] self.xlon, self.ylat = 
np.meshgrid(lon, lat) -# self.colormaps = { -# self.varname: get_colorscale(self.bounds, COLORMAP_PROB) -# } - if selected_date: self.selected_date_plain = selected_date @@ -1449,7 +1457,6 @@ class ProbFigureHandler(object): """ Retrieve data from NetCDF file """ if DEBUG: print("----------", tstep, self.tim) tim = int(self.tim[tstep]) - tim += 1 if self.what == 'days': cdatetime = self.rdatetime + relativedelta(days=tim) elif self.what == 'hours': @@ -1548,10 +1555,8 @@ class ProbFigureHandler(object): geojson, colorbar = self.generate_contour_tstep_trace(varname, tstep) if varname and os.path.exists(self.filepath) and static: if DEBUG: print('Adding points ...') - # ret.append(self.generate_var_tstep_trace(varname, tstep)) elif varname is None or not os.path.exists(self.filepath): if DEBUG: print('Adding one point ...') - # ret.append(self.generate_var_tstep_trace()) if DEBUG: print('Update layout ...') if not varname: @@ -1620,7 +1625,7 @@ class WasFigureHandler(object): self.fig = None - def get_regions_data(self, day=1): + def get_regions_data(self, day=0): input_dir = WAS[self.was]['path'].format(was=self.was, format='h5', date=self.selected_date_plain) input_file = WAS[self.was]['template'].format(date=self.selected_date_plain, var=self.variable, format='h5') @@ -1635,7 +1640,7 @@ class WasFigureHandler(object): names, colors, definitions = df.loc['Day{}'.format(day)].values.T return names, colors, definitions - def get_geojson_url(self, day=1): + def get_geojson_url(self, day=0): from dash_server import app geojsons_dir = WAS[self.was]['path'].format(was=self.was, format='geojson', date=self.selected_date_plain) geojson_file = WAS[self.was]['template'].format(date=self.selected_date_plain, var=self.variable, format='geojson').replace('.geojson', '_{}.geojson'.format(day)) @@ -1668,7 +1673,7 @@ class WasFigureHandler(object): return cdatetime - def generate_contour_tstep_trace(self, day=1): + def generate_contour_tstep_trace(self, day=0): """ Generate trace 
to be added to data, per variable and timestep """ if not hasattr(self, 'was'): @@ -1726,7 +1731,7 @@ class WasFigureHandler(object): return geojson, legend - def get_title(self, day=1): + def get_title(self, day=0): """ return title according to the date """ rdatetime = self.retrieve_cdatetime(tstep=0) cdatetime = rdatetime + relativedelta(days=day) @@ -1742,7 +1747,7 @@ class WasFigureHandler(object): #'step': "{:02d}".format(tstep*FREQ), }) - def retrieve_var_tstep(self, day=1, static=True, aspect=(1,1)): + def retrieve_var_tstep(self, day=0, static=True, aspect=(1,1)): """ run plot """ self.fig = go.Figure() day = int(day) @@ -1752,7 +1757,9 @@ class WasFigureHandler(object): if self.input_file is not None: fig_title = html.P(html.B( [ - item for sublist in self.get_title(day).split('
') for item in [sublist, html.Br()] + item + for sublist in self.get_title(day).split('
') + for item in [sublist, html.Br()] ][:-1] )) else: diff --git a/preproc/calc_median.py b/preproc/calc_median.py deleted file mode 100644 index 284d1a0f8468056c396fd810c18afd8901b5265f..0000000000000000000000000000000000000000 --- a/preproc/calc_median.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2016 Earth Sciences Department, BSC-CNS -# - -"""calc_median module - -This module provide calculation of MEDIAN. - -""" - -import numpy as np -import xarray as xr -import json -import os -import os.path -import sys -import subprocess -import time -from datetime import datetime - -np.set_printoptions(precision=2) - -DIR_PATH = os.path.abspath(os.path.dirname(__file__)) -MODELS = json.load(open(os.path.join(DIR_PATH, '../conf/models.json'))) -NETCDF_TEMPLATE = "{}/archive/{}{}.nc" - -DEBUG = True - -lonlat = "-27,65,0,65" -main_output_path = "/data/products/median/" -interpolated_path = os.path.join(main_output_path, "interpolated") -# archive_path = os.path.join(main_output_path, "tmp") -archive_path = os.path.join(main_output_path, "archive") -# selected_date = "20220515" - - -def generate_median_file(selected_date): - - filepaths = [] - - for model in MODELS: - - filepath = NETCDF_TEMPLATE.format( - MODELS[model]['path'], - selected_date, - MODELS[model]['template'] - ) - - interp_filepath = os.path.join(interpolated_path, os.path.basename(filepath)) - - if model == 'median' or not os.path.exists(filepath): - continue - - print('MODEL', model) - - if MODELS[model]['start'] == 0: - tsteps = "5/25" - else: - tsteps = "1/21" - - cdo_date = datetime.strptime(selected_date, "%Y%m%d").strftime("%Y-%m-%d") - remap_script = "cdo -L -r -settaxis,"+cdo_date+",12:00:00,3hours -selvar,OD550_DUST,SCONC_DUST -chname,od550_dust,OD550_DUST -chname,sconc_dust,SCONC_DUST -seltimestep,"+tsteps+" -sellonlatbox,"+lonlat+" -remapbil,global_0.5 "+filepath+" "+interp_filepath - split_script = "cdo splitname {} {}".format(interp_filepath, 
interp_filepath.replace('.nc', '_')) - - subprocess.call(remap_script, shell=True) - time.sleep(1) - subprocess.call(split_script, shell=True) - - filepaths.append(interp_filepath) - - sconc_ds = xr.open_mfdataset("{}/{}*SCONC_DUST.nc".format(interpolated_path, selected_date), concat_dim='model', combine='nested') - od550_ds = xr.open_mfdataset("{}/{}*OD550_DUST.nc".format(interpolated_path, selected_date), concat_dim='model', combine='nested') - sconc_dust = sconc_ds['SCONC_DUST'].median(dim='model', skipna=True, keep_attrs=True) - od550_dust = od550_ds['OD550_DUST'].median(dim='model', skipna=True, keep_attrs=True) - newds = xr.Dataset({ 'OD550_DUST': od550_dust, 'SCONC_DUST': sconc_dust }).astype(np.float32) - print(newds) - newds = newds.rename_dims({'lon': 'longitude', 'lat': 'latitude'}).rename_vars({'lon': 'longitude', 'lat': 'latitude'}) - newds['longitude'] = newds['longitude'].astype(np.float32) - newds['latitude'] = newds['latitude'].astype(np.float32) - newds.to_netcdf('{}/{}_3H_MEDIAN.nc'.format(archive_path, selected_date), - encoding={ 'time' : { 'units' : 'hours since '+cdo_date+' 00:00:00' } }) - - # clean interpolated directory - for fname in filepaths: - # os.remove(fname) - print("to be removed", fname) - - -if __name__ == "__main__": - import sys - if len(sys.argv) > 1: - generate_median_file(sys.argv[1]) - else: - generate_median_file(datetime.date.now().strftime('%Y%m%d')) diff --git a/preproc/median_calc.py b/preproc/median_calc.py new file mode 100644 index 0000000000000000000000000000000000000000..18d77788a20183329b3ff1dd9a26d1204cde6dd0 --- /dev/null +++ b/preproc/median_calc.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Earth Sciences Department, BSC-CNS +# + +"""calc_median module + +This module provide calculation of MEDIAN. 
+ +""" + +import numpy as np +import xarray as xr +import json +import os +import os.path +import sys +import subprocess +import time +from datetime import datetime +from datetime import timedelta +from operator import itemgetter + +np.set_printoptions(precision=2) + +DIR_PATH = os.path.abspath(os.path.dirname(__file__)) +MODELS = json.load(open(os.path.join(DIR_PATH, '../conf/models.json'))) +DATES = json.load(open(os.path.join(DIR_PATH, '../conf/dates.json'))) +NETCDF_TEMPLATE = "{}/archive/{}{}.nc" + +DEBUG = True +DELAY = DATES['delay']['delayed'] + +lonlat = "-27,65,0,65" +main_output_path = "/data/products/median/" +interpolated_path = os.path.join(main_output_path, "interpolated") +# archive_path = os.path.join(main_output_path, "tmp") +archive_path = os.path.join(main_output_path, "archive") +# selected_date = "20220515" + + +def generate_median_file(selected_date): + """ Generates the median of stored data models """ + + model_conf = { + True: { + 0: { + 'tsteps': "5/25", + 'model_date': selected_date, + 'start_hour': "12" + }, + 12: { + 'tsteps': "1/21", + 'model_date': selected_date, + 'start_hour': "12" + }, + }, + False: { + 0: { + 'tsteps': "1/25", + 'model_date': selected_date, + 'start_hour': "00" + }, + 12: { + 'tsteps': "5/29", + 'model_date': (datetime.strptime(selected_date, "%Y%m%d") - + timedelta(days=1)).strftime("%Y%m%d"), + 'start_hour': "00" + }, + } + } + + filepaths = [] + cdo_date = datetime.strptime(selected_date, "%Y%m%d").strftime("%Y-%m-%d") + if DEBUG: + print("CDO DATE", cdo_date) + + for model in MODELS: + + model_start = MODELS[model]['start'] + cur_dict = model_conf[DELAY][model_start] + tsteps, model_date, start_hour = itemgetter(*cur_dict.keys())(cur_dict) + + filepath_interp = NETCDF_TEMPLATE.format( + MODELS[model]['path'], + selected_date, + MODELS[model]['template'] + ) + + filepath = NETCDF_TEMPLATE.format( + MODELS[model]['path'], + model_date, + MODELS[model]['template'] + ) + + interp_filepath = 
os.path.join(interpolated_path, os.path.basename(filepath_interp)) + + if model == 'median' or not os.path.exists(filepath): + continue + + if DEBUG: + print('MODEL', model, filepath, model_date, filepath_interp, selected_date) + + + remap_script = "cdo -L -r -settaxis," +\ + cdo_date + "," + start_hour + ":00:00,3hours" +\ + " -selvar,OD550_DUST,SCONC_DUST" +\ + " -chname,od550_dust,OD550_DUST" +\ + " -chname,sconc_dust,SCONC_DUST" +\ + " -seltimestep," + tsteps +\ + " -sellonlatbox," + lonlat +\ + " -remapbil,global_0.5 " + filepath +\ + " " + interp_filepath + split_script = "cdo splitname {} {}".format(interp_filepath, interp_filepath.replace('.nc', '_')) + + + if DEBUG: + print('CDO REMAP SCRIPT', remap_script) + subprocess.call(remap_script, shell=True) + time.sleep(1) + if DEBUG: + print('CDO SPLIT SCRIPT', split_script) + subprocess.call(split_script, shell=True) + + filepaths.append(interp_filepath) + + sconc_ds = xr.open_mfdataset( + "{}/{}*SCONC_DUST.nc".format(interpolated_path, selected_date), + concat_dim='model', combine='nested') + od550_ds = xr.open_mfdataset( + "{}/{}*OD550_DUST.nc".format(interpolated_path, selected_date), + concat_dim='model', combine='nested') + sconc_dust = sconc_ds['SCONC_DUST'].median(dim='model', skipna=True, keep_attrs=True) + od550_dust = od550_ds['OD550_DUST'].median(dim='model', skipna=True, keep_attrs=True) + newds = xr.Dataset({ 'OD550_DUST': od550_dust, 'SCONC_DUST': sconc_dust }).astype(np.float32) + print(newds) + newds = newds.rename_dims( + {'lon': 'longitude', 'lat': 'latitude'}).rename_vars( + {'lon': 'longitude', 'lat': 'latitude'}) + newds['longitude'] = newds['longitude'].astype(np.float32) + newds['latitude'] = newds['latitude'].astype(np.float32) + newds.to_netcdf('{}/{}_3H_MEDIAN.nc'.format(archive_path, selected_date), + encoding={ 'time' : { 'units' : 'hours since '+cdo_date+' 00:00:00' } }) + + # clean interpolated directory + for fname in filepaths: + # os.remove(fname) + print("to be removed", fname) 
+ + +if __name__ == "__main__": + import sys + if len(sys.argv) > 1: + generate_median_file(sys.argv[1]) + else: + generate_median_file(datetime.date.now().strftime('%Y%m%d')) diff --git a/preproc/nc2geojson_was.py b/preproc/nc2geojson_was.py index abe7b155f25827aa6be7379a0ec12c6307726f87..294a189646e8b316869a47914ba3279862856d48 100755 --- a/preproc/nc2geojson_was.py +++ b/preproc/nc2geojson_was.py @@ -25,9 +25,16 @@ np.set_printoptions(precision=2) DIR_PATH = os.path.abspath(os.path.dirname(__file__)) MODELS = json.load(open(os.path.join(DIR_PATH, '../conf/models.json'))) WAS = json.load(open(os.path.join(DIR_PATH, '../conf/was.json'))) +DATES = json.load(open(os.path.join(DIR_PATH, '../conf/dates.json'))) NETCDF_TEMPLATE = "{}/archive/{}{}.nc" DEBUG = True +DELAY = DATES['delay']['delayed'] + +if DELAY: + DAYS = range(1, 3) +else: + DAYS = range(0, 3) class WasTables(object): """ Class to manage the figure creation """ @@ -70,17 +77,18 @@ class WasTables(object): if len(time_units) == 3: self.what, _, rdate = time_units rtime = "00:00" - elif len(time_units) >= 4: + # elif len(time_units) >= 4: + else: self.what, _, rdate, rtime = time_units[:4] if len(rtime) > 5: rtime = rtime[:5] if self.what == 'seconds': self.rdatetime = datetime.strptime("{} {}" .format(rdate, - rtime), "%Y-%m-%d %H:%M") + relativedelta(seconds=float(self.tim[0])) + rtime), "%Y-%m-%d %H:%M") + relativedelta(seconds=int(self.tim[0])) elif self.what == 'hours': self.rdatetime = datetime.strptime("{} {}" .format(rdate, - rtime), "%Y-%m-%d %H:%M") + relativedelta(hours=float(self.tim[0])) + rtime), "%Y-%m-%d %H:%M") + relativedelta(hours=int(self.tim[0])) self.xlon, self.ylat = np.meshgrid(lon, lat) self.vardata = self.input_file.variables[variable][:]*1e9 @@ -113,7 +121,8 @@ class WasTables(object): print('TIME', n, tstep) if self.what == 'seconds': ctime = (self.rdatetime + relativedelta(seconds=float(inc))) - elif self.what == 'hours': + # elif self.what == 'hours': + else: ctime = 
(self.rdatetime + relativedelta(hours=float(inc))) print('CTIME', ctime, 'DTIME', d_date) @@ -125,6 +134,11 @@ class WasTables(object): return self.vardata[d_idx,:,:].max(axis=0) def get_regions_data(self): + """ """ + names = [] + colors = [] + definitions = [] + if not hasattr(self, 'xlon'): return names, colors, definitions @@ -135,9 +149,6 @@ class WasTables(object): out_df = pd.DataFrame( columns=['day', 'names', 'colors', 'definitions']) - names = [] - colors = [] - definitions = [] days = [] corresp = {} @@ -147,7 +158,7 @@ class WasTables(object): if 'exclude' in WAS[self.was]: exclude = WAS[self.was]['exclude'] - for day in [1, 2]: + for day in DAYS: data = self.set_data(day=day).flatten() tmp_was_df = self.was_df if DEBUG: print('SHAPE DATA', data.shape) @@ -207,7 +218,8 @@ class WasTables(object): props = geojson_data['features'][feat_idx]['properties'] if props[self.was_name] in items_names: props['value'] = int(color_idx) - props['tooltip'] = "Region: {}
Level: {}".format(props[self.was_name], definition) + props['tooltip'] = "Region: {}
Level: {}".format( + props[self.was_name], definition) tmp_out = self.geojson_output.replace('.geojson', '_{}.geojson'.format(day)) with open(tmp_out, 'w') as out: diff --git a/preproc/prob/downloadBSCmodels.sh b/preproc/prob/downloadBSCmodels.sh deleted file mode 100644 index 25a8cf068620cc8be4506a1898efd33cf7cba721..0000000000000000000000000000000000000000 --- a/preproc/prob/downloadBSCmodels.sh +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/sh - -DATE1=$(date +"%Y%m%d") -YYYY1=$(date +"%Y") -MM1=$(date +"%m") -DD1=$(date +"%d") - -DATE=$(date +"%Y%m%d" -d "-1 day") -YYYY=$(date +"%Y" -d "-1 day") -MM=$(date +"%m" -d "-1 day") -DD=$(date +"%d" -d "-1 day") - -echo $DATE1 -echo $DATE -rm -rvf ncModelFiles/*.nc -#DATE="20180726" -#YYYY="2018" -#MM="07" -#DD="26" - -WG=' -q --user ewernerh@aemet.es --password Aemet304 --auth-no-challenge --no-check-certificate sds-was.aemet.es/forecast-products/dust-forecasts/files-download/' -WG=' -P ./ncModelFiles/ -q --user ewernerh@aemet.es --password Aemet304 --auth-no-challenge --no-check-certificate sds-was.aemet.es/forecast-products/dust-forecasts/files-download/' - -listOfModels=" -nmmb-bsc/$YYYY/$MM/${DATE}12_3H_NMMB-BSC.nc -noa-wrf-chem/$YYYY/$MM/${DATE}12_NOA-WRF-CHEM.nc -bsc-dream8b-v2/$YYYY/$MM/${DATE}_BSC_DREAM8b_V2.nc -macc-ecmwf/$YYYY/$MM/${DATE}00_3H_MACC-ECMWF.nc -nasa-geos-5/$YYYY/$MM/${DATE}_NASA-GEOS.nc -dream-nmme-macc/$YYYY/$MM/${DATE}00_3H_DREAM8-MACC.nc -silam/$YYYY/$MM/${DATE}00_SILAM.nc -ema-regcm4/$YYYY/$MM/${DATE}_EMA-RegCM4.nc -lotos-euros/$YYYY/$MM/${DATE}00_3H_LOTOSEUROS.nc -uk-met-office-um/$YYYY/$MM/${DATE}00_3H_UKMET.nc -ncep-ngac/$YYYY/$MM/${DATE}_NCEP-NGAC.nc -icon-art/$YYYY/$MM/${DATE}00_ICON-ART.nc -" -#ncep-ngac/$YYYY/$MM/${DATE}_NCEP-NGAC.nc Not being downloaded due to poor performance -echo $string |cut -d';' -f1 | read str1 -echo "download RUNNING..." 
-for modelName in $listOfModels -do - #mod=$(echo $modelName |cut -d '/' -f 1) - echo wget $WG$modelName - wget $WG$modelName - echo $modelName -done - -#Change the _FillValue for NOA model to prevent error -ncatted -a _FillValue,,m,f,-1.0e36 ncModelFiles/${DATE}12_NOA-WRF-CHEM.nc -#rename od550_dust in NMMB-BSC model -ncrename -v od550_dust,OD550_DUST ncModelFiles/${DATE}12_3H_NMMB-BSC.nc -ncrename -v sconc_dust,SCONC_DUST ncModelFiles/${DATE}12_3H_NMMB-BSC.nc -#Choose the steps runtime 12 -cdo -seltimestep,1/21 ncModelFiles/${DATE}12_3H_NMMB-BSC.nc ncModelFiles/NMMB-BSC.nc -cdo -seltimestep,1/21 ncModelFiles/${DATE}12_NOA-WRF-CHEM.nc ncModelFiles/NOA-WRF-CHEM.nc -cdo -seltimestep,1/21 ncModelFiles/${DATE}_BSC_DREAM8b_V2.nc ncModelFiles/BSC_DREAM8b_V2.nc -#Choose the steps runtime 00 -cdo -seltimestep,5/25 ncModelFiles/${DATE}00_3H_MACC-ECMWF.nc ncModelFiles/MACC-ECMWF.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}_NASA-GEOS.nc ncModelFiles/NASA-GEOS.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}_NCEP-NGAC.nc ncModelFiles/NCEP-NGAC.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}00_3H_DREAM8-MACC.nc ncModelFiles/DREAM8-MACC.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}00_SILAM.nc ncModelFiles/SILAM.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}_EMA-RegCM4.nc ncModelFiles/EMA-RegCM4.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}00_3H_LOTOSEUROS.nc ncModelFiles/LOTOSEUROS.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}00_3H_UKMET.nc ncModelFiles/UKMET.nc -cdo -seltimestep,5/25 ncModelFiles/${DATE}00_ICON-ART.nc ncModelFiles/ICON-ART.nc -#Change longitudes in NCEP-NGAC.nc (0 360 into 0 180 0 -180) -cdo -O sellonlatbox,-180,180,-90,90 ncModelFiles/NCEP-NGAC.nc ncModelFiles/NCEP-NGACok.nc -rm -rvf ncModelFiles/NCEP-NGAC.nc -#remove nc files -rm -rvf ncModelFiles/${YYYY}*.nc - - - -echo "download DONE!" 
- - -#wget -q --user ggarciacastrillor@aemet.es --password Canalejas55 --auth-no-challenge sds-was.aemet.es/forecast-products/dust-forecasts/files-download/nmmb-bsc-dust-public/2018/07/2018070212_3H_NMMB-BSC.nc/view - -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/nmmb-bsc/2018/07/2018070512_3H_NMMB-BSC.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/bsc-dream8b-v2/2018/07/20180704_BSC_DREAM8b_V2.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/macc-ecmwf/2018/07/2018070300_3H_MACC-ECMWF.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/nasa-geos-5/2018/07/20180704_NASA-GEOS.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/ncep-ngac/2018/07/20180704_NCEP-NGAC.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/noa-wrf-chem/2018/07/2018070412_NOA-WRF-CHEM.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/dream-nmme-macc/2018/07/2018070400_3H_DREAM8-MACC.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/silam/2018/07/2018070400_SILAM.nc/view -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/icon-art/2021/01/2021013100_ICON-ART.nc/view -# NOT INCLUDED (not working since 06/2018) -#https://sds-was.aemet.es/forecast-products/dust-forecasts/files-download/dreamabol/2018/06/20180629_DREAMABOL.nc/view -#dreamabol/$YYYY/$MM/${DATE}_DREAMABOL.nc diff --git a/preproc/prob/interpolateNetcdf.py b/preproc/prob/interpolateNetcdf.py deleted file mode 100644 index 181d6d01583116d453902a8cc841decbea431188..0000000000000000000000000000000000000000 --- a/preproc/prob/interpolateNetcdf.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -import os -import subprocess -from glob import glob - -class InterpolateNetcdf(object): - - def __init__(self, curdate, ncModelsDir, 
main_output_path, lonlat): - - dates = curdate - inputFileDir = os.listdir(ncModelsDir) - scriptClean = "rm -rvf "+main_output_path+"/*.nc" - subprocess.call(scriptClean, shell=True) - for ifileDir in inputFileDir: - if ifileDir == 'NMMB-BSC': - tpl = '*OPER.nc' - elif ifileDir == 'median': - tpl = '*MEDIAN.nc' - else: - tpl = '*.nc' - print (ifileDir) - for ifile in glob('{}/archive/{}{}'.format('/data/products/'+ifileDir, dates, tpl)): - if os.path.exists(main_output_path+"interpolated/"+os.path.basename(ifile)): - print(main_output_path+"interpolated/"+os.path.basename(ifile), 'exists. Exit.') - continue - script = "cdo -L -s -sellonlatbox,"+lonlat+" -remapbil,global_0.5 "+ifile+" "+main_output_path+"interpolated/"+os.path.basename(ifile) - print (script) - subprocess.call(script, shell=True) - -if __name__=="__main__": - import sys - #ncModelsDir = "/home/administrador/webpolvo/DustEpsgrams/ncModelFiles/" - ncModelsDir = "/data/thredds/models-repos/" - #LonW, LonE, LatN, LatS - lonlatRegional = "-25,60,5,65" - main_output_path = '/data/daily_dashboard/prob/tmp/' - curdate = sys.argv[1] - InterpolateNetcdf(curdate, ncModelsDir, main_output_path, lonlatRegional) diff --git a/preproc/prob/probabilityMaps.py b/preproc/prob/probabilityMaps.py deleted file mode 100644 index ee7b5534d703fc5cee6c34ebe33b28cf8e7e482c..0000000000000000000000000000000000000000 --- a/preproc/prob/probabilityMaps.py +++ /dev/null @@ -1,267 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -#import pdb; pdb.set_trace() - -import netCDF4 -from scipy.interpolate import interp2d -import pickle -import numpy as np -import pandas as pd -import os -import sys -import shutil -import datetime as dt -from glob import glob - -DEBUG = True - -# DATE = '20211001' - -# ProbabilityMaps class plots probability maps for D+1 and D+2 daily -# maximum of a given parameter (AOD and "SCONC_DUST") from a interpolated -# netcdf file and needs: inputDir, outputDir, parameterName, threshold -# (The Domain is 
defined previously in the netcdf file) -# doIt method needs: conversionFactor, units, parameterNameTitle (title figure) - -class ProbabilityMaps(object): - - def __init__(self, curdate, mWDict, inputDir, outputDir, parameterName, threshold): - self._mWDict = mWDict - self._inputDir = inputDir - self._outputDir = outputDir - self._parameterName = parameterName - self._threshold = threshold - self.curdate = curdate - @property - def mWDict (self): - return self._mWDict - @property - def inputDir (self): - return self._inputDir - @property - def outputDir (self): - return self._outputDir - @property - def parameterName (self): - return self._parameterName - @property - def threshold (self): - return self._threshold - - def doIt(self, conversionFactor, units, parameterNameTitle): - # Getting parameter from nc files - parameterMax24ProbList = [] - parameterMax48ProbList = [] - parameterMean24ProbList = [] - parameterMean48ProbList = [] - inputFileList = glob('{}/{}*nc'.format(self.inputDir, self.curdate)) - print ("inputFileList...", inputFileList) - weightSum= 0. - for f in inputFileList: - key = '_'.join(os.path.basename(f).split('_')[1:]) - if key not in self.mWDict: - continue - print(f, "Weight", self.mWDict[key]) - weight = self.mWDict[key] - weightSum += weight - ff = netCDF4.Dataset(os.path.join(inputDir, f)) # , format="NETCDF3") - #print (ff.variables.keys()) - if self.parameterName not in ff.variables: - try: - parameterValues = ff.variables[self.parameterName.lower()][:] - except Exception as e: - print("ERROR", str(e)) - continue - else: - parameterValues = ff.variables[self.parameterName][:] - lon = ff.variables["lon"][:] - lat = ff.variables["lat"][:] - steps = ff.variables["time"][:] - runTime = ff.variables['time'].units.split()[2:] - #print (runTime) - #print (steps) - # Calculates parameter daily maximum for D1, from 24 to 48 - # Calculates parameter daily mean for D1, from 24 to 48 - # (steps are 12 15 18 21...24 27 30 33 36 39 42 45...) 
- # Slicing an array: a[start:end+1] - # parameterArray has [step,lat,lon] - parameterMax24 = parameterValues[4,:,:] #first step 24 hours - parameterMean24 = parameterValues[4,:,:] #first step 24 hours - for m in parameterValues[5:12,:,:]: - parameterMax24 = np.maximum(parameterMax24, m) - parameterMean24 = parameterMean24+m - parameterMean24 = parameterMean24/len(parameterValues[4:12,:,:]) - # Change matrix values to true/false when exceding the given threshold - # and when *1 to 1/0 this way matrixs can be added. - parameterMax24Prob = (parameterMax24 >self.threshold*conversionFactor)*1*weight - parameterMax24ProbList.append(np.array(parameterMax24Prob)) - parameterMean24Prob = (parameterMean24 > self.threshold*conversionFactor)*1*weight - parameterMean24ProbList.append(np.array(parameterMean24Prob)) - #print (parameterMean24ProbList) - # Calculates parameter daily maximum for D2, from 48 to 69 - # (steps are 48 51 54 57 60 63 66 69...72) - parameterMax48 = parameterValues[12,:,:] - parameterMean48 = parameterValues[12,:,:] - for m in parameterValues[13:20,:,:]: - parameterMax48 = np.maximum(parameterMax48, m) - parameterMean48 = parameterMean48+m - parameterMean48 = parameterMean48/len(parameterValues[12:20,:,:]) - # Change matrix values to true/false when exceding the given threshold - # and when *1 to 1/0 this way matrixs can be added. 
- parameterMax48Prob = (parameterMax48 >self.threshold*conversionFactor)*1*weight - parameterMax48ProbList.append(np.array(parameterMax48Prob)) - parameterMean48Prob = (parameterMean48 >self.threshold*conversionFactor)*1*weight - parameterMean48ProbList.append(np.array(parameterMean48Prob)) - ff.close() - print ("Done reading nc files") - # We add all the matrix for each model - # and divide by the number of models to get probability - print ("weightSum", weightSum) - print ("runTime0", runTime[0]) - # and then plotting the maps - ######parameterMaxProbListList = [parameterMax24ProbList, parameterMax48ProbList] - parameterMaxProbListList = [parameterMean24ProbList, parameterMean48ProbList] - D = dt.datetime.strptime(runTime[0],"%Y-%m-%d") - D1 = dt.datetime.strptime(runTime[0],"%Y-%m-%d") + dt.timedelta(days=1) - D2 = dt.datetime.strptime(runTime[0],"%Y-%m-%d") + dt.timedelta(days=2) - D1s = str(D1.day).zfill(2) +"/"+ str(D1.month).zfill(2) + "/" +str(D1.year) - D2s = str(D2.day).zfill(2) +"/"+ str(D2.month).zfill(2) + "/" +str(D2.year) - Ds = str(D.day).zfill(2) +"/"+ str(D.month).zfill(2) + "/" +str(D.year) - dayForecastList = ["D+1", "D+2"] #this is for the image file name - DDMMYYYYList = [D1s, D2s] - # print(parameterMaxProbListList, dayForecastList, DDMMYYYYList) - - os.makedirs(os.path.join(self._outputDir, self.curdate), exist_ok=True) - prob_file = "{}/{}/{}{}{}_{}_Prob{}.nc".format(self._outputDir, self.curdate, str(D.year), - str(D.month).zfill(2), str(D.day).zfill(2), self.parameterName, - str(threshold)) - - tmp_data = [] # np.ones((2, len(lat), len(lon)))*-999. 
- - if DEBUG: print('*****', prob_file) - for parameterMaxProbList, dayForecast, DDMMYYYY, curD in zip(parameterMaxProbListList, dayForecastList, DDMMYYYYList, [D1, D2]): - # print('::::', parameterMaxProbList, dayForecast, DDMMYYYY) - result1=np.array(parameterMaxProbList[0]) - dataListLen = np.array(parameterMaxProbList).shape[0] - for m in range(1, dataListLen): - result1=result1+parameterMaxProbList[m] - #data1=100*result1/float(dataListLen) - data1=100*result1/weightSum - - # dataframe to NETCDF ############################################# - print (len(data1), len(data1[0])) - print ("lon:", len(lon)) - print ("lat:", len(lat)) - # lat, lon, prob must have same lenght - # np.tile(a, n) repeats an array "a" "n" times: [[a],[a],...] - # Ex: a = [1,2,3] - # flatten reduce to 1 dimension by rows: [1,2,3,1,2,3,...] - # flatten('F') reduce to 1 dimension by columns: [1,1,1,2,2,2,...] - # default format is NETCDF4 - # format {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", "NETCDF3_CLASSIC"} - print (self.parameterName) - ##we can ad after 'lon': 'time': ["23/03/2021"]*(len(lat)*len(lon)), - ##https://stackoverflow.com/questions/57006443/how-do-i-add-the-time-to-the-netcdf-file - - tmp_data.append(data1) -# df_multiindex = pd.DataFrame({'time':[curD]*(len(lat)*len(lon)), -# 'lat': np.tile(lat,(len(lon),1)).flatten('F'), -# 'lon': np.tile(lon,len(lat)), -# self.parameterName: np.array(data1).flatten() }) -# # self.parameterName+"_P"+str(threshold): np.array(data1).flatten() }) -# df_multiindex = df_multiindex.set_index(['time','lat','lon']) -# xr = df_multiindex.to_xarray() -# print (df_multiindex) -# print(xr) -# xr.to_netcdf(prob_file) -# # +'_'+self.parameterName+'_Prob'+str(threshold)+'_Canarias.nc') - - if os.path.exists(prob_file): - os.remove(prob_file) - - outfile = netCDF4.Dataset(prob_file, 'w') - - outfile.createDimension('lon', len(lon)) - outfile.createDimension('lat', len(lat)) - outfile.createDimension('time', 2) - - var_time = 
outfile.createVariable('time', 'i', ('time',)) - var_time.units = "days since {} 00:00:00".format(dt.datetime.strftime(D, "%Y-%m-%d")) - var_time[:] = [0, 1] - - var_lon = outfile.createVariable('lon', 'f', ('lon',)) - var_lon.units = "degrees_east" - var_lon[:] = lon[:] - - var_lat = outfile.createVariable('lat', 'f', ('lat',)) - var_lat.units = "degrees_north" - var_lat[:]= lat[:] - - var_data = outfile.createVariable(self.parameterName, 'f', ('time', 'lat', 'lon')) - print(":::", var_data.shape, np.array(tmp_data).shape) - var_data[:] = np.array(tmp_data)[:] - - outfile.close() - - - -if __name__ == "__main__": - # makes daily directories where the maps will be stored #before 20200715: Sahel_prova... - today = dt.datetime.today().strftime('%Y%m%d') - print (today) - curdate = sys.argv[1] - - allModelNameList = ['BSC_DREAM8b_V2', '3H_DREAM8-MACC', 'EMA-RegCM4', - '3H_LOTOSEUROS', '3H_MACC-ECMWF', 'NASA-GEOS', - 'NCEP-NGAC', '3H_SDSWAS_NMMB-BSC-v2_OPER', 'NOA-WRF-CHEM', - 'SILAM', '3H_UKMET', 'ICON-ART' , - '1H_WRF-NEMO-CAMx', '3H_ALADIN_DUST', '3H_ZAMG-WRF-CHEM', - '3H_MOCAGE' ] - - allModelNameListInterpolated = ["{}.nc".format(mod) for mod in allModelNameList] - #[0.3, 0.3, 0.1, 1, 0.3, 1, 0.3, 1, 1, 0.3, 1 ] - weightListDustSfcConc = [1., 1., 0.1, - 1., 1., 1., - 0., 1., 1., - 1., 1., 1., - 0., 1., 1., - 1.] - - weightListAod = [1., 1., 0.1, - 1., 1., 1., - 1., 1., 1., - 1., 1., 1., - 0., 1., 1., - 1.] 
- - modelWeightAodDict = dict(zip(allModelNameListInterpolated, weightListAod)) - modelWeightDustSfcConcDict = dict(zip(allModelNameListInterpolated, weightListDustSfcConc)) - - # Probability Maps for Regional SCONC_SFC - # Input threshold Regional - thresholdList = [50, 100, 200, 500] - conversionFactor = 10**-9 - units = u"\u03bcg/m3" - # Input parameter-netcdf-name - parameterName = "SCONC_DUST" - parameterNameTitle = "Dust\: SFC\: Concentration" - # Input nc files - inputDir = "/data/daily_dashboard/prob/tmp/interpolated/" - # /data/daily_dashboard/prob/sconc_dust/50/netcdf/20211002/20211002_SCONC_DUST_Prob50.nc - for threshold in thresholdList: - outDir = "/data/daily_dashboard/prob/{}/{}/netcdf/".format(parameterName.lower(), threshold) - ProbabilityMaps(curdate, modelWeightDustSfcConcDict, inputDir, outDir, parameterName, threshold).doIt(conversionFactor, units, parameterNameTitle) - # Probability Maps for Regional AOD - # Input threshold Regional - # thresholdList = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0] - thresholdList = [0.1, 0.2, 0.5, 0.8] - conversionFactor = 1 - units = "" - # Input parameter-netcdf-name - parameterName = "OD550_DUST" - parameterNameTitle = "Dust\: AOD" - # Input nc files - inputDir = "/data/daily_dashboard/prob/tmp/interpolated/" - for threshold in thresholdList: - outDir = "/data/daily_dashboard/prob/{}/{}/netcdf/".format(parameterName.lower(), threshold) - ProbabilityMaps(curdate, modelWeightAodDict, inputDir, outDir, parameterName, threshold).doIt(conversionFactor, units, parameterNameTitle) diff --git a/preproc/prob/prob_calc.sh b/preproc/prob_calc.sh similarity index 74% rename from preproc/prob/prob_calc.sh rename to preproc/prob_calc.sh index d8c2f58c749567367bbb4828eceba1b4ba131249..b5474702f3abf945e80f06714a5cf40be1c1717c 100755 --- a/preproc/prob/prob_calc.sh +++ b/preproc/prob_calc.sh @@ -8,9 +8,9 @@ fi PYTHON=$HOME/dust-dashboard/bin/python -$PYTHON interpolateNetcdf.py $curdate -wait 
-$PYTHON probabilityMaps.py $curdate +# $PYTHON interpolate_netcdf.py $curdate +# wait +$PYTHON probability_maps.py $curdate wait for var in /data/daily_dashboard/prob/{sconc_dust,od550_dust} @@ -21,6 +21,3 @@ do $PYTHON $HOME/interactive-forecast-viewer/preproc/nc2geojson.py PROB $thresh/geojson/ $thresh/netcdf/$curdate/$curdate*nc done done - -wait -rm /data/daily_dashboard/prob/tmp/interpolated/${curdate}*nc diff --git a/preproc/probability_maps.py b/preproc/probability_maps.py new file mode 100644 index 0000000000000000000000000000000000000000..06e0ce0635d3c10b44dcce65731f5ced24a9b8cf --- /dev/null +++ b/preproc/probability_maps.py @@ -0,0 +1,214 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +"""Probability Maps netCDF generation +""" +#import pdb; pdb.set_trace() + +import os +import sys +from datetime import datetime +import json +import netCDF4 +import numpy as np + +DEBUG = True + +PARAMS = ('SCONC_DUST', 'OD550_DUST') + +# INPUT_DIR = "/data/daily_dashboard/prob/tmp/interpolated/" +INPUT_DIR = "/data/products/median/interpolated/" + +DIR_PATH = os.path.abspath(os.path.dirname(__file__)) +MODELS = json.load(open(os.path.join(DIR_PATH, '../conf/models.json'))) +DATES = json.load(open(os.path.join(DIR_PATH, '../conf/dates.json'))) +VARS = json.load(open(os.path.join(DIR_PATH, '../conf/vars.json'))) +DELAY = DATES['delay']['delayed'] +DELAY_DATE = DATES['delay']['start_date'] + +TSTEPS = 8 + +if DELAY: + FIRST_STEPS_LEN = 2 +else: + FIRST_STEPS_LEN = 3 + +FIRST_STEP = 0 +LON0 = 5 +LON1 = -10 + +# ProbabilityMaps class plots probability maps for D+1 and D+2 daily +# maximum of a given parameter (AOD and "SCONC_DUST") from a interpolated +# netcdf file and needs: input_dir, output_dir, parameter_name, threshold +# (The Domain is defined previously in the netcdf file) +# run_prob method needs: conversion_factor + +class ProbabilityMaps: + """Class handling creation of netCDF files with ensemble probability values + """ + + def __init__(self, curdate, input_dir, 
output_dir, parameter_name, threshold): + self._input_dir = input_dir + self._output_dir = output_dir + self._parameter_name = parameter_name + self._threshold = threshold + self.curdate = curdate + @property + def input_dir(self): + return self._input_dir + @property + def output_dir(self): + return self._output_dir + @property + def parameter_name(self): + return self._parameter_name + @property + def threshold(self): + return self._threshold + + def run_prob(self, conversion_factor): + """ Executes probability calculations """ + # Getting parameter from nc files + param_maxprob_lists = {} + weight_sum = 0. + run_time = [''] + lon = [] + lat = [] + first_steps = [] + for model in MODELS: + tpl = MODELS[model]['template'] + ifile = os.path.join(self.input_dir, f'{self.curdate}{tpl}.nc') + if not os.path.exists(ifile): + continue + if DEBUG: + print ("input_file:", ifile) + weight = float(MODELS[model]['weight']) + weight_sum += weight + nc_file = netCDF4.Dataset(ifile) + if self.parameter_name not in nc_file.variables: + try: + param_values = nc_file.variables[self.parameter_name.lower()][:,:,LON0:LON1] + except Exception as err: + if DEBUG: + print("ERROR", str(err)) + continue + else: + param_values = nc_file.variables[self.parameter_name][:,:,LON0:LON1] + first_steps = np.arange(FIRST_STEP, param_values[FIRST_STEP:,:,:].shape[0], TSTEPS)[:-1] + if len(first_steps) < FIRST_STEPS_LEN: + continue + if DEBUG: + print("FIRST_STEPS", first_steps) + for step in first_steps: + if step not in param_maxprob_lists: + param_maxprob_lists[step] = [] + lon = nc_file.variables["lon"][LON0:LON1] + lat = nc_file.variables["lat"][:] + run_time = nc_file.variables['time'].units.split()[2:] + # Calculates parameter daily maximum for day1, from 24 to 48 + # Calculates parameter daily mean for day1, from 24 to 48 + # (steps are 12 15 18 21...24 27 30 33 36 39 42 45...) 
+ # Slicing an array: a[start:end+1] + # parameterArray has [step,lat,lon] + + for cur_first_step in first_steps: + total_steps = cur_first_step + TSTEPS + param_max = param_values[cur_first_step,:,:] #first step 24 hours + param_mean = param_values[cur_first_step,:,:] #first step 24 hours + for param in param_values[cur_first_step+1:total_steps,:,:]: + param_max = np.maximum(param_max, param) + param_mean = param_mean + param + param_mean = param_mean/len(param_values[cur_first_step:total_steps,:,:]) + # Change matrix values to true/false when exceeding the given threshold + # and when *1 to 1/0 this way matrices can be added. + param_mean_prob = (param_mean > self.threshold*conversion_factor)*1*weight + param_maxprob_lists[cur_first_step].append(np.array(param_mean_prob)) + + nc_file.close() + + if DEBUG: + print ("Done reading nc files") + # We add all the matrices for each model + # and divide by the number of models to get probability + if DEBUG: + print ("weight_sum", weight_sum) + if DEBUG: + print ("run_time0", run_time[0]) + # and then plotting the maps + ######param_maxprob_lists = [param_max24_problist, param_max48_problist] + day0 = datetime.strptime(run_time[0], "%Y-%m-%d") + + os.makedirs(os.path.join(self._output_dir, self.curdate), exist_ok=True) + prob_file = "{}/{}/{}{}{}_{}_Prob{}.nc".format(self._output_dir, self.curdate, + str(day0.year), str(day0.month).zfill(2), str(day0.day).zfill(2), + self.parameter_name, str(self.threshold)) + + tmp_data = [] # np.ones((2, len(lat), len(lon)))*-999.
+ + if DEBUG: + print('*****', prob_file) + for param_maxprob_key in param_maxprob_lists: + param_maxprob = param_maxprob_lists[param_maxprob_key] + if DEBUG: + print('::::', param_maxprob_key, len(param_maxprob)) + result1 = np.array(param_maxprob[0]) + datalist_len = np.array(param_maxprob).shape[0] + for data_idx in range(1, datalist_len): + result1 = result1 + param_maxprob[data_idx] + #data1=100*result1/float(datalist_len) + data1 = 100*result1/weight_sum + + # dataframe to NETCDF ############################################# + if DEBUG: + print (len(data1), len(data1[0])) + if DEBUG: + print ("lon:", len(lon)) + if DEBUG: + print ("lat:", len(lat)) + if DEBUG: + print (self.parameter_name) + + tmp_data.append(data1) + + if os.path.exists(prob_file): + os.remove(prob_file) + + outfile = netCDF4.Dataset(prob_file, 'w') + + outfile.createDimension('lon', len(lon)) + outfile.createDimension('lat', len(lat)) + outfile.createDimension('time', FIRST_STEPS_LEN) + + var_time = outfile.createVariable('time', 'i', ('time',)) + var_time.units = "days since {} 00:00:00".format(datetime.strftime(day0, "%Y-%m-%d")) + var_time[:] = np.arange(FIRST_STEPS_LEN) + + var_lon = outfile.createVariable('lon', 'f', ('lon',)) + var_lon.units = "degrees_east" + var_lon[:] = lon[:] + + var_lat = outfile.createVariable('lat', 'f', ('lat',)) + var_lat.units = "degrees_north" + var_lat[:]= lat[:] + + var_data = outfile.createVariable(self.parameter_name, 'f', ('time', 'lat', 'lon')) + print(":::", var_data.shape, np.array(tmp_data).shape) + var_data[:] = np.array(tmp_data)[:] + + outfile.close() + + +if __name__ == "__main__": + # makes daily directories where the maps will be stored #before 20200715: Sahel_prova... 
+ today = datetime.today().strftime('%Y%m%d') + print (today) + cdate = sys.argv[1] + + # Probability Maps for Regional + for param_name in PARAMS: + threshold_list = VARS[param_name]['threshold_list'] + conv_factor = VARS[param_name]['mul']**-1 + for thresh in threshold_list: + out_dir = f"/data/daily_dashboard/prob/{param_name.lower()}/{thresh}/netcdf/" + ProbabilityMaps(cdate, INPUT_DIR, + out_dir, param_name, + thresh).run_prob(conv_factor) diff --git a/router.py b/router.py index efdfcc8bf43b73d819820559591238b9d8462137..013c57a0baac7ad349332fa22d82fea5a858fd01 100644 --- a/router.py +++ b/router.py @@ -1,20 +1,16 @@ +# -*- coding: utf-8 -*- +""" Router """ + from urllib.parse import urlparse, parse_qs import dash -import dash_bootstrap_components as dbc from dash import dcc from dash import html from dash.dependencies import Output from dash.dependencies import Input -from dash.dependencies import State -from dash.dependencies import ALL -from data_handler import DEFAULT_VAR -from data_handler import DEFAULT_MODEL from data_handler import VARS from data_handler import MODELS from data_handler import DEBUG -from data_handler import DATES -from data_handler import cache, cache_timeout -from data_handler import pathname +from data_handler import PATHNAME from data_handler import ALIASES from data_handler import ROUTE_DEFAULTS @@ -26,6 +22,7 @@ from tabs.observations import tab_observations from tabs.observations import sidebar_observations from tabs.fullscreen import go_fullscreen + #-------------------- ADD ROUTING FUNCTIONS ---------------------- def get_input_aliases(route_selections): """ Change user inputs into internal variables""" @@ -48,11 +45,11 @@ def eval_section_query(queries): queries['obs_option'] = queries['section'] return queries -def get_url_queries(url, ROUTE_DEFAULTS=ROUTE_DEFAULTS): +def get_url_queries(url, route_defaults=ROUTE_DEFAULTS): """ pull all of the keyword arguments from url and update ROUTE_DEFAULTS dict as necessary""" 
parsed_url = urlparse(url, scheme='http', allow_fragments=False) queries = parse_qs(parsed_url[4]) - route_selections = ROUTE_DEFAULTS.copy() + route_selections = route_defaults.copy() route_selections.update(queries) route_selections = get_input_aliases(route_selections) route_selections = eval_section_query(route_selections) @@ -66,9 +63,16 @@ def get_url_queries(url, ROUTE_DEFAULTS=ROUTE_DEFAULTS): def render_sidebar(tab='forecast-tab', route_selections=ROUTE_DEFAULTS): """ Function rendering requested tab """ tabs = { - 'forecast-tab' : sidebar_forecast(VARS, route_selections['var'][0], MODELS, route_selections['model'], window=route_selections['for_option'][0], country=route_selections['country'][0]), - 'evaluation-tab' : sidebar_evaluation(route_selections['eval_option'][0]), - 'observations-tab' : sidebar_observations(route_selections['obs_option'][0]), + 'forecast-tab' : sidebar_forecast(VARS, + route_selections['var'][0], + MODELS, + route_selections['model'], + window=route_selections['for_option'][0], + country=route_selections['country'][0]), + 'evaluation-tab' : + sidebar_evaluation(route_selections['eval_option'][0]), + 'observations-tab' : + sidebar_observations(route_selections['obs_option'][0]), #'fullscreen-tab' : 'Full' } if tabs[tab] is 'Full': @@ -78,9 +82,9 @@ def render_sidebar(tab='forecast-tab', route_selections=ROUTE_DEFAULTS): def render404(): """Create a 404 page""" #setup routes for links - forecast_link = pathname + "/?tab=forecast" - eval_link = pathname + "/?tab=evaluation" - obs_link = pathname + "/?tab=observations" + forecast_link = PATHNAME + "/?tab=forecast" + eval_link = PATHNAME + "/?tab=evaluation" + obs_link = PATHNAME + "/?tab=observations" page = [html.Div( className='background', @@ -90,13 +94,22 @@ def render404(): children=[html.H2('404 Error', id='error_title'), html.P("Sorry we can't find the page you were looking for."), html.P("Here are some helpful links that might help:"), - dcc.Link("Forecast", 
id='forecast_link', href=forecast_link, className='error_links',target='_parent', refresh=True), + dcc.Link("Forecast", id='forecast_link', + href=forecast_link, + className='error_links',target='_parent', + refresh=True), html.Br(), html.Br(), - dcc.Link("Evaluation", id='evaluation_link', href=eval_link, className='error_links',target='_parent', refresh=True), + dcc.Link("Evaluation", id='evaluation_link', + href=eval_link, + className='error_links',target='_parent', + refresh=True), html.Br(), html.Br(), - dcc.Link("Observations", id='observations_link', href=obs_link, className='error_links',target='_parent', refresh=True) + dcc.Link("Observations", id='observations_link', + href=obs_link, + className='error_links',target='_parent', + refresh=True) ], ) ] @@ -111,23 +124,23 @@ def render404(): def router(url): """ Get url search queries and build layout for app""" route_selections = get_url_queries(url) - print('===== route_selections', route_selections) - try: - children = [ - html.Div( - id='app-sidebar', - children=render_sidebar(route_selections['tab'][0], - route_selections), - className='sidebar' - ), - dcc.Tabs(id='app-tabs', value=route_selections['tab'][0] , children=[ - tab_forecast(window=route_selections['for_option'][0], end_date=route_selections['date'][0]),#'models', 'prob', 'was' - tab_evaluation(route_selections['eval_option'][0]), #nrt or scores - tab_observations(route_selections['obs_option'][0]),#rgb or visibility - go_fullscreen(), - ]), - ] - except: #This handles when user inputs incorrect URL params - children = render404() + if DEBUG: print('===== route_selections', route_selections) + # try: + children = [ + html.Div( + id='app-sidebar', + children=render_sidebar(route_selections['tab'][0], + route_selections), + className='sidebar' + ), + dcc.Tabs(id='app-tabs', value=route_selections['tab'][0] , children=[ + tab_forecast(window=route_selections['for_option'][0], end_date=route_selections['date'][0]), + 
tab_evaluation(route_selections['eval_option'][0]), #nrt or scores + tab_observations(route_selections['obs_option'][0]),#rgb or visibility + go_fullscreen(), + ]), + ] +# except Exception as err: #This handles when user inputs incorrect URL params +# if DEBUG: print("ERROR 404", str(err)) +# children = render404() return children - diff --git a/tabs/evaluation.py b/tabs/evaluation.py index 0d22557a162482ef5a8d0e0f551017b42c1445df..cd9c4ac5c38423e9feade8d9f3ae2ab327396f09 100644 --- a/tabs/evaluation.py +++ b/tabs/evaluation.py @@ -118,24 +118,6 @@ eval_time_series = dbc.Spinner( def tab_evaluation(window='nrt'): nrt_children = [ - dbc.Alert( - "If you close the location tooltip, please refresh the page before clicking on another specific location on the map.", - id="alert-eval-popup", - is_open=False, - duration=6000, - fade=True, - color="primary", - style={ 'overflow': 'auto', 'marginBottom': 0 } - ), - dbc.Alert( - "If you close the location tooltip, please refresh the page before clicking on another specific location on the map.", - id="alert-eval-popup2", - is_open=False, - duration=6000, - fade=True, - color="primary", - style={ 'overflow': 'auto', 'marginBottom': 0 } - ), html.Span( html.P( "Visual comparison" diff --git a/tabs/evaluation_callbacks.py b/tabs/evaluation_callbacks.py index 59b4b636d32495c0db6cc9bfd15c21e5f5f92a64..d70c6e40ae89b8cb0f1edb06a81fbe934a606332 100644 --- a/tabs/evaluation_callbacks.py +++ b/tabs/evaluation_callbacks.py @@ -6,25 +6,19 @@ from dash import html from dash.dependencies import Output from dash.dependencies import Input from dash.dependencies import State -from dash.dependencies import ALL -from dash.dependencies import MATCH from dash.exceptions import PreventUpdate import dash_leaflet as dl from data_handler import DEFAULT_VAR -from data_handler import DEFAULT_MODEL -from data_handler import VARS +from data_handler import VARS from data_handler import MODELS from data_handler import OBS from data_handler import DEBUG 
-from data_handler import DATES -from data_handler import MODEBAR_CONFIG +from data_handler import END_DATE from data_handler import MODEBAR_CONFIG_TS -from data_handler import MODEBAR_LAYOUT from data_handler import MODEBAR_LAYOUT_TS from data_handler import DISCLAIMER_NO_FORECAST from data_handler import DISCLAIMER_OBS - -from utils import calc_matrix +from data_handler import cache, cache_timeout from tabs.evaluation import tab_evaluation from tabs.evaluation import STATS @@ -37,11 +31,8 @@ import orjson import os.path from random import random -from data_handler import cache, cache_timeout SCORES = list(STATS.keys()) -start_date = DATES['start_date'] -end_date = DATES['end_date'] or (dt.now() - timedelta(days=1)).strftime("%Y%m%d") def extend_l(l): @@ -90,14 +81,11 @@ def render_evaluation_tab(nrtbutton, scoresbutton): prevent_initial_call=True ) def update_time_selection(timescale, network): - + """ Update time selection among different networks """ if timescale is None: raise PreventUpdate - if network == 'modis': - start_date = '20180101' - elif network == 'aeronet': - start_date = '20120101' + start_date = OBS[network]['start_date'] seasons = { '03': 'Spring', @@ -113,21 +101,21 @@ def update_time_selection(timescale, network): 'value' : '{}-{}'.format( mon.strftime('%Y%m'), (mon + relativedelta(months=2)).strftime('%Y%m')) } - for mon in pd.date_range(start_date, end_date, freq='Q')[::-1]][1:] + for mon in pd.date_range(start_date, END_DATE, freq='Q')[::-1]][1:] placeholder = 'Select season' elif timescale == 'annual': ret = [{ 'label': mon.strftime('%Y'), 'value': '{year}01-{year}12'.format(year=mon.strftime('%Y')), - } for mon in - pd.date_range(start_date, end_date, freq='A')[::-1]] + } for mon in + pd.date_range(start_date, END_DATE, freq='A')[::-1]] placeholder = 'Select year' else: # timescale == 'monthly': ret = [{ 'label': mon.strftime('%B %Y'), 'value': mon.strftime('%Y%m'), - } for mon in - pd.date_range(start_date, end_date, freq='M')[::-1]] + } 
for mon in + pd.date_range(start_date, END_DATE, freq='M')[::-1]] placeholder = 'Select month' return ret, placeholder @@ -164,16 +152,17 @@ def modis_scores_tables_retrieve(n, models, stat, network, timescale, selection) if DEBUG: print("###########", models, stat, network, timescale, selection, n) filedir = OBS[network]['path'] - filename = "{}_scores.h5".format(selection) - tab_name = "total_{}".format(selection) + filename = f"{selection}_scores.h5" + tab_name = f"total_{selection}" filepath = os.path.join(filedir, "h5", filename) if not os.path.exists(filepath): return _no_modis_data() df = pd.read_hdf(filepath, tab_name) ret = df.loc[df['model'].isin(models), stat] ret['model'] = ret['model'].map({k:MODELS[k]['name'] for k in MODELS}) - if DEBUG: print('---', ret.columns) - if DEBUG: print('---', ret.to_dict('records')) + if DEBUG: + print('---', ret.columns) + print('---', ret.to_dict('records')) # Check if there is any data in returned table, return No Data if not if len(ret.to_dict('records')) < 1: return _no_modis_data() @@ -204,20 +193,23 @@ def scores_maps_retrieve(n_clicks, model, score, network, selection, orig_model, ctx = dash.callback_context - if DEBUG: print(':::', n_clicks, model, score, network, selection) + if DEBUG: + print(':::', n_clicks, model, score, network, selection) if ctx.triggered: button_id = ctx.triggered[0]["prop_id"].split(".")[0] if button_id != "scores-map-apply": if model is not None and score is not None: - if DEBUG: print('::: 1 :::') + if DEBUG: + print('::: 1 :::') figure = get_scores_figure(network, model, score, selection) figure.update_layout(mb) - return figure, True, model, score + return figure, True, model, score raise PreventUpdate if orig_model and orig_stats: - if DEBUG: print(':::', orig_model, orig_stats, ':::') + if DEBUG: + print(':::', orig_model, orig_stats, ':::') curr_model = [mod for mod in MODELS if mod in orig_model][0] curr_stat = [sc for sc in SCORES if sc in orig_stats][0] figure = 
get_scores_figure(network, curr_model, curr_stat, selection) @@ -227,34 +219,27 @@ def scores_maps_retrieve(n_clicks, model, score, network, selection, orig_model, print('::: 2.5 :::') figure = get_scores_figure(network, model, score, selection) figure.update_layout(mb) - return figure, True, model, score - -# if model is not None and score is not None: -# if DEBUG: print('::: 3 :::') -# figure = get_scores_figure(network, model, score, selection) -# figure.update_layout(mb) -# return figure, True, orig_model[0], orig_stats[0] + return figure, True, model, score return dash.no_update, False, dash.no_update, dash.no_update # PreventUpdate -def format_floats(df): - """This function takes a dataframe and changes all columns except for 'station' +def format_floats(dataframe): + """This function takes a dataframe and changes all columns except for 'station' so that floats will be formatted to 2 digits after the decimal place""" - for col in df.columns: + for col in dataframe.columns: # check if the column is not 'station' if col != 'station': # convert the column to a string to allow for string formatting - df[col] = df[col].astype(str) + dataframe[col] = dataframe[col].astype(str) # iterate over the values in the column - for i, val in enumerate(df[col]): + for i, val in enumerate(dataframe[col]): # check if the value is a float if '.' 
in val: # if so, format it to have 2 decimal places - df.at[i, col] = '{:.2f}'.format(float(val)) - return df + dataframe.at[i, col] = '{:.2f}'.format(float(val)) + return dataframe -def alphabetize_stations(df): -# def alphabetize_stations(filepath, tab_name): +def alphabetize_stations(dataframe): """ This will alphabetize the stations for each region in the aeronet stats table""" # Define a list of regions to sort between (in the desired order) regions = ['Europe', 'Mediterranean', 'MiddleEast', 'NAfrica', 'Total'] @@ -262,20 +247,20 @@ def alphabetize_stations(df): # Iterate over each pair of consecutive regions and sort the rows between them for i in range(len(regions)-1): # Get the indices of the current and next regions - current_region_idx = df[df['station'] == regions[i]].index[0] - next_region_idx = df[df['station'] == regions[i+1]].index[0] + current_region_idx = dataframe[dataframe['station'] == regions[i]].index[0] + next_region_idx = dataframe[dataframe['station'] == regions[i+1]].index[0] # Slice the dataframe to select the rows between the current and next regions - subset_df = df.loc[current_region_idx+1:next_region_idx-1] + subset_df = dataframe.loc[current_region_idx+1:next_region_idx-1] # Sort the rows alphabetically based on the 'station' column subset_df = subset_df.sort_values(by='station') # Create a new dataframe containing only the current region row - current_region_row = df.loc[current_region_idx].to_frame().T + current_region_row = dataframe.loc[current_region_idx].to_frame().T # Append the current region row and sorted subset dataframe to the list of region DataFrames region_dfs.append(current_region_row) region_dfs.append(subset_df) # Add the 'Total' row back to the end of the sorted dataframe - total_row_idx = df[df['station'] == 'Total'].index[0] - total_row = df.loc[total_row_idx].to_frame().T + total_row_idx = dataframe[dataframe['station'] == 'Total'].index[0] + total_row = dataframe.loc[total_row_idx].to_frame().T 
region_dfs.append(total_row) # Concatenate all of the region DataFrames into a single sorted DataFrame sorted_df = pd.concat(region_dfs) @@ -283,27 +268,28 @@ def alphabetize_stations(df): @dash.callback( extend_l([ - [Output('aeronet-scores-table-{}'.format(score), 'columns'), - Output('aeronet-scores-table-{}'.format(score), 'data'), - Output('aeronet-scores-table-{}'.format(score), 'style_table'), - Output('aeronet-scores-table-{}'.format(score), 'selected_cells'), - Output('aeronet-scores-table-{}'.format(score), 'active_cell')] + [Output(f'aeronet-scores-table-{score}', 'columns'), + Output(f'aeronet-scores-table-{score}', 'data'), + Output(f'aeronet-scores-table-{score}', 'style_table'), + Output(f'aeronet-scores-table-{score}', 'selected_cells'), + Output(f'aeronet-scores-table-{score}', 'active_cell')] for score in SCORES]), [Input('scores-apply', 'n_clicks'), - *[Input('aeronet-scores-table-{}'.format(score), 'active_cell') + *[Input(f'aeronet-scores-table-{score}', 'active_cell') for score in SCORES]], [State('obs-models-dropdown', 'value'), State('obs-statistics-dropdown', 'value'), State('obs-network-dropdown', 'value'), State('obs-timescale-dropdown', 'value'), State('obs-selection-dropdown', 'value')] + - extend_l([[State('aeronet-scores-table-{}'.format(score), 'columns'), - State('aeronet-scores-table-{}'.format(score), 'data'), - State('aeronet-scores-table-{}'.format(score), 'style_table')] + extend_l([[ + State(f'aeronet-scores-table-{score}', 'columns'), + State(f'aeronet-scores-table-{score}', 'data'), + State(f'aeronet-scores-table-{score}', 'style_table')] for score in SCORES]), prevent_initial_call=True ) -def aeronet_scores_tables_retrieve(n, *args): # *activel_cells, models, stat, network, timescale, selection, *tables): +def aeronet_scores_tables_retrieve(n, *args): """ Read scores tables and show data """ ctx = dash.callback_context @@ -317,11 +303,13 @@ def aeronet_scores_tables_retrieve(n, *args): # *activel_cells, models, stat, n if 
ctx.triggered: button_id = ctx.triggered[0]["prop_id"].split(".")[0] if DEBUG: print("BUTTON", button_id) - if button_id not in ['scores-apply'] + ['aeronet-scores-table-{}'.format(score) for score in SCORES]: + if button_id not in ['scores-apply'] + [f'aeronet-scores-table-{score}' + for score in SCORES]: raise PreventUpdate if not n or network != 'aeronet': - return extend_l([[dash.no_update, dash.no_update, { 'display': 'none' }, dash.no_update, dash.no_update] for score in SCORES]) + return extend_l([[dash.no_update, dash.no_update, { 'display': 'none' }, + dash.no_update, dash.no_update] for _ in SCORES]) # ORDER is IMPORTANT areas = ['Europe', 'Mediterranean', 'MiddleEast', 'NAfrica', 'Total'] @@ -348,7 +336,7 @@ def aeronet_scores_tables_retrieve(n, *args): # *activel_cells, models, stat, n ret_tables[ret_idx] = tables[obj_idx] ret_tables[ret_idx+1] = tables[obj_idx+1] ret_tables[ret_idx+2] = tables[obj_idx+2] - curr_columns = tables[obj_idx] + # curr_columns = tables[obj_idx] curr_data = tables[obj_idx+1] if active_cells[table_idx] is not None and \ active_cells[table_idx]['column_id'] == 'station': @@ -361,9 +349,9 @@ def aeronet_scores_tables_retrieve(n, *args): # *activel_cells, models, stat, n filepath = os.path.join(filedir, "h5", filename) if not os.path.exists(filepath): if DEBUG: print ("TABLES 0", tables) - #Build a no_data list of values to return + #Build a no_data list of values to return no_data = [] - for i in range(len(SCORES)): + for _ in range(len(SCORES)): no_data += [[], [], {'display': 'block'}, dash.no_update, None] # Add no data update for UI output only for 1 output no_data[0].append({'name':['NO DATA'], 'id':'station'}) @@ -400,25 +388,32 @@ def aeronet_scores_tables_retrieve(n, *args): # *activel_cells, models, stat, n if foll_val in areas: foll_idx = df.loc[df['station']==foll_val].index[0] tables[obj_idx+1] = [table_row - for table_row in curr_data if curr_data.index(table_row) < row_number] + \ + for table_row in curr_data + if 
curr_data.index(table_row) < row_number] + \ df.iloc[val_idx:foll_idx-1][models].to_dict('rows') + \ - [table_row for table_row in curr_data if curr_data.index(table_row) > row_number] + [table_row for table_row in curr_data + if curr_data.index(table_row) > row_number] else: foll_area = areas[areas.index(value)+1] if DEBUG: print("'''", curr_data) print("---", foll_area) - foll_idx = curr_data.index([row for row in curr_data if row['station'] == foll_area][0]) + foll_idx = curr_data.index([row for row in curr_data + if row['station'] == foll_area][0]) tables[obj_idx+1] = [table_row - for table_row in curr_data if curr_data.index(table_row) <= row_number] + \ - [table_row for table_row in curr_data if curr_data.index(table_row) >= foll_idx] + for table_row in curr_data + if curr_data.index(table_row) <= row_number] + \ + [table_row for table_row in curr_data + if curr_data.index(table_row) >= foll_idx] ret_tables[ret_idx+1] = tables[obj_idx+1] ret_tables[ret_idx+2] = { 'display': 'block' } ret_tables[ret_idx+3] = [] ret_tables[ret_idx+4] = None else: - # ret_tables[ret_idx+1] = df.loc[df['station'].isin(areas), models].to_dict('records') - ret_tables[ret_idx+1] = df[df['station'].isin(areas)].reindex(columns=models).to_dict('records') + # ret_tables[ret_idx+1] = df.loc[df['station'].isin(areas), models] + # .to_dict('records') + ret_tables[ret_idx+1] = df[df['station'].isin(areas)].reindex( + columns=models).to_dict('records') ret_tables[ret_idx+2] = { 'display': 'block' } ret_tables[ret_idx+3] = dash.no_update ret_tables[ret_idx+4] = dash.no_update @@ -474,19 +469,17 @@ def show_eval_modis_timeseries(nclicks, coords, date, obs, model): ) ), True - # return dash.no_update, False # PreventUpdate @dash.callback( - [#Output('alert-eval-popup2', 'is_open'), - Output('modis-clicked-coords', 'data'), + [Output('modis-clicked-coords', 'data'), Output(dict(tag='modis-map', index='modis'), 'children')], [Input(dict(tag='modis-map', index='modis'), 'click_lat_lng')], 
[State(dict(tag='modis-map', index='modis'), 'children'), State('eval-date-picker', 'date'), - State('obs-dropdown', 'value'), State('obs-mod-dropdown', 'value')], ) -def modis_popup(click_data, mapid, date, obs, model): +def modis_popup(click_data, mapid, date, model): + """ Manages popup info for modis """ from tools import get_single_point if DEBUG: print("CLICK:", str(click_data)) if not click_data: @@ -500,7 +493,7 @@ def modis_popup(click_data, mapid, date, obs, model): lat, lon = click_data value = get_single_point(model, date, 0, DEFAULT_VAR, lat, lon) if DEBUG: print("VALUE", value) - + if not value: raise PreventUpdate @@ -518,8 +511,10 @@ def modis_popup(click_data, mapid, date, obs, model): className='popup-map-value', ), html.Span([ - html.B("Lat {:.2f}, Lon {:.2f}".format(lat, lon)), html.Br(), - "DATE {:02d} {} {} {:02d}UTC".format(valid_dt.day, dt.strftime(valid_dt, '%b'), valid_dt.year, valid_dt.hour), + html.B(f"Lat {lat:.2f}, Lon {lon:.2f}"), html.Br(), + "DATE {:02d} {} {} {:02d}UTC".format(valid_dt.day, + dt.strftime(valid_dt, '%b'), + valid_dt.year, valid_dt.hour), html.Br(), html.Button("EXPLORE TIMESERIES", id='ts-eval-modis-button', @@ -532,7 +527,7 @@ def modis_popup(click_data, mapid, date, obs, model): ], id='modis-map-point', position=[lat, lon], - autoClose=True, + autoClose=True, closeOnEscapeKey=True, closeOnClick=True, closeButton=True, @@ -543,8 +538,7 @@ def modis_popup(click_data, mapid, date, obs, model): @dash.callback( - [#Output('alert-eval-popup', 'is_open') - Output('stations-clicked-coords', 'data'), + [Output('stations-clicked-coords', 'data'), Output(dict(tag='empty-map', index='None'), 'children'), Output(dict(tag='empty-map', index='None'), 'click_lat_lng')], [Input(dict(tag='empty-map', index='None'), 'click_lat_lng')], @@ -554,6 +548,7 @@ def modis_popup(click_data, mapid, date, obs, model): ) @cache.memoize(timeout=cache_timeout) def stations_popup(click_data, mapid, stations): + """ Manages popup info for aeronet 
""" if not click_data: raise PreventUpdate @@ -574,7 +569,7 @@ def stations_popup(click_data, mapid, stations): lat, lon = click_data curr_station = df_stations[(df_stations['lon'].round(2) == round(lon, 2)) & \ (df_stations['lat'].round(2) == round(lat, 2))]['stations'].values - + if DEBUG: print("CURR_STATION", curr_station) if not curr_station: @@ -586,8 +581,8 @@ def stations_popup(click_data, mapid, stations): children=[ html.Div([ html.Span([ - html.B("Lat {:.2f}, Lon {:.2f}".format(lat, lon)), html.Br(), - "STATION: ", html.B("{}".format(curr_station)), html.Br(), + html.B(f"Lat {lat:.2f}, Lon {lon:.2f}"), html.Br(), + "STATION: ", html.B(f"{curr_station}"), html.Br(), html.Button("EXPLORE TIMESERIES", id='ts-eval-button', n_clicks=0, @@ -603,7 +598,7 @@ def stations_popup(click_data, mapid, stations): index=int(random()*100) ), position=[lat, lon], - autoClose=True, + autoClose=True, closeOnEscapeKey=True, closeOnClick=True, closeButton=True, @@ -620,9 +615,9 @@ def stations_popup(click_data, mapid, stations): else: mapid.append(marker.to_plotly_json()) print("LAST", type(mapid), type(last), last) - for pos, log in enumerate(mapid): + for _, log in enumerate(mapid): if log is not None: - # mapid[pos]['id']['random'] = + # mapid[pos]['id']['random'] = if DEBUG: print("********", type(log), log.keys()) # print(log['type']) @@ -713,9 +708,10 @@ def update_eval_aeronet(n_clicks, sdate, edate, obs): sdate = dt.strptime( sdate, "%Y-%m-%d").strftime("%Y%m%d") except: - sdate = end_date + sdate = END_DATE pass - if DEBUG: print('SERVER: callback start_date {}'.format(sdate)) + if DEBUG: + print(f'SERVER: callback start_date {sdate}') if edate is not None: edate = edate.split()[0] @@ -724,19 +720,13 @@ def update_eval_aeronet(n_clicks, sdate, edate, obs): edate, "%Y-%m-%d").strftime("%Y%m%d") except: pass - if DEBUG: print('SERVER: callback end_date {}'.format(edate)) + if DEBUG: + print(f'SERVER: callback end_date {edate}') else: - edate = end_date + edate = 
END_DATE stations, points_layer = get_obs1d(sdate, edate, obs, DEFAULT_VAR) - fig = get_figure(model=None, var=DEFAULT_VAR, layer=points_layer), -# eval_graph = html.Div([ -# get_figure(model=None, var=DEFAULT_VAR, layer=points_layer), -# html.Div(DISCLAIMER_OBS, -# className='disclaimer') -# ], -# id='graph-eval-aeronet', -# ) + fig = get_figure(model=None, var=DEFAULT_VAR, layer=points_layer) return stations.to_dict(), fig @@ -777,17 +767,21 @@ def update_eval_modis(n_clicks, date, mod, obs, mod_div): date, "%Y-%m-%d").strftime("%Y%m%d") except: pass - if DEBUG: print('SERVER: callback date {}'.format(date)) + if DEBUG: + print(f'SERVER: callback date {date}') else: - date = end_date + date = END_DATE - if DEBUG: print("ZOOM", mod_zoom, "CENTER", mod_center) + if DEBUG: + print("ZOOM", mod_zoom, "CENTER", mod_center) if MODELS[mod]['start'] == 12: tstep = 4 else: tstep = 0 - fig_mod = get_figure(model=mod, var=DEFAULT_VAR, selected_date=date, tstep=tstep, hour=12, center=mod_center, zoom=mod_zoom) - fig_obs = get_figure(model=obs, var=DEFAULT_VAR, selected_date=date, tstep=0, center=mod_center, zoom=mod_zoom, tag='modis') + fig_mod = get_figure(model=mod, var=DEFAULT_VAR, selected_date=date, tstep=tstep, + hour=12, center=mod_center, zoom=mod_zoom) + fig_obs = get_figure(model=obs, var=DEFAULT_VAR, selected_date=date, tstep=0, + center=mod_center, zoom=mod_zoom, tag='modis') if DEBUG: print("MODIS", fig_obs) return fig_obs, fig_mod @@ -808,6 +802,8 @@ def update_eval(obs): if DEBUG: print('SERVER: calling figure from EVAL picker callback') # if DEBUG: print('SERVER: interval ' + str(n)) + start_date = OBS[obs]['start_date'] + if obs == 'aeronet': eval_date = [ @@ -815,8 +811,8 @@ def update_eval(obs): dcc.DatePickerRange( id='eval-date-picker', min_date_allowed=dt.strptime(start_date, "%Y%m%d"), - max_date_allowed=dt.strptime(end_date, "%Y%m%d"), - initial_visible_month=dt.strptime(end_date, "%Y%m%d"), + max_date_allowed=dt.strptime(END_DATE, "%Y%m%d"), + 
initial_visible_month=dt.strptime(END_DATE, "%Y%m%d"), display_format='DD MMM YYYY', # end_date=end_date, updatemode='bothdates', @@ -843,17 +839,13 @@ def update_eval(obs): dcc.DatePickerSingle( id='eval-date-picker', min_date_allowed=dt.strptime(start_date, "%Y%m%d"), - max_date_allowed=dt.strptime(end_date, "%Y%m%d"), - initial_visible_month=dt.strptime(end_date, "%Y%m%d"), + max_date_allowed=dt.strptime(END_DATE, "%Y%m%d"), + initial_visible_month=dt.strptime(END_DATE, "%Y%m%d"), display_format='DD MMM YYYY', # date=end_date, # with_portal=True, )] -# fig_mod = get_figure(model='median', var=DEFAULT_VAR, -# selected_date=end_date, tstep=4) -# fig_obs = get_figure(model=obs, var=DEFAULT_VAR, -# selected_date=end_date, tstep=0, zoom=fig_mod.zoom, center=fig_mod.center) fig_mod = get_figure() fig_obs = get_figure(tag='modis') diff --git a/tabs/forecast.py b/tabs/forecast.py index a98975ec46a01420d2cde0da8a96a37a570136e1..68ab1e6b3653c57729adcabefeb6610e10cba197 100644 --- a/tabs/forecast.py +++ b/tabs/forecast.py @@ -1,28 +1,156 @@ -#!/usr/bin/env python3 # -*- coding: utf-8 -*- -""" Dash Server """ +""" Forecast Layout module """ + +from datetime import datetime as dt +from datetime import timedelta import dash_bootstrap_components as dbc from dash import dcc from dash import html -from datetime import datetime as dt -from datetime import timedelta - -from data_handler import PROB +from data_handler import FREQ +from data_handler import DEBUG from data_handler import STYLES -from data_handler import DATES -from data_handler import MODELS +from data_handler import START_DATE, END_DATE, DELAY, DELAY_DATE from data_handler import WAS from data_handler import DISCLAIMER_MODELS -from data_handler import DEFAULT_MODEL -from data_handler import DEFAULT_VAR -from data_handler import GRAPH_HEIGHT -start_date = DATES['start_date'] -end_date = DATES['end_date'] or (dt.now() - timedelta(days=1)).strftime("%Y%m%d") -forecast_days = ('Today', 'Tomorrow') +def 
get_forecast_days(curdate=END_DATE): + """ Return forecast days according to configuration file """ + delay = DELAY + st_date = DELAY_DATE + if delay and st_date: + if dt.strptime(curdate, "%Y%m%d") >= dt.strptime(st_date, "%Y%m%d"): + days = 2 + else: + days = 3 + elif delay: + days = 2 + elif not delay and st_date: + if dt.strptime(curdate, "%Y%m%d") >= dt.strptime(st_date, "%Y%m%d"): + days = 3 + else: + days = 2 + else: + days = 3 + + return [ + (dt.strptime(curdate, "%Y%m%d") + + timedelta(days=idx)).strftime("%a %d").upper() + for idx in range(days) + ] + +def gen_ts_marks(ts_type, tstep, ts_min, curdate=END_DATE): + """ Generate time slider marks """ + + if DEBUG: print("TS MARKS", ts_type, tstep, ts_min) + + if ts_type in ('prob', 'was'): + return get_forecast_days(curdate)[tstep-ts_min] + + if DEBUG: print("RET", f'{tstep:d}') + return f'{tstep:d}' + +def gen_time_slider(ts_type='prob', end_date=END_DATE): + """ Generate time slider """ + ts_dict = { + 'model': + { + 'min': 0, + 'max': 72, + 'step': FREQ, + 'play': True + }, + 'prob': + { + 'min': 0, + 'max': len(get_forecast_days(end_date))-1, + 'step': 1, + 'play': False + }, + 'was': + { + 'min': 0, + 'max': len(get_forecast_days(end_date))-1, + 'step': 1, + 'play': False + }, + } + + ts_min = ts_dict[ts_type]['min'] + ts_max = ts_dict[ts_type]['max'] + ts_step = ts_dict[ts_type]['step'] + ts_play = ts_dict[ts_type]['play'] + + marks = { + tstep: {'label': gen_ts_marks(ts_type, tstep, ts_min, end_date)} + for tstep in range(ts_min, ts_max+ts_step, ts_step) + } + + if DEBUG: print("FCST MARKS", marks[list(marks.keys())[-1]]) + if ts_type in ('prob', 'was'): + marks[list(marks.keys())[-1]]['style'] = {} + marks[list(marks.keys())[-1]]['style']['left'] = '' + marks[list(marks.keys())[-1]]['style']['right'] = '-40px' + + return ts_play, dcc.Slider( + id=f'{ts_type}-slider-graph', + min=ts_min, max=ts_max, step=ts_step, value=ts_min, + marks=marks + ) + +def gen_time_bar(ts_type='prob', 
start_date=START_DATE, end_date=END_DATE): + """ Generate time bar according to the section """ + + date_picker = html.Span( + dcc.DatePickerSingle( + id=f'{ts_type}-date-picker', + min_date_allowed=dt.strptime(start_date, "%Y%m%d"), + max_date_allowed=dt.strptime(end_date, "%Y%m%d"), + initial_visible_month=dt.strptime(end_date, "%Y%m%d"), + display_format='DD MMM YYYY', + date=end_date, + clearable=True, + placeholder='DD MON YYYY', + reopen_calendar_on_clear=True, + ), + className="timesliderline", + ) + + play_button = html.Span( + children=[ + html.Button(title='Play', + id='btn-play', n_clicks=0, + className='fa fa-play text-center'), + ], + className="timesliderline anim-buttons", + ) + + ts_play, ts_content = gen_time_slider(ts_type, end_date) + time_slider = html.Span( + ts_content, + id=f'{ts_type}-slider-container', + className="timesliderline", + ) + + if DEBUG: print("TIME SLIDER", time_slider) + + if ts_play: + return html.Div([ + date_picker, + play_button, + time_slider + ], + className="timeslider" + ) + + return html.Div([ + date_picker, + time_slider + ], + className="timeslider" + ) def layout_view(): """ Return the menu for the various mapview types""" @@ -41,8 +169,9 @@ def layout_view(): ), active=active ) - for style, active in zip(list(STYLES.keys()), [True if i == 'carto-positron' - else False for i in STYLES]) + for style, active in zip(list(STYLES.keys()), + [True if i == 'carto-positron' + else False for i in STYLES]) ], direction="up", in_navbar=True, @@ -87,113 +216,7 @@ def layout_layers(): ), )]) -def time_slider(end_date=end_date): - """ Return the html for the timeslider for timeseries animations """ - return html.Div([ - html.Span( - dcc.DatePickerSingle( - id='model-date-picker', - min_date_allowed=dt.strptime(start_date, "%Y%m%d"), - max_date_allowed=dt.strptime(end_date, "%Y%m%d"), - initial_visible_month=dt.strptime(end_date, "%Y%m%d"), - display_format='DD MMM YYYY', - date=end_date, - clearable=True, - placeholder='DD MON 
YYYY', - reopen_calendar_on_clear=True, - ), - className="timesliderline", - ), - html.Span( - children=[ - html.Button(title='Play', - id='btn-play', n_clicks=0, - className='fa fa-play text-center'), - ], - className="timesliderline anim-buttons", - ), - html.Span( - dcc.Slider( - id='slider-graph', - min=0, max=72, step=3, value=0, - marks={ - tstep: '{:d}'.format(tstep) - # if tstep%2 == 0 else '' - for tstep in range(0, 75, 3) - }, - # updatemode='drag', - ), - className="timesliderline", - ), - ], - className="timeslider" - ) - -def prob_time_slider(end_date=end_date): - """ Return the slider for the probability maps """ - return html.Div([ - html.Span( - dcc.DatePickerSingle( - id='prob-date-picker', - min_date_allowed=dt.strptime(start_date, "%Y%m%d"), - max_date_allowed=dt.strptime(end_date, "%Y%m%d"), - initial_visible_month=dt.strptime(end_date, "%Y%m%d"), - display_format='DD MMM YYYY', - date=end_date, - clearable=True, - placeholder='DD MON YYYY', - reopen_calendar_on_clear=True, - ), - className="timesliderline", - ), - html.Span( - dcc.Slider( - id='prob-slider-graph', - min=0, max=1, step=1, value=0, - marks={ - tstep: forecast_days[tstep] - for tstep in range(2) - }, - ), - className="timesliderline", - ), - ], - className="timeslider" - ) - -def was_time_slider(end_date): - """ Return the slider for the Warning Adivsory System maps""" - return html.Div([ - html.Span( - dcc.DatePickerSingle( - id='was-date-picker', - min_date_allowed=dt.strptime(start_date, "%Y%m%d"), - max_date_allowed=dt.strptime(end_date, "%Y%m%d"), - initial_visible_month=dt.strptime(end_date, "%Y%m%d"), - display_format='DD MMM YYYY', - date=end_date, - clearable=True, - placeholder='DD MON YYYY', - reopen_calendar_on_clear=True, - ), - className="timesliderline", - ), - html.Span( - dcc.Slider( - id='was-slider-graph', - min=1, max=2, step=1, value=1, - marks={ - tstep: forecast_days[tstep-1] - for tstep in range(1, 3) - }, - ), - className="timesliderline", - ), - ], - 
className="timeslider" - ) - -def models_children(end_date=end_date): +def models_children(start_date=START_DATE, end_date=END_DATE): """ Return the html for models maps """ return [ html.Div( @@ -202,7 +225,6 @@ def models_children(end_date=end_date): index='models', ) ), - # login_modal, dbc.Alert( "To explore the forecast, please select a variable and click on APPLY.", id="alert-forecast", @@ -212,15 +234,6 @@ def models_children(end_date=end_date): color="primary", style={ 'overflow': 'auto', 'marginBottom': 0 } ), - dbc.Alert( - "If you close the location tooltip, please refresh the page before clicking on another specific location on the map.", - id="alert-popup", - is_open=False, - duration=6000, - fade=True, - color="primary", - style={ 'overflow': 'auto', 'marginBottom': 0 } - ), html.Div( id='div-collection', # children=[dbc.Spinner( @@ -251,7 +264,7 @@ def models_children(end_date=end_date): )), dbc.NavbarSimple([ html.Div([ - time_slider(end_date), + gen_time_bar('model', start_date=start_date, end_date=end_date), layout_view(), time_series(), # layout_layers(), @@ -259,15 +272,16 @@ def models_children(end_date=end_date): id='layout-dropdown', className="layout-dropdown", ), - ], - className='fixed-bottom navbar-timebar', - fluid=True, - expand='lg', - dark=True, - fixed='bottom',) - ] - -def prob_children(end_date=end_date): + ], + className='fixed-bottom navbar-timebar', + fluid=True, + expand='lg', + dark=True, + fixed='bottom', + ) + ] + +def prob_children(start_date=START_DATE, end_date=END_DATE): """ Return html for probablility maps""" return [ html.Div( @@ -283,23 +297,22 @@ def prob_children(end_date=end_date): className='disclaimer'), dbc.NavbarSimple([ html.Div([ - prob_time_slider(end_date), + gen_time_bar('prob', start_date=start_date, end_date=end_date), layout_view(), - # layout_layers(), ], id='layout-dropdown', className="layout-dropdown", ), - ], - className='fixed-bottom navbar-timebar', - fluid=True, - expand='lg', - dark=True, - 
fixed='bottom', - ) + ], + className='fixed-bottom navbar-timebar', + fluid=True, + expand='lg', + dark=True, + fixed='bottom', + ) ] -def was_children(end_date=end_date): +def was_children(start_date=START_DATE, end_date=END_DATE): """ Return html for WAS maps""" return [ html.Div( @@ -321,28 +334,27 @@ def was_children(end_date=end_date): className='disclaimer'), dbc.NavbarSimple([ html.Div([ - was_time_slider(end_date), + gen_time_bar('was', start_date=start_date, end_date=end_date), layout_view(), - # layout_layers(), ], id='layout-dropdown', className="layout-dropdown", ), - ], - className='fixed-bottom navbar-timebar', - fluid=True, - expand='lg', - dark=True, - fixed='bottom', - ) + ], + className='fixed-bottom navbar-timebar', + fluid=True, + expand='lg', + dark=True, + fixed='bottom', + ) ] -def tab_forecast(window='models', end_date=end_date): +def tab_forecast(window='models', start_date=START_DATE, end_date=END_DATE): """ The MAIN function to return all appropriate html for selected tab and sidebar selection """ windows = { - 'models': models_children(end_date), - 'was': was_children(end_date), - 'prob': prob_children(end_date), + 'models': models_children(start_date=start_date, end_date=end_date), + 'was': was_children(start_date=start_date, end_date=end_date), + 'prob': prob_children(start_date=start_date, end_date=end_date), } return dcc.Tab(label='Forecast', @@ -356,15 +368,17 @@ def tab_forecast(window='models', end_date=end_date): def expand_dropdown(window): """ Build a dictionary to return appropriate expanded sidebar dropdown""" - expand_dropdown = { + expand_dropdown_dict = { 'models': False, 'prob': False, 'was': False } - expand_dropdown[window]=True - return expand_dropdown + expand_dropdown_dict[window]=True + return expand_dropdown_dict -def sidebar_forecast(variables, default_var, models, default_model, window='models', country='burkinafaso'): +def sidebar_forecast(variables, default_var, models, default_model, window='models', + 
country='burkinafaso'): + """ Build forecast sidebar """ #get which sidebar dropdown should be expanded for complex url search dropdown = expand_dropdown(window) @@ -487,22 +501,16 @@ def sidebar_forecast(variables, default_var, models, default_model, window='mode html.Div([ dbc.Row([ dbc.Col( - dbc.Button( - "", - id="info-button", - ), + dbc.Button("", id="info-button"), width=3, ), dbc.Col( - dbc.Button( - "DOWNLOAD", - id="download-button", - ), + dbc.Button("DOWNLOAD", id="download-button"), width=9, ), ], - no_gutters=True, - ), + no_gutters=True, + ), dbc.Row([ dbc.Col([ dbc.Collapse( @@ -511,11 +519,10 @@ def sidebar_forecast(variables, default_var, models, default_model, window='mode dbc.Button('USER GUIDE', id='btn-userguide-download', n_clicks=0, - href="https://dust.aemet.es/products/overview/user-guide/@@download", + href="/products/overview/user-guide/@@download", className='download-section', ), - html.P(""" - Please check out the User Guide for more information."""), + html.P("""Please check out the User Guide for more information."""), ], className="card-text", )), @@ -546,73 +553,16 @@ def sidebar_forecast(variables, default_var, models, default_model, window='mode target="_blank", className='download-section', ), - # html.Button('GIF ANIM', - # id='btn-anim-download', - # n_clicks=0, - # className='download-section', - # ), - # dbc.Spinner( - # dcc.Download( - # id="anim-download", - # base64=True, - # ), - # ), - # html.Label("ALL MODELS"), - # dbc.Button('PNG FRAME', - # id='btn-all-frame-download', - # n_clicks=0, - # href="#", - # external_link=True, - # target="_blank", - # className='download-section', - # ), - ## dbc.Spinner( - ## dcc.Download( - ## id="all-frame-download", - ## base64=True, - ## ), - ## ), - # dbc.Button('GIF ANIM', - # id='btn-all-anim-download', - # n_clicks=0, - # href="#", - # external_link=True, - # target="_blank", - # className='download-section', - # ), -# dbc.Spinner( - # dcc.Download( - # id="all-anim-download", 
- # base64=True, - # ), - # ), -html.Label("NUMERICAL DATA"), - dbc.Button('NETCDF', - id='btn-netcdf-download', - n_clicks=0, - href="/products/data-download", - external_link=True, - target="_blank", - className='download-section', - ), -# html.A('TEST', - # id='btn-img-download', - # href="#", - # # target="_blank", - # className='download-section', - # ), -# dbc.Spinner( - # dcc.Download( - # id="netcdf-download", - # base64=True, - # ), - # ), -# html.P("""This button allows you to get selected models netCDF files."""), -# html.P([ - # """To get access to the forecast archive please click """, - # dcc.Link('here', href="https://dust03.bsc.es/products/data-download"), - # ]), -], + html.Label("NUMERICAL DATA"), + dbc.Button('NETCDF', + id='btn-netcdf-download', + n_clicks=0, + href="/products/data-download", + external_link=True, + target="_blank", + className='download-section', + ), + ], className="card-text", )), id="download-collapse", @@ -626,50 +576,3 @@ html.Label("NUMERICAL DATA"), className="sidebar-bottom", ) ] - -#login_modal = html.Div( -# id='open-login', -# children=[ -# dbc.Modal([ -# dbc.ModalHeader("Download authentication"), -# dbc.ModalBody([ -# dbc.Alert( -# "The username/password is incorrect. Please try again or click outside the window to exit.", -# id="alert-login-error", -# is_open=False, -# duration=6000, -# fade=True, -# color="primary", -# style={ 'overflow': 'auto', 'marginBottom': 0 } -# ), -# dbc.Alert( -# "Sorry, you don't have permission to download the latest forecast. 
Please download a previous forecast or click outside the window to exit.", -# id="alert-login-wrong", -# is_open=False, -# duration=6000, -# fade=True, -# color="primary", -# style={ 'overflow': 'auto', 'marginBottom': 0 } -# ), -# dcc.Input( -# id="input_username", -# type="text", -# placeholder="username", -# ), -# dcc.Input( -# id="input_password", -# type="password", -# placeholder="password", -# ), -# html.Button('Login', id='submit-login', n_clicks=0), -# ]), -# ], -# id='login-modal', -# size='sm', -# centered=True, -# is_open=False, -# ), -# ] -# #style={'display': 'none'}, -#) - diff --git a/tabs/forecast_callbacks.py b/tabs/forecast_callbacks.py index 121461d7db223e53f54c3259363945dbac5891b4..97c73c5af5a6dd163c73fedfa72a50d4073fd880 100644 --- a/tabs/forecast_callbacks.py +++ b/tabs/forecast_callbacks.py @@ -1,4 +1,13 @@ +# -*- coding: utf-8 -*- """ TAB FORECAST """ + +from datetime import datetime as dt +from datetime import timedelta +import time +import math +from random import random +import orjson + import dash import dash_bootstrap_components as dbc from dash import dcc @@ -7,8 +16,6 @@ from dash.dependencies import Output from dash.dependencies import Input from dash.dependencies import State from dash.dependencies import ALL -from dash.dependencies import MATCH -from dash.dependencies import ClientsideFunction from dash.exceptions import PreventUpdate import dash_leaflet as dl @@ -20,34 +27,15 @@ from data_handler import MODELS from data_handler import STYLES from data_handler import FREQ from data_handler import DEBUG -from data_handler import DATES +from data_handler import END_DATE from data_handler import PROB from data_handler import GRAPH_HEIGHT from data_handler import MODEBAR_CONFIG_TS from data_handler import MODEBAR_LAYOUT_TS -from utils import calc_matrix +from data_handler import cache, cache_timeout from tabs.forecast import tab_forecast +from utils import calc_matrix -import requests -import pandas as pd -from datetime import 
datetime as dt -from datetime import timedelta -import time -from io import BytesIO -from PIL import Image -import zipfile -import tempfile -import os.path -import orjson -import math -from random import random - -start_date = DATES['start_date'] -end_date = DATES['end_date'] or (dt.now() - timedelta(days=1)).strftime("%Y%m%d") - -from data_handler import cache, cache_timeout -#def register_callbacks(app, cache, cache_timeout): -# """ Registering callbacks """ @dash.callback( [Output('collapse-1', 'is_open'), @@ -65,7 +53,8 @@ from data_handler import cache, cache_timeout State('collapse-3', 'is_open'),], prevent_initial_call=True ) -def render_forecast_tab(modbutton, probbutton, wasbutton, var, modopen, probopen, wasopen): +def render_forecast_tab(modbutton, probbutton, wasbutton, var, modopen, + probopen, wasopen): """ Function rendering requested tab """ ctx = dash.callback_context @@ -93,12 +82,11 @@ def render_forecast_tab(modbutton, probbutton, wasbutton, var, modopen, probopen if wasopen: return True, False, False, False, True, dash.no_update return modopen, probopen, wasopen, False, True, dash.no_update - else: - if modopen: - return True, False, False, True, True, dash.no_update + + if modopen: return True, False, False, True, True, dash.no_update + return True, False, False, True, True, dash.no_update - raise PreventUpdate @dash.callback( [Output('prob-dropdown', 'options'), @@ -128,6 +116,7 @@ def update_prob_dropdown(var): prevent_initial_call=True ) def update_models_dropdown(variable, checked): + """ Update Models maps dropdown """ btn_style = { 'display' : 'block' } models = VARS[variable]['models'] @@ -154,6 +143,7 @@ def update_models_dropdown(variable, checked): [Input('collapse-1', 'is_open')], ) def rotate_models_caret(collapse_open): + """ Rotates models menu caret """ rotate_caret = { 'top':'.05rem', 'transform': 'rotate(180deg)', @@ -168,6 +158,7 @@ def rotate_models_caret(collapse_open): [Input('collapse-2', 'is_open')], ) def 
rotate_prob_caret(collapse_open): + """ Rotates probability menu caret """ rotate_caret = { 'transform': 'rotate(0deg)', '-ms-transform': 'rotate(0deg)', @@ -181,6 +172,7 @@ def rotate_prob_caret(collapse_open): [Input('collapse-3', 'is_open')], ) def rotate_was_caret(collapse_open): + """ Rotates was menu caret """ rotate_caret = { 'transform': 'rotate(0deg)', '-ms-transform': 'rotate(0deg)', @@ -199,31 +191,32 @@ def rotate_was_caret(collapse_open): prevent_initial_call=True ) def sidebar_bottom(n_info, n_download, open_info, open_download): + """ Returns bottom sidebar with info and download menus """ ctx = dash.callback_context if ctx.triggered: button_id = ctx.triggered[0]["prop_id"].split(".")[0] - if button_id == 'info-button': - if DEBUG: print('clicked INFO', not open_info, False) - return not open_info, False - elif button_id == 'download-button': - if DEBUG: print('clicked DOWN', False, not open_download) - return False, not open_download + if button_id == 'info-button': + if DEBUG: print('clicked INFO', not open_info, False) + return not open_info, False + elif button_id == 'download-button': + if DEBUG: print('clicked DOWN', False, not open_download) + return False, not open_download if DEBUG: print('clicked NONE', False, False) - raise PreventUdate + raise PreventUpdate @dash.callback( Output('btn-anim-download', 'href'), [Input('model-dropdown', 'value'), Input('variable-dropdown-forecast', 'value'), Input('model-date-picker', 'date'), - Input('slider-graph', 'value')], + Input('model-slider-graph', 'value')], prevent_initial_call=False, ) def download_anim_link(models, variable, date, tstep): """ Download PNG frame """ - if date is None or tstep is None: + if variable is None or date is None or tstep is None: raise PreventUpdate # from tools import get_figure @@ -239,62 +232,18 @@ def download_anim_link(models, variable, date, tstep): return anim @dash.callback( - Output('all-frame-download', 'data'), - [Input('btn-all-frame-download', 'n_clicks')], 
- [State('variable-dropdown-forecast', 'value'), - State('model-date-picker', 'date'), - State('slider-graph', 'value') - ], - prevent_initial_call=True, -) -def download_all_frame(btn, models, variable, date, tstep): - """ Download PNG frame """ - # from tools import get_figure - from tools import download_image - - ctx = dash.callback_context - - if ctx.triggered: - button_id = ctx.triggered[0]["prop_id"].split(".")[0] - if button_id == 'btn-all-frame-download': - if DEBUG: print('GIF', btn, models, variable, date) - try: - curdate = dt.strptime(date, '%Y-%m-%d').strftime('%Y%m%d') - except: - curdate = date - data = download_image(models, variable, curdate, tstep=tstep, anim=False) - if DEBUG: print('DATA', type(data), data.keys(), [data[k] for k in data if k != 'content']) - return data - - raise PreventUpdate - -@dash.callback( - Output('all-anim-download', 'data'), - [Input('btn-all-anim-download', 'n_clicks')], - [State('variable-dropdown-forecast', 'value'), - State('model-date-picker', 'date')], - prevent_initial_call=True, + Output('was-slider-container', 'children'), + [Input('was-date-picker', 'date')], ) -def download_all_anim(btn, variable, date): - """ Download PNG frame """ - # from tools import get_figure - from tools import download_image - - ctx = dash.callback_context - - if ctx.triggered: - button_id = ctx.triggered[0]["prop_id"].split(".")[0] - if button_id == 'btn-all-anim-download': - if DEBUG: print('GIF', btn, variable, date) - try: - curdate = dt.strptime(date, '%Y-%m-%d').strftime('%Y%m%d') - except: - curdate = date - data = download_image(['all'], variable, curdate, anim=True) - if DEBUG: print('DATA', type(data), data.keys(), [data[k] for k in data if k != 'content']) - return data - - raise PreventUpdate +def update_was_timeslider(date): + """ Update time slider number of days """ + from tabs.forecast import gen_time_slider + try: + date = dt.strptime(date, "%Y-%m-%d").strftime("%Y%m%d") + except: + pass + _, time_slider = 
gen_time_slider('was', date) + return time_slider @dash.callback( [Output('was-graph', 'children'), @@ -340,11 +289,13 @@ def update_was_figure(n_clicks, date, day, was, var, previous, view, zoom, cente date, "%Y-%m-%d").strftime("%Y%m%d") except: pass - if DEBUG: print('SERVER: callback date {}'.format(date)) + if DEBUG: + print(f'SERVER: callback date {date}') else: - date = end_date + date = END_DATE - if DEBUG: print("WAS figure " + date, was, day) + if DEBUG: + print("WAS figure " + date, was, day) if was: view = list(STYLES.keys())[view.index(True)] geojson, legend, info = get_was_figure(was, day, selected_date=date) @@ -353,6 +304,19 @@ def update_was_figure(n_clicks, date, day, was, var, previous, view, zoom, cente raise PreventUpdate +@dash.callback( + Output('prob-slider-container', 'children'), + [Input('prob-date-picker', 'date')], +) +def update_prob_timeslider(date): + """ Update time slider number of days """ + from tabs.forecast import gen_time_slider + try: + date = dt.strptime(date, "%Y-%m-%d").strftime("%Y%m%d") + except: + pass + _, time_slider = gen_time_slider('prob', date) + return time_slider @dash.callback( Output('prob-graph', 'children'), @@ -368,7 +332,7 @@ def update_was_figure(n_clicks, date, day, was, var, previous, view, zoom, cente ) @cache.memoize(timeout=cache_timeout) def update_prob_figure(n_clicks, date, day, prob, var, view, zoom, center): - """ Update Warning Advisory Systems maps """ + """ Update Probability maps """ from tools import get_prob_figure from tools import get_figure @@ -399,7 +363,7 @@ def update_prob_figure(n_clicks, date, day, prob, var, view, zoom, center): pass if DEBUG: print('SERVER: callback date {}'.format(date)) else: - date = end_date + date = END_DATE if prob: prob = prob.replace('prob_', '') @@ -432,9 +396,9 @@ def update_was_styles_button(*args): # if DEBUG: print("CURRENT ARGS", str(args)) # if DEBUG: print("NUM GRAPHS", num_graphs) - url = [STYLES[button_id['index']]['url'] for x in 
range(num_graphs)] - attr = [STYLES[button_id['index']]['attribution'] for x in range(num_graphs)] - res = [False for i in active] + url = [STYLES[button_id['index']]['url'] for _ in range(num_graphs)] + attr = [STYLES[button_id['index']]['attribution'] for _ in range(num_graphs)] + res = [False for _ in active] st_idx = list(STYLES.keys()).index(button_id['index']) if active[st_idx] is False: res[st_idx] = True @@ -467,9 +431,9 @@ def update_prob_styles_button(*args): # if DEBUG: print("CURRENT ARGS", str(args)) # if DEBUG: print("NUM GRAPHS", num_graphs) - url = [STYLES[button_id['index']]['url'] for x in range(num_graphs)] - attr = [STYLES[button_id['index']]['attribution'] for x in range(num_graphs)] - res = [False for i in active] + url = [STYLES[button_id['index']]['url'] for _ in range(num_graphs)] + attr = [STYLES[button_id['index']]['attribution'] for _ in range(num_graphs)] + res = [False for _ in active] st_idx = list(STYLES.keys()).index(button_id['index']) if active[st_idx] is False: res[st_idx] = True @@ -502,9 +466,9 @@ def update_models_styles_button(*args): if DEBUG: print("CURRENT ARGS", str(args)) active = args[-1] if DEBUG: print("NUM GRAPHS", num_graphs) - url = [STYLES[button_id['index']]['url'] for x in range(num_graphs)] - attr = [STYLES[button_id['index']]['attribution'] for x in range(num_graphs)] - res = [False for i in active] + url = [STYLES[button_id['index']]['url'] for _ in range(num_graphs)] + attr = [STYLES[button_id['index']]['attribution'] for _ in range(num_graphs)] + res = [False for _ in active] st_idx = list(STYLES.keys()).index(button_id['index']) if active[st_idx] is False: res[st_idx] = True @@ -515,78 +479,6 @@ def update_models_styles_button(*args): if DEBUG: print('NOTHING TO DO') raise PreventUpdate - -# @dash.callback( -# [Output({'tag': 'model-tile-layer', 'index': MATCH}, 'url'), -# Output({'tag': 'model-tile-layer', 'index': MATCH}, 'attribution')], -# [Input('airports', 'n_clicks')] + -# [Input({'tag': 
'view-style', 'index': MATCH}, 'n_clicks')], -# [State({'tag': 'view-style', 'index': MATCH}, 'active')], -# prevent_initial_call=True -# ) -# # @cache.memoize(timeout=cache_timeout) -# def update_styles(*args): -# """ Function updating map layout cartography """ -# ctx = dash.callback_context -# active = args[-1] -# # urls, attributions = args[-2], args[-1] -# if DEBUG: print("CURRENT STYLES 2", str(active)) -# -# if ctx.triggered: -# button_id = ctx.triggered[0]["prop_id"].split(".")[0] -# if button_id in STYLES: -# if DEBUG: -# print("STYLE", button_id) -# url = STYLES[button_id]['url'] -# attr = STYLES[button_id]['attribution'] -# return url, attr -# elif button_id == 'airports': -# traces_list = [trace for trace in figures['data']] -# for trace in figures['data']: -# if trace['name'] == 'Airports': -# figures['data'].remove(trace) -# return figures -# -# fname = "/data/daily_dashboard/obs/airports/airports.dat" -# df = pd.read_csv(fname) -# clon = df['Longitude'] -# clat = df['Latitude'] -# calt = df['Altitude'] -# cname = df['Name'] -# cicao = df['ICAO'] -# ccity = df['City'] -# ccountry = df['Country'] -# figures['data'].append( -# dict( -# type='scattermapbox', -# name='Airports', -# below='', -# lon=clon, -# lat=clat, -# text=cname, -# customdata=cicao, -# #name='{} ({})'.format(cname, cicao), -# mode='markers', -# hovertemplate="lon: %{lon:.2f}
" + -# "lat: %{lat:.2f}
" + -# "name: %{text} (%{customdata})", -# opacity=0.6, -# showlegend=False, -# marker=dict( -# # autocolorscale=True, -# # symbol='square', -# color='#2B383E', -# opacity=0.6, -# size=10, -# showscale=False, -# ) -# ), -# ) -# return figures -# -# raise PreventUpdate - - @dash.callback( [Output('model-clicked-coords', 'data'), Output('current-popups-stored', 'data'), @@ -595,13 +487,14 @@ def update_models_styles_button(*args): [State(dict(tag='model-map', index=ALL, n_clicks=ALL), 'id'), State(dict(tag='model-map', index=ALL, n_clicks=ALL), 'children'), State('model-date-picker', 'date'), - State('slider-graph', 'value'), + State('model-slider-graph', 'value'), State('variable-dropdown-forecast', 'value'), State('model-clicked-coords', 'data'), State('current-popups-stored', 'data')], prevent_initial_call=False ) def models_popup(click_data, map_ids, res_list, date, tstep, var, coords, popups): + """ Manages models popups for timeseries """ from tools import get_single_point if DEBUG: print("CLICK:", str(click_data)) if click_data.count(None) == len(click_data): @@ -621,7 +514,7 @@ def models_popup(click_data, map_ids, res_list, date, tstep, var, coords, popups trigger = orjson.loads(ctxt) if DEBUG: print('TRIGGER', trigger, type(trigger)) - curr_models = [m['index'] for m in map_ids] + # curr_models = [m['index'] for m in map_ids] res = res_list if trigger in map_ids: model = trigger['index'] @@ -719,10 +612,8 @@ def models_popup(click_data, map_ids, res_list, date, tstep, var, coords, popups return coords, popups, res return {}, {}, dash.no_update - # raise PreventUpdate -# retrieve timeseries according to coordinates selected @dash.callback( [Output('ts-modal', 'children'), Output('ts-modal', 'is_open'), @@ -737,7 +628,8 @@ def models_popup(click_data, map_ids, res_list, date, tstep, var, coords, popups ) @cache.memoize(timeout=cache_timeout) def show_timeseries(ts_button, mod, date, variable, coords, popups): - """ Renders model comparison timeseries """ + 
""" Renders model comparison timeseries, retrieve timeseries according to + coordinates selected. """ from tools import get_timeseries ctx = dash.callback_context @@ -775,8 +667,6 @@ def show_timeseries(ts_button, mod, date, variable, coords, popups): return ts_body, True, [0 for _ in ts_button] return dash.no_update, False, [0 for _ in ts_button] return dash.no_update, False, [0 for _ in ts_button] - # raise PreventUpdate - @dash.callback( [Output({'tag': 'model-map', 'index': ALL, "n_clicks": ALL}, 'zoom'), @@ -790,7 +680,7 @@ def show_timeseries(ts_button, mod, date, variable, coords, popups): prevent_initial_call=True ) def zoom_country(n_clicks, model, zoom, lat, lon): - """Set zoom and center over coordinates entered""" + """ Set zoom and center over coordinates entered """ count = len(model) if count == 0 or zoom is None: raise PreventUpdate @@ -808,10 +698,13 @@ def zoom_country(n_clicks, model, zoom, lat, lon): prevent_initial_call=True ) def zooms(viewport, models): - """Syncronize all maps to have same center and zoom in mosaic""" + """ Syncronize all maps to have same center and zoom in mosaic """ ctx = dash.callback_context if ctx.triggered: + print('=============models', models) + print('=============ct', ctx.triggered_id) changed = dict(ctx.triggered_id) + print('=============ctchanged', changed) index = models.index(changed) if viewport[index] is not None: return 1, \ @@ -820,8 +713,6 @@ def zooms(viewport, models): viewport[index]['center'][1] raise PreventUpdate - -# start/stop animation @dash.callback( [Output('slider-interval', 'disabled'), Output('slider-interval', 'n_intervals'), @@ -831,7 +722,7 @@ def zooms(viewport, models): ], Input('btn-play', 'n_clicks'), [State('slider-interval', 'disabled'), - State('slider-graph', 'value')], + State('model-slider-graph', 'value')], prevent_initial_call=True ) def start_stop_autoslider(n_play, disabled, value): @@ -853,24 +744,23 @@ def start_stop_autoslider(n_play, disabled, value): raise 
PreventUpdate @dash.callback( - Output('slider-graph', 'value'), + Output('model-slider-graph', 'value'), [Input('slider-interval', 'n_intervals')], prevent_initial_call=True ) @cache.memoize(timeout=cache_timeout) def update_slider(n): """ Update slider value according to the number of intervals """ - if DEBUG: print('SERVER: updating slider-graph ' + str(n)) + if DEBUG: print('SERVER: updating model-slider-graph ' + str(n)) if not n: return if n >= 24: tstep = int(round(24*math.modf(n/24)[0], 0)) else: tstep = int(n) - if DEBUG: print('SERVER: updating slider-graph ' + str(tstep*FREQ)) + if DEBUG: print('SERVER: updating model-slider-graph ' + str(tstep*FREQ)) return tstep*FREQ - @dash.callback( Output('forecast-tab', 'children'), [Input('models-apply', 'n_clicks'), @@ -881,6 +771,7 @@ def update_slider(n): ) @cache.memoize(timeout=cache_timeout) def update_tab_content(models_clicks, prob_clicks, was_clicks, curtab): + """ Updates tab content """ ctx = dash.callback_context if ctx.triggered: @@ -903,7 +794,7 @@ def update_tab_content(models_clicks, prob_clicks, was_clicks, curtab): @dash.callback( Output('graph-collection', 'children'), [Input('models-apply', 'n_clicks'), - Input('slider-graph', 'value'), + Input('model-slider-graph', 'value'), Input('model-date-picker', 'date')], [State('model-dropdown', 'value'), State('variable-dropdown-forecast', 'value'), @@ -943,7 +834,7 @@ def update_models_figure(n_clicks, tstep, date, model, variable, static, view, z pass if DEBUG: print('SERVER: callback date {}'.format(date)) else: - date = end_date + date = END_DATE if model is None: model = DEFAULT_MODEL @@ -973,8 +864,8 @@ def update_models_figure(n_clicks, tstep, date, model, variable, static, view, z if len(model) != len(zoom): if DEBUG: print("##############", len(model), len(zoom), "**********") - zoom = [None for item in model] - center = [None for item in model] + zoom = [None for _ in model] + center = [None for _ in model] if DEBUG: print('#### ZOOM, 
CENTER:', zoom, center, model, ncols, nrows) view = list(STYLES.keys())[view.index(True)] @@ -999,8 +890,8 @@ def update_models_figure(n_clicks, tstep, date, model, variable, static, view, z figures.append(figure) if DEBUG: - for f in figures: - print("************", f.id, f.style, "**************") + for fig in figures: + print("************", fig.id, fig.style, "**************") res = [ dbc.Row( @@ -1016,6 +907,5 @@ def update_models_figure(n_clicks, tstep, date, model, variable, static, view, z no_gutters=True, ) for row in range(nrows) ] - if DEBUG: print(ncols, nrows, len(res), [(type(i), len(i.children)) for i in res]) if DEBUG: print("**** REQUEST TIME", str(time.time() - st_time)) return res diff --git a/tabs/observations.py b/tabs/observations.py index cc01973c802e3f516f0266e3b8ca7fdd2ea7745b..f07dbf863460af981dceef8b74b576dbbc17fdc2 100644 --- a/tabs/observations.py +++ b/tabs/observations.py @@ -7,7 +7,7 @@ from data_handler import DEFAULT_MODEL from data_handler import FREQ from data_handler import VARS from data_handler import MODELS -from data_handler import DATES +from data_handler import START_DATE, END_DATE from data_handler import STYLES from data_handler import DISCLAIMER_NO_FORECAST # from tabs.forecast import layout_view @@ -16,8 +16,6 @@ from utils import get_vis_edate from datetime import datetime as dt from datetime import timedelta -start_date = DATES["start_date"] -end_date = DATES['end_date'] or dt.now().strftime("%Y%m%d") aod_end_date = '20210318' @@ -43,7 +41,7 @@ layout_view = html.Div([ )]) -def obs_time_slider(div='obs', start=0, end=23, step=1): +def obs_time_slider(div='obs', start=0, end=23, step=1, start_date=START_DATE, end_date=END_DATE): # if DEBUG: print("------------\n", start, type(start), step, type(step), "------------\n") default_tstep = 0 @@ -86,7 +84,7 @@ def obs_time_slider(div='obs', start=0, end=23, step=1): # if tstep%2 == 0 else '' for tstep in range(start, end+1, step) } - if DEBUG: print("MARKS", 
marks[list(marks.keys())[-1]]) + if DEBUG: print("VIS MARKS", marks[list(marks.keys())[-1]]) marks[list(marks.keys())[-1]]['style'] = {} marks[list(marks.keys())[-1]]['style']['left'] = '' marks[list(marks.keys())[-1]]['style']['right'] = '-32px' @@ -125,7 +123,7 @@ def obs_time_slider(div='obs', start=0, end=23, step=1): fixed='bottom', ) -def tab_observations(window='rgb'): +def tab_observations(window='rgb', start_date=START_DATE, end_date=END_DATE): """ """ rgb_children = [ html.Span( @@ -165,7 +163,7 @@ def tab_observations(window='rgb'): alt='EUMETSAT RGB - NOT AVAILABLE', ), html.Div( - obs_time_slider(div='obs'), + obs_time_slider(div='obs', start_date=start_date, end_date=end_date), className="layout-dropdown", )], className='centered-image', @@ -237,7 +235,10 @@ def tab_observations(window='rgb'): obs_time_slider(div='obs-vis', start=0, end=18, - step=6), + step=6, + start_date=start_date, + end_date=end_date + ), #layout_view, html.Br(), html.Br(), diff --git a/tabs/observations_callbacks.py b/tabs/observations_callbacks.py index e01860d0b8a3679911550aff1efebead288bbe76..30b050076a3a903bb0c17646527209faf694bba0 100644 --- a/tabs/observations_callbacks.py +++ b/tabs/observations_callbacks.py @@ -1,38 +1,18 @@ """ TAB OBSERVATIONS """ import dash -import dash_bootstrap_components as dbc -from dash import dcc -from dash import html from dash.dependencies import Output from dash.dependencies import Input from dash.dependencies import State from dash.dependencies import ALL -from dash.dependencies import MATCH from dash.exceptions import PreventUpdate -from data_handler import DEFAULT_VAR -from data_handler import DEFAULT_MODEL -from data_handler import VARS -from data_handler import MODELS -from data_handler import OBS from data_handler import DEBUG -from data_handler import DATES -from data_handler import STYLES - -from utils import calc_matrix +from data_handler import START_DATE, END_DATE from tabs.observations import tab_observations -# from 
tabs.evaluation import STATS from datetime import datetime as dt -from datetime import timedelta -import pandas as pd -import os.path import math - -start_date = DATES['start_date'] -end_date = DATES['end_date'] or dt.now().strftime("%Y%m%d") - from data_handler import cache, cache_timeout #def register_callbacks(app, cache, cache_timeout): @@ -41,15 +21,12 @@ from data_handler import cache, cache_timeout @dash.callback( [Output('observations-tab', 'children'), Output('rgb', 'style'), - # Output('aod', 'style'), Output('visibility', 'style'), Output('variable-dropdown-observation', 'value')], [Input('rgb', 'n_clicks'), - # Input('aod', 'n_clicks'), Input('visibility', 'n_clicks')], prevent_initial_call=True ) -# def render_observations_tab(rgb_button, aod_button, vis_button): def render_observations_tab(rgb_button, vis_button): """ Function rendering requested tab """ ctx = dash.callback_context @@ -57,19 +34,23 @@ def render_observations_tab(rgb_button, vis_button): bold = { 'font-weight': 'bold' } norm = { 'font-weight': 'normal' } + if DEBUG: + print(rgb_button, vis_button) + if ctx.triggered: + if DEBUG: + print(ctx.triggered[0]["prop_id"].split(".")) button_id = ctx.triggered[0]["prop_id"].split(".")[0] if button_id == "rgb" and rgb_button: - return tab_observations('rgb'), bold, norm, button_id + return tab_observations('rgb', START_DATE, END_DATE), bold, norm, button_id # elif button_id == "aod" and aod_button: # return tab_observations('aod'), norm, bold, norm, button_id elif button_id == "visibility" and vis_button: - return tab_observations('visibility'), norm, bold, button_id - else: - raise PreventUpdate + return tab_observations('visibility', START_DATE, END_DATE), norm, bold, button_id - return dash.no_update, bold, norm, 'rgb' + return dash.no_update, bold, norm, 'rgb' + raise PreventUpdate # @dash.callback( # Output('aod-image', 'src'), @@ -145,6 +126,7 @@ def render_observations_tab(rgb_button, vis_button): State('btn-middleeast', 'active')], 
prevent_initial_call=True ) +@cache.memoize(timeout=cache_timeout) def update_image_src(btn_fulldisc, btn_middleeast, date, tstep, btn_fulldisc_active, btn_middleeast_active): if DEBUG: @@ -190,6 +172,7 @@ def update_image_src(btn_fulldisc, btn_middleeast, date, tstep, btn_fulldisc_act State('obs-slider-graph', 'value')], prevent_initial_call=True ) +@cache.memoize(timeout=cache_timeout) def start_stop_obs_autoslider(n_play, disabled, value): """ Play/Pause map animation """ ctx = dash.callback_context @@ -210,6 +193,7 @@ def start_stop_obs_autoslider(n_play, disabled, value): [Input('obs-slider-interval', 'n_intervals')], prevent_initial_call=True ) +@cache.memoize(timeout=cache_timeout) def update_obs_slider(n): """ Update slider value according to the number of intervals """ if DEBUG: print('SERVER: updating slider-graph ' + str(n)) @@ -281,7 +265,7 @@ def update_vis_figure(date, tstep, zoom, center): except: pass else: - date = end_date + date = END_DATE if zoom: zoom = zoom[0] @@ -299,8 +283,3 @@ def update_vis_figure(date, tstep, zoom, center): if DEBUG: print("POINTS LAYER", points_layer) fig = get_figure(model=None, var=None, layer=points_layer, zoom=zoom, center=center, tag='obs-vis') return fig -# return html.Div( -# fig, -# id='obs-vis-graph', -# className='graph-with-slider' -# ) diff --git a/tests/test_data_handler.py b/tests/test_data_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..594dab15c2b7c9ef9201b3dfb5e6a54aa89199cc --- /dev/null +++ b/tests/test_data_handler.py @@ -0,0 +1,305 @@ +import pytest +import importlib +code = importlib.import_module('data_handler') +from data_handler import START_DATE +from data_handler import END_DATE +from data_handler import OBS + +@pytest.fixture +def aeronet_instance(): + return code.Observations1dHandler('20220404','20220405' , 'aeronet') + +def test_generate_obs1d_tstep_trace1(aeronet_instance): + run = aeronet_instance.generate_obs1d_tstep_trace('OD550_DUST') + assert 
str(type(run[1])) == "" + assert run[0].shape ==(120, 3) + +# @pytest.fixture +# def obsTSHandler_instance(): +# return code.ObsTimeSeriesHandler('aeronet','20220404','20220405', 'OD550_DUST', ['median', 'monarch']) +# +# def test_retrieve_timeseries(): +# assert obsTSHandler_instance.retrieve_timeseries(idx, st_name, model) == 'hello' + + +# =================== TIME SERIES HANDLER ============================ +@pytest.fixture +def TSHandler(): + return code.TimeSeriesHandler('median', '20220606', 'OD550_DUST') + +def test_TimeSeriesHandler(TSHandler): + run = TSHandler.retrieve_single_point(1, 45, 45, model='median') + assert float(run) == 0.10633552074432373 + +def test_TimeSeriesHandler_1(TSHandler): + run = TSHandler.retrieve_single_point(3, 5, 5, model=None) + assert float(run) == 0.00830854382365942 + +def test_retrieve_timeseries(TSHandler): + run = TSHandler.retrieve_timeseries(5, 5, model=None, method='netcdf', forecast=False) + assert run.layout.title.text =='Dust Optical Depth @ lat = 5 and lon = 5' + +def test_retrieve_timeseries_1(TSHandler): + run = TSHandler.retrieve_timeseries(35, 15, model='monarch', method='netcdf', forecast=True) + assert run.layout.title.text =='Dust Optical Depth @ lat = 35 and lon = 15' + +def test_retrieve_timeseries_2(TSHandler): + run = TSHandler.retrieve_timeseries(35, 15, model='monarch', method='netcdf', forecast=True) + assert run.data[0].name == 'MULTI-MODEL (35.25, 15.25)' + +# =================== FIGURE HANDLER ============================ +@pytest.fixture +def FigureHandler(): + return code.FigureHandler('median', '20220606') + +def test_FH_get_center(FigureHandler): + assert FigureHandler.get_center([35,45]) == [35, 45] + assert FigureHandler.get_center([-35,-45]) == [-35, -45] + assert FigureHandler.get_center() == [43.25, 16.5] + + +def test_set_data_AOD(FigureHandler): + assert len(FigureHandler.set_data('OD550_DUST', tstep=0)[0]) == 22620 + assert FigureHandler.set_data('OD550_DUST', tstep=0)[0][0] == -26.75 + 
+ assert len(FigureHandler.set_data('OD550_DUST', tstep=3)[0]) == 22620 + assert FigureHandler.set_data('OD550_DUST', tstep=3)[0][0] == -26.75 + +def test_set_data_SCONC(FigureHandler): + assert len(FigureHandler.set_data('SCONC_DUST', tstep=0)[0]) == 22620 + assert FigureHandler.set_data('SCONC_DUST', tstep=0)[0][0] == -26.75 + + +def test_retrieve_cdatetime(FigureHandler): + assert str(FigureHandler.retrieve_cdatetime(tstep=0)) == '2022-06-06 12:00:00' + assert str(FigureHandler.retrieve_cdatetime(tstep=6)) == '2022-06-07 06:00:00' + assert str(FigureHandler.retrieve_cdatetime(tstep=12)) == '2022-06-08 00:00:00' + +def test_generate_contour_tstep_trace_leaflet(FigureHandler): + result = "GeoJSON(hideout={'colorscale': ['rgba(255,255,255,0.4)', '#a1ede3', '#5ce3ba', '#fcd775', '#da7230', '#9e6226', '#714921', '#392511', '#1d1309'], 'bounds': array([ 0. , 0.1, 0.2, 0.4, 0.8, 1.2, 1.6, 3.2, 6.4, 10. ]" + assert result in str(FigureHandler.generate_contour_tstep_trace_leaflet('OD550_DUST', tstep=0)[0]) + result2 = "Colorbar(classes=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], colorscale=['rgba(255,255,255,0.4)', '#a1ede3', '#5ce3ba', '#fcd775', '#da7230', '#9e6226', '#714921', '#392511', '#1d1309']" + assert result2 in str(FigureHandler.generate_contour_tstep_trace_leaflet('OD550_DUST', tstep=0)[1]) + +def test_generate_contour_tstep_trace_AOD(FigureHandler): + assert FigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=0)['name'] == 'Dust Optical Depth_contours' + assert FigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=0)['showlegend'] == False + +def test_generate_contour_tstep_trace_SCONC(FigureHandler): + assert FigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=0)['name'] == 'Dust Surface Conc. 
(µg/m³)_contours' + assert FigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=0)['showlegend'] == False + +def test_generate_var_tstep_trace_leaflet_AOD(FigureHandler): + assert str(FigureHandler.generate_var_tstep_trace_leaflet(varname='OD550_DUST', tstep=0))[0:200] == "GeoJSON(data={'type': 'FeatureCollection', 'features': [{'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [-26.75, 0.25]}, 'properties': {'value': 0.0, 'tooltip': 'Lat 0.25 Lon -26.75 V" + +def test_generate_var_tstep_trace_leaflet_SCONC(FigureHandler): + assert str(FigureHandler.generate_var_tstep_trace_leaflet(varname='SCONC_DUST', tstep=0))[0:200] == "GeoJSON(data={'type': 'FeatureCollection', 'features': [{'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [-26.75, 0.25]}, 'properties': {'value': 0.009999999776482582, 'tooltip': 'Lat " \ + + +def test_generate_var_tstep_trace_AOD(FigureHandler): + assert FigureHandler.generate_var_tstep_trace(varname='OD550_DUST', tstep=0)['name'] == "MULTI-MODEL" + assert len(FigureHandler.generate_var_tstep_trace(varname='OD550_DUST', tstep=6)['lat']) == 22620 + + +def test_generate_var_tstep_trace_SCONC(FigureHandler): + assert FigureHandler.generate_var_tstep_trace(varname='SCONC_DUST', tstep=6)['name'] == "MULTI-MODEL" + assert len(FigureHandler.generate_var_tstep_trace(varname='SCONC_DUST', tstep=0)['lat']) == 22620 + + +def test_get_title(FigureHandler): + assert FigureHandler.get_title('OD550_DUST', tstep=0) == 'MULTI-MODEL Dust Optical Depth (550nm)
Valid: 12h 06 Jun 2022 (H+00)' + assert FigureHandler.get_title('SCONC_DUST', tstep=9) == 'MULTI-MODEL Dust Surface Conc. (µg/m³)
Valid: 15h 07 Jun 2022 (H+27)' + + +def test_hour_to_step(FigureHandler): + assert FigureHandler.hour_to_step(0) == 0 + assert FigureHandler.hour_to_step(1) == 0 + assert FigureHandler.hour_to_step(5) == 0 + assert FigureHandler.hour_to_step(36) == 8 + assert FigureHandler.hour_to_step(54) == 14 + assert FigureHandler.hour_to_step(70) == 0 + +def test_retrieve_var_tstep(FigureHandler): + assert FigureHandler.retrieve_var_tstep(varname='OD550_DUST', tstep=0, hour=None, static=True, aspect=(1,1), center=None, selected_tiles='carto-positron', zoom=None, layer=None, tag='empty').children[0].id == {'tag': 'model-tile-layer', 'index': 'median'} + assert FigureHandler.retrieve_var_tstep(varname='OD550_DUST', tstep=0, hour=None, static=True, aspect=(1,1), center=None, selected_tiles='carto-positron', zoom=None, layer=None, tag='empty').children[3].classes == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + assert str(FigureHandler.retrieve_var_tstep(varname='OD550_DUST', tstep=0, hour=None, static=True, aspect=(1,1), center=None, selected_tiles='carto-positron', zoom=None, layer=None, tag='empty').children[4].children) == "P(B(['MULTI-MODEL Dust Optical Depth (550nm)', Br(None), 'Valid: 12h 06 Jun 2022 (H+00)']))" + +# =================== Scores Figure Handler ============================ +@pytest.fixture +def ScoresFigureHandler(): + return code.ScoresFigureHandler('aeronet', 'bias', '202206') + + +def test_get_mapbox(ScoresFigureHandler): + assert ScoresFigureHandler.get_mapbox('carto-positron', False, 2.8, None)['bearing'] == 0 + assert ScoresFigureHandler.get_mapbox('carto-positron', False, 2.8, None)['pitch'] == 0 + assert ScoresFigureHandler.get_mapbox('carto-positron', False, 2.8, None)['uirevision'] == True + assert ScoresFigureHandler.get_mapbox('carto-positron', False, 2.8, None)['zoom'] == 2.8 + assert str(ScoresFigureHandler.get_mapbox('carto-positron', False, 2.8, None)['center']) == "layout.mapbox.Center({\n 'lat': 33.5, 'lon': 16.5\n})" + + +def 
test_generate_trace(ScoresFigureHandler): + stats = ['Lille', 'Mainz', 'Palaiseau', 'Paris', 'Kyiv'] + vals = [-0.11, -0.12, -0.07, -0.09, -0.14] + assert ScoresFigureHandler.generate_trace(35, 35, stats, vals)['name'] == 'MBE score' + assert ScoresFigureHandler.generate_trace(35, 35, stats, vals)['marker']['cmin'] == -0.1 + assert ScoresFigureHandler.generate_trace(35, 35, stats, vals)['marker']['cmax'] == 0.1 + assert ScoresFigureHandler.generate_trace(35, 35, stats, vals)['marker']['colorbar']['x'] == 0.94 + +def test_retrieve_scores(ScoresFigureHandler): + assert ScoresFigureHandler.retrieve_scores('median', aspect=(1,1), center=None).data[1].name == 'MBE score' + assert ScoresFigureHandler.retrieve_scores('median', aspect=(1,1), center=None).data[1].type == 'scattermapbox' + + +# =================== Vis Handler ============================ +@pytest.fixture +def VisFigureHandler(): + return code.VisFigureHandler('20220808') + +def test_set_data(VisFigureHandler): + assert VisFigureHandler.set_data(0)[5][1][0] == [11] + assert VisFigureHandler.set_data(0)[5][3][0][1] == 1 + assert VisFigureHandler.set_data(24) == ([], [], [], [], [], ()) + assert VisFigureHandler.set_data(36) == ([], [], [], [], [], ()) + + +def test_generate_var_tstep_trace(VisFigureHandler): + returned = VisFigureHandler.generate_var_tstep_trace([], [], [], [], [], (), ('#714921', '#da7230', '#fcd775', 'CadetBlue'), ('<1 km', '1 - 2 km', '2 - 5 km', 'Haze'), ('o', 'o', 'o', '^'), 6) + assert len(returned) == 2 + assert str(type(returned[0])) == "" + assert returned[1][1].id == 'vis-info' + assert returned[1][1].children ==['NO DATA AVAILABLE'] + assert returned[1][0].data['type'] == 'FeatureCollection' + +def test_vis_get_title(VisFigureHandler): + assert str(VisFigureHandler.get_title(tstep=0)) =="['Visibility reduced by airborne dust', Br(None), '08 August 2022 00-06 UTC']" + assert str(VisFigureHandler.get_title(tstep=9)) =="['Visibility reduced by airborne dust', Br(None), '08 August 2022 
09-15 UTC']" + assert str(VisFigureHandler.get_title(tstep=39)) =="['Visibility reduced by airborne dust', Br(None), '08 August 2022 39-45 UTC']" + assert str(VisFigureHandler.get_title(tstep=72)) =="['Visibility reduced by airborne dust', Br(None), '08 August 2022 72-78 UTC']" + +def test_vis_retrieve_var_tstep(VisFigureHandler): + result = VisFigureHandler.retrieve_var_tstep(tstep=0, hour=None, static=True, aspect=(1,1), center=None) + assert str(type(result[0])) == "" + assert result[1][0].data['type'] =='FeatureCollection' + assert result[1][0].data['features'][0]['geometry']['coordinates'] ==[50.82, 28.97] + + result = VisFigureHandler.retrieve_var_tstep(tstep=1, hour=1, static=True, aspect=(1,1), center=None) + assert str(type(result[0])) == "" + assert result[1][0].data['type'] =='FeatureCollection' + +# =================== Prob Handler ============================ +@pytest.fixture +def ProbFigureHandler(): + return code.ProbFigureHandler('OD550_DUST', 0.1, '20220808') + + +def test_prob_set_data(ProbFigureHandler): + assert ProbFigureHandler.set_data('OD550_DUST', tstep=0)[0].data[0] == -24.75 + assert ProbFigureHandler.set_data('OD550_DUST', tstep=0)[0].data[9] == -20.25 + assert ProbFigureHandler.set_data('OD550_DUST', tstep=0)[1].data[9] == 5.25 + + +def test_prob_retrieve_cdatetime(ProbFigureHandler): + assert str(ProbFigureHandler.retrieve_cdatetime(tstep=0)) == '2022-08-08 00:00:00' + assert str(ProbFigureHandler.retrieve_cdatetime(tstep=1)) == '2022-08-09 00:00:00' + +def test_prob_generate_contour_tstep_trace_AOD(ProbFigureHandler): + #test day 0 + assert ProbFigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=0)[0].url == '/dashboard/assets/geojsons/prob/od550_dust/0.1/geojson/20220808/00_20220808_OD550_DUST.geojson' + assert ProbFigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=0)[0].options == {'style': {'variable': 'forecastTab.forecastMaps.styleHandle'}} + assert 
ProbFigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=0)[1].classes ==[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + #test day 1 + assert ProbFigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=1)[0].url == '/dashboard/assets/geojsons/prob/od550_dust/0.1/geojson/20220808/01_20220808_OD550_DUST.geojson' + assert ProbFigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=1)[0].options == {'style': {'variable': 'forecastTab.forecastMaps.styleHandle'}} + assert ProbFigureHandler.generate_contour_tstep_trace('OD550_DUST', tstep=1)[1].classes ==[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + +def test_prob_generate_contour_tstep_trace_SCONC(ProbFigureHandler): + #test day 0 + assert ProbFigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=0)[0].url == '/dashboard/assets/geojsons/prob/od550_dust/0.1/geojson/20220808/00_20220808_OD550_DUST.geojson' + assert ProbFigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=0)[0].options == {'style': {'variable': 'forecastTab.forecastMaps.styleHandle'}} + assert ProbFigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=0)[1].classes ==[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + #test day 1 + assert ProbFigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=1)[0].url == '/dashboard/assets/geojsons/prob/od550_dust/0.1/geojson/20220808/01_20220808_OD550_DUST.geojson' + assert ProbFigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=1)[0].options == {'style': {'variable': 'forecastTab.forecastMaps.styleHandle'}} + assert ProbFigureHandler.generate_contour_tstep_trace('SCONC_DUST', tstep=1)[1].classes ==[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + +def test_prob_get_title(ProbFigureHandler): + assert ProbFigureHandler.get_title('OD550_DUST', tstep=0) =='Daily Mean of Dust AOD
Probability of exceeding 0.1
ENS members: 10 Run: 08 Aug 2022 Valid: 08 Aug 2022' + assert ProbFigureHandler.get_title('SCONC_DUST', tstep=1) == 'Daily Mean of Dust Surface Concentration
Probability of exceeding 0.1µg/m³
ENS members: 10 Run: 08 Aug 2022 Valid: 09 Aug 2022' + +def test_prob_retrieve_var_tstep(ProbFigureHandler): + assert ProbFigureHandler.retrieve_var_tstep('OD550_DUST', 0, True, (1,1))[0].url =='/dashboard/assets/geojsons/prob/od550_dust/0.1/geojson/20220808/00_20220808_OD550_DUST.geojson' + assert ProbFigureHandler.retrieve_var_tstep('OD550_DUST', 1, True, (1,1))[0].options =={'style': {'variable': 'forecastTab.forecastMaps.styleHandle'}} + + assert ProbFigureHandler.retrieve_var_tstep('SCONC_DUST', 1, True, (1,1))[0].url =='/dashboard/assets/geojsons/prob/od550_dust/0.1/geojson/20220808/01_20220808_OD550_DUST.geojson' + assert ProbFigureHandler.retrieve_var_tstep('SCONC_DUST', 0, True, (1,1))[0].options =={'style': {'variable': 'forecastTab.forecastMaps.styleHandle'}} + + +# =================== Was Handler ============================ +@pytest.fixture +def WasFigureHandler(): + return code.WasFigureHandler(was='burkinafaso', model='median', variable='SCONC_DUST', selected_date='20220808') + +def test_was_get_regions_data(WasFigureHandler): + #day 1 + assert WasFigureHandler.get_regions_data(day=1)[0][0] == 'Boucle du Mouhoun' + assert WasFigureHandler.get_regions_data(day=1)[0][1] == 'Cascades' + assert WasFigureHandler.get_regions_data(day=1)[0][-1] == 'Sud-Ouest' + + assert WasFigureHandler.get_regions_data(day=1)[1][0] == 'green' + assert WasFigureHandler.get_regions_data(day=1)[1][1] == 'green' + assert WasFigureHandler.get_regions_data(day=1)[1][-1] == 'green' + + assert WasFigureHandler.get_regions_data(day=1)[2][0] == 'Normal' + assert WasFigureHandler.get_regions_data(day=1)[2][1] == 'Normal' + assert WasFigureHandler.get_regions_data(day=1)[2][-1] =='Normal' + + #day 2 + assert WasFigureHandler.get_regions_data(day=2)[0][0] == 'Boucle du Mouhoun' + assert WasFigureHandler.get_regions_data(day=2)[0][1] == 'Cascades' + assert WasFigureHandler.get_regions_data(day=2)[0][-1] == 'Sud-Ouest' + + assert WasFigureHandler.get_regions_data(day=2)[1][0] == 'green' 
+ assert WasFigureHandler.get_regions_data(day=2)[1][1] == 'green' + assert WasFigureHandler.get_regions_data(day=2)[1][-1] == 'green' + + assert WasFigureHandler.get_regions_data(day=2)[2][0] == 'Normal' + assert WasFigureHandler.get_regions_data(day=2)[2][1] == 'Normal' + assert WasFigureHandler.get_regions_data(day=2)[2][-1] =='Normal' + + +def test_was_get_geojson_url(WasFigureHandler): + assert WasFigureHandler.get_geojson_url(day=1) == '/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_1.geojson' + assert WasFigureHandler.get_geojson_url(day=2) == '/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_2.geojson' + +def test_was_retrieve_cdatetime(WasFigureHandler): + assert str(WasFigureHandler.retrieve_cdatetime(tstep=0)) == '2022-08-08 12:00:00' + assert str(WasFigureHandler.retrieve_cdatetime(tstep=1)) == '2022-08-08 15:00:00' + assert str(WasFigureHandler.retrieve_cdatetime(tstep=2)) == '2022-08-08 18:00:00' + assert str(WasFigureHandler.retrieve_cdatetime(tstep=3)) == '2022-08-08 21:00:00' + + +def test_was_generate_contour_tstep_trace(WasFigureHandler): + assert WasFigureHandler.generate_contour_tstep_trace(1)[0].options == {'style': {'variable': 'forecastTab.wasMaps.styleHandle'}} + assert WasFigureHandler.generate_contour_tstep_trace(1)[0].url == '/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_1.geojson' + assert WasFigureHandler.generate_contour_tstep_trace(1)[1].className == 'was-legend' + + assert WasFigureHandler.generate_contour_tstep_trace(2)[0].options == {'style': {'variable': 'forecastTab.wasMaps.styleHandle'}} + assert WasFigureHandler.generate_contour_tstep_trace(2)[0].url == '/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_2.geojson' + assert WasFigureHandler.generate_contour_tstep_trace(2)[1].className == 'was-legend' + + +def test_was_get_title(WasFigureHandler): + assert str(WasFigureHandler.get_title(0)) =="Barcelona 
Dust Regional Center - Burkina Faso WAS.
Expected concentration of airborne dust.
Issued: 08 Aug 2022. Valid: 08 Aug 2022" + assert str(WasFigureHandler.get_title(1)) =="Barcelona Dust Regional Center - Burkina Faso WAS.
Expected concentration of airborne dust.
Issued: 08 Aug 2022. Valid: 09 Aug 2022" + + +def test_was_retrieve_var_tstep(WasFigureHandler): + assert WasFigureHandler.retrieve_var_tstep(1, True, (1,1))[0].url =='/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_1.geojson' + assert WasFigureHandler.retrieve_var_tstep(1, True, (1,1))[0].options =={'style': {'variable': 'forecastTab.wasMaps.styleHandle'}} + + assert WasFigureHandler.retrieve_var_tstep(2, True, (1,1))[0].url =='/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_2.geojson' + assert WasFigureHandler.retrieve_var_tstep(2, True, (1,1))[0].options =={'style': {'variable': 'forecastTab.wasMaps.styleHandle'}} + diff --git a/tests/test_evaluation.py b/tests/test_evaluation.py index fc25146abed294de2328fb88881bbc813610affc..ee0ecf400b8083c4621d052794d605d247c69f0a 100644 --- a/tests/test_evaluation.py +++ b/tests/test_evaluation.py @@ -1,15 +1,26 @@ import pytest -import pandas as pd import importlib -import numpy as np -from contextvars import copy_context -import dash -from dash._callback_context import context_value -from dash._utils import AttributeDict code = importlib.import_module('tabs.evaluation') -#=================test sidebar evaluation =========================== -def test_sidebar_evaluation(): +#=================TEST SIDEBAR EVALUATION =========================== +def test_sidebar_evaluation_nrt(): assert "Button(children='Visual comparison', id='nrt-evaluation', color='link', style={'fontWeight': 'bold'}" in str(code.sidebar_evaluation('nrt')) + + assert "[Div(children=[Label('Variable'), Dropdown(options=[{'label': 'AOD', 'value': 'OD550_DUST'}, {'label': 'Concentration', 'value': 'SCONC_DUST'}, {'label': 'Dry deposition', 'value': 'DUST_DEPD'}" in str(code.sidebar_evaluation('nrt')) + +def test_sidebar_evaluation_scores(): assert "children='Statistics', id='scores-evaluation', color='link', style={'fontWeight': 'bold'})], className='sidebar-item')]" in 
str(code.sidebar_evaluation('scores')) + + assert "[Div(children=[Label('Variable'), Dropdown(options=[{'label': 'AOD', 'value': 'OD550_DUST'}, {'label': 'Concentration', 'value': 'SCONC_DUST'}" in str(code.sidebar_evaluation('scores')) + +#=================TEST TAB EVALUATION =========================== +def test_tab_evaluation_nrt(): + assert "Tab(children=[Span(children=P('Visual comparison'), className='description-title'), Span(children=P('The visual comparison offers a quick overview of the quality of the forecast." in str(code.tab_evaluation(window='nrt')) + + assert "id='open-eval-timeseries')], id='loading-ts-eval-modal', fullscreen=True, fullscreen_style={'opacity': '0.5', 'zIndex': 1000})], id='evaluation-tab', className='horizontal-menu', label='Evaluation', value='evaluation-tab')" in str(code.tab_evaluation(window='nrt')) + +def test_tab_evaluation_scores(): + assert "Tab(children=[Alert(children='To explore the evaluation of the forecast, please make a selection of Network, Models, Statistics, Timescale and click on APPLY.', id='alert-evaluation'" in str(code.tab_evaluation(window='scores')) + + assert "id='eval-tables', style={'padding': '1rem 0.1rem 0.1rem 1rem', 'width': '95%'})], id='evaluation-tab', className='horizontal-menu', label='Evaluation', value='evaluation-tab')" in str(code.tab_evaluation(window='scores')) diff --git a/tests/test_forecast.py b/tests/test_forecast.py index 24a209efdb6a16a3df3335d57bdb786aa6d5c5c2..8a15aaed951e5cb3786f74f5179f9bfc3ae9c1f9 100644 --- a/tests/test_forecast.py +++ b/tests/test_forecast.py @@ -1,6 +1,4 @@ -import pytest import importlib -import dash from data_handler import VARS from data_handler import MODELS code = importlib.import_module('tabs.forecast') @@ -14,33 +12,33 @@ def test_time_series(): def test_layout_layers(): assert "Div([Span(DropdownMenu(children=[DropdownMenuItem(children='AIRPORTS', id='airports')], id='map-layers-dropdown', direction='up', label='LAYERS'))])" in 
str(code.layout_layers()) -def test_time_slider(): - assert "Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='model-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-play', className='fa fa-play text-center', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=72, step=3, marks={0: '0', 3: '3', 6: '6', 9: '9', 12: '12', 15: '15', 18: '18', 21: '21', 24: '24', 27: '27', 30: '30', 33: '33', 36: '36', 39: '39', 42: '42', 45: '45', 48: '48', 51: '51', 54: '54', 57: '57', 60: '60', 63: '63', 66: '66', 69: '69', 72: '72'}, value=0, id='slider-graph'), className='timesliderline')], className='timeslider')" in str(code.time_slider('20220808')) +def test_model_time_bar(): + assert "Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='model-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-play', className='fa fa-play text-center', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=72, step=3, marks={0: {'label': '0'}, 3: {'label': '3'}, 6: {'label': '6'}, 9: {'label': '9'}, 12: {'label': '12'}, 15: {'label': '15'}, 18: {'label': '18'}, 21: {'label': '21'}, 24: {'label': '24'}, 27: {'label': '27'}, 30: {'label': '30'}, 33: {'label': '33'}, 36: {'label': '36'}, 39: {'label': '39'}, 42: {'label': '42'}, 45: {'label': '45'}, 48: {'label': '48'}, 51: {'label': '51'}, 
54: {'label': '54'}, 57: {'label': '57'}, 60: {'label': '60'}, 63: {'label': '63'}, 66: {'label': '66'}, 69: {'label': '69'}, 72: {'label': '72'}}, value=0, id='model-slider-graph'), id='model-slider-container', className='timesliderline')], className='timeslider')" in str(code.gen_time_bar('model', '20120120', '20220808')) -def test_prob_time_slider(): - assert "Div(children=[Span(children=DatePickerSingle(date='20220809', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 9, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 9, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='prob-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: 'Today', 1: 'Tomorrow'}, value=0, id='prob-slider-graph'), className='timesliderline')], className='timeslider')" in str(code.prob_time_slider('20220809')) +def test_prob_time_bar(): + assert "Div(children=[Span(children=DatePickerSingle(date='20220809', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 9, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 9, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='prob-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: {'label': 'TUE 09'}, 1: {'label': 'WED 10', 'style': {'left': '', 'right': '-40px'}}}, value=0, id='prob-slider-graph'), id='prob-slider-container', className='timesliderline')], className='timeslider')" in str(code.gen_time_bar('prob', '20120120', '20220809')) -def test_was_time_slider(): - assert "Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), 
clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='was-date-picker'), className='timesliderline'), Span(children=Slider(min=1, max=2, step=1, marks={1: 'Today', 2: 'Tomorrow'}, value=1, id='was-slider-graph'), className='timesliderline')], className='timeslider')" in str(code.was_time_slider('20220808')) +def test_was_time_bar(): + assert "Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='was-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: {'label': 'MON 08'}, 1: {'label': 'TUE 09', 'style': {'left': '', 'right': '-40px'}}}, value=0, id='was-slider-graph'), id='was-slider-container', className='timesliderline')], className='timeslider')" in str(code.gen_time_bar('was', '20120120', '20220808')) def test_models_children(): - assert "[Div(id={'tag': 'tab-name', 'index': 'models'}), Alert(children='To explore the forecast, please select a variable and click on APPLY.', id='alert-forecast', color='primary', duration=6000, fade=True, is_open=True, style={'overflow': 'auto', 'marginBottom': 0}), Alert(children='If you close the location tooltip, please refresh the page before clicking on another specific location on the map.', id='alert-popup', color='primary', duration=6000, fade=True, is_open=False, style={'overflow': 'auto', 'marginBottom': 0}), Div(children=[Container(children=[], id='graph-collection', fluid=True), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer')], id='div-collection'), Div([Store(id='model-clicked-coords'), Store(id='current-popups-stored')]), Div(Interval(id='slider-interval', disabled=True, 
interval=1000, n_intervals=0))" in str(code.models_children('20220808')) + assert "[Div(id={'tag': 'tab-name', 'index': 'models'}), Alert(children='To explore the forecast, please select a variable and click on APPLY.', id='alert-forecast', color='primary', duration=6000, fade=True, is_open=True, style={'overflow': 'auto', 'marginBottom': 0}), Div(children=[Container(children=[], id='graph-collection', fluid=True), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer')], id='div-collection'), Div([Store(id='model-clicked-coords'), Store(id='current-popups-stored')]), Div(Interval(id='slider-interval', disabled=True, interval=1000, n_intervals=0))" in str(code.models_children('20120120', '20220808')) def test_prob_children(): - assert "[Div(id={'tag': 'tab-name', 'index': 'prob'}), Div(id='prob-graph', className='graph-with-slider'), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='prob-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: 'Today', 1: 'Tomorrow'}, value=0, id='prob-slider-graph'), className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 
'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)]" in str(code.prob_children('20220808')) + assert "[Div(id={'tag': 'tab-name', 'index': 'prob'}), Div(id='prob-graph', className='graph-with-slider'), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='prob-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: {'label': 'MON 08'}, 1: {'label': 'TUE 09', 'style': {'left': '', 'right': '-40px'}}}, value=0, id='prob-slider-graph'), id='prob-slider-container', className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom 
navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)]" in str(code.prob_children('20120120', '20220808')) def test_was_children(): - assert "id='was-graph', className='graph-with-slider')), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='was-date-picker'), className='timesliderline'), Span(children=Slider(min=1, max=2, step=1, marks={1: 'Today', 2: 'Tomorrow'}, value=1, id='was-slider-graph'), className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)]" in str(code.was_children('20220808')) + assert "id='was-graph', className='graph-with-slider')), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', 
min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='was-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: {'label': 'MON 08'}, 1: {'label': 'TUE 09', 'style': {'left': '', 'right': '-40px'}}}, value=0, id='was-slider-graph'), id='was-slider-container', className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)]" in str(code.was_children('20120120', '20220808')) def test_tab_forecast(): #TEST MODELS - assert " '57', 60: '60', 63: '63', 66: '66', 69: '69', 72: '72'}, value=0, id='slider-graph'), className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], 
id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div'), Div(children=[Spinner(children=[Modal(children=[], id='ts-modal', centered=True, is_open=False, size='xl')], id='loading-ts-modal', fullscreen=True, fullscreen_style={'opacity': '0.5', 'zIndex': '200000'}, show_initially=False)], id='open-timeseries')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)], id='forecast-tab', className='horizontal-menu', label='Forecast', value='forecast-tab')" in str(code.tab_forecast('models', '20220808')) + assert "Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div'), Div(children=[Spinner(children=[Modal(children=[], id='ts-modal', centered=True, is_open=False, size='xl')], id='loading-ts-modal', fullscreen=True, fullscreen_style={'opacity': '0.5', 'zIndex': '200000'}, show_initially=False)], id='open-timeseries')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)], id='forecast-tab', className='horizontal-menu', label='Forecast', value='forecast-tab')" in str(code.tab_forecast('models', '20120120', '20220808')) #TEST WAS - assert "id='was-graph', className='graph-with-slider')), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), 
NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='was-date-picker'), className='timesliderline'), Span(children=Slider(min=1, max=2, step=1, marks={1: 'Today', 2: 'Tomorrow'}, value=1, id='was-slider-graph'), className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)], id='forecast-tab', className='horizontal-menu', label='Forecast', value='forecast-tab')" in str(code.tab_forecast('was', '20220808')) + assert "id='was-graph', className='graph-with-slider')), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM 
YYYY', id='was-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: {'label': 'MON 08'}, 1: {'label': 'TUE 09', 'style': {'left': '', 'right': '-40px'}}}, value=0, id='was-slider-graph'), id='was-slider-container', className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)], id='forecast-tab', className='horizontal-menu', label='Forecast', value='forecast-tab')" in str(code.tab_forecast('was', '20120120', '20220808')) #TEST PROB - assert "Tab(children=[Div(id={'tag': 'tab-name', 'index': 'prob'}), Div(id='prob-graph', className='graph-with-slider'), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='prob-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: 'Today', 1: 'Tomorrow'}, value=0, id='prob-slider-graph'), 
className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)], id='forecast-tab', className='horizontal-menu', label='Forecast', value='forecast-tab')" in str(code.tab_forecast('prob', '20220808')) + assert "Div(id='prob-graph', className='graph-with-slider'), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer'), NavbarSimple(children=[Div(children=[Div(children=[Span(children=DatePickerSingle(date='20220808', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 8, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 8, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='prob-date-picker'), className='timesliderline'), Span(children=Slider(min=0, max=1, step=1, marks={0: {'label': 'MON 08'}, 1: {'label': 'TUE 09', 'style': {'left': '', 'right': '-40px'}}}, value=0, id='prob-slider-graph'), id='prob-slider-container', className='timesliderline')], className='timeslider'), Div(children=[Span(DropdownMenu(children=[DropdownMenuItem(children='Light', id={'tag': 'view-style', 'index': 'carto-positron'}, active=True), DropdownMenuItem(children='Open street map', id={'tag': 'view-style', 
'index': 'open-street-map'}, active=False), DropdownMenuItem(children='Terrain', id={'tag': 'view-style', 'index': 'stamen-terrain'}, active=False), DropdownMenuItem(children='ESRI', id={'tag': 'view-style', 'index': 'esri-world'}, active=False)], id='map-view-dropdown', direction='up', in_navbar=True, label='VIEW'))], id='map-view-dropdown-div')], id='layout-dropdown', className='layout-dropdown')], className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)], id='forecast-tab', className='horizontal-menu', label='Forecast', value='forecast-tab')" in str(code.tab_forecast('prob', '20120120', '20220808')) def test_expand_dropdown(): assert "{'models': True, 'prob': False, 'was': False}" in str(code.expand_dropdown('models')) diff --git a/tests/test_forecast_callbacks.py b/tests/test_forecast_callbacks.py new file mode 100644 index 0000000000000000000000000000000000000000..34fb246232963dd67bf21e7d458c12dccf27d8c0 --- /dev/null +++ b/tests/test_forecast_callbacks.py @@ -0,0 +1,269 @@ +import pytest +import importlib +from contextvars import copy_context +import dash +from dash._callback_context import context_value +from dash._utils import AttributeDict +code = importlib.import_module('tabs.forecast_callbacks') + + + +# =======================START render forecast test =========================== +def test_render_forecast_tab_group_1(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "group-1-toggle.n_clicks"},{"prop_id": "group-2-toggle.n_clicks"},{"prop_id": "group-3-toggle.n_clicks"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "collapse-1.is_open"},{"prop_id": "collapse-2.is_open"},{"prop_id": "collapse-3.is_open"}]})) + return code.render_forecast_tab(1, 0, 0, 'SCONC_DUST', True, False, False ) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output[0] == False + assert output[1] == False + assert output[2] == False + assert output[3] == dash.no_update + 
assert output[4] == dash.no_update + assert output[5] == dash.no_update + +def test_render_forecast_tab_group_2(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "group-1-toggle.n_clicks"},{"prop_id": "group-2-toggle.n_clicks"},{"prop_id": "group-3-toggle.n_clicks"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "collapse-1.is_open"},{"prop_id": "collapse-2.is_open"},{"prop_id": "collapse-3.is_open"}]})) + return code.render_forecast_tab(0, 1, 0, 'SCONC_DUST', False, True, False ) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output[0] == False + assert output[1] == True + assert output[2] == False + assert output[3] == False + assert output[4] == False + assert output[5] == dash.no_update + +def test_render_forecast_tab_group_3(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "group-1-toggle.n_clicks"},{"prop_id": "group-2-toggle.n_clicks"},{"prop_id": "group-3-toggle.n_clicks"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "collapse-1.is_open"},{"prop_id": "collapse-2.is_open"},{"prop_id": "collapse-3.is_open"}]})) + return code.render_forecast_tab(0,0,1, 'SCONC_DUST', False, False, True ) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output[0] == False + assert output[1] == False + assert output[2] == True + assert output[3] == False + assert output[4] == False + assert output[5] == dash.no_update + +def test_render_forecast_tab_SCONC_DUST(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "group-1-toggle.n_clicks"},{"prop_id": "group-2-toggle.n_clicks"},{"prop_id": "group-3-toggle.n_clicks"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "collapse-1.is_open"},{"prop_id": "collapse-2.is_open"},{"prop_id": "collapse-3.is_open"}]})) + return code.render_forecast_tab(0,0,0, 'SCONC_DUST', False, False, True ) + + ctx = copy_context() + output = 
ctx.run(run_callback) + assert output[0] == False + assert output[1] == False + assert output[2] == True + assert output[3] == False + assert output[4] == False + assert output[5] == dash.no_update + +def test_render_forecast_tab_OD550(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "group-1-toggle.n_clicks"},{"prop_id": "group-2-toggle.n_clicks"},{"prop_id": "group-3-toggle.n_clicks"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "collapse-1.is_open"},{"prop_id": "collapse-2.is_open"},{"prop_id": "collapse-3.is_open"}]})) + return code.render_forecast_tab(0,0,0, 'OD550_DUST', False, False, True ) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output[0] == True + assert output[1] == False + assert output[2] == False + assert output[3] == False + assert output[4] == True + assert output[5] == dash.no_update +# =======================END render forecast test =========================== + + +# =======================START update_prob_dropdown test =========================== +def test_update_prob_dropdown(): + assert str(code.update_prob_dropdown('OD550_DUST')) == "([{'label': '> 0.1 ', 'value': 'prob_0.1'}, {'label': '> 0.2 ', 'value': 'prob_0.2'}, {'label': '> 0.5 ', 'value': 'prob_0.5'}, {'label': '> 0.8 ', 'value': 'prob_0.8'}], 'prob_0.1')" + + assert str(code.update_prob_dropdown('SCONC_DUST')) == "([{'label': '> 50 µg/m³', 'value': 'prob_50'}, {'label': '> 100 µg/m³', 'value': 'prob_100'}, {'label': '> 200 µg/m³', 'value': 'prob_200'}, {'label': '> 500 µg/m³', 'value': 'prob_500'}], 'prob_50')" +# =======================END update_prob_dropdown test =========================== + + +# =======================START update_models_dropdown test =========================== +def test_update_models_dropdown(): + assert str(code.update_models_dropdown('OD550_DUST', ['median', 'monarch'])) == "([{'label': 'MULTI-MODEL', 'value': 'median', 'disabled': False}, {'label': 'MONARCH', 'value': 
'monarch', 'disabled': False}, {'label': 'CAMS-IFS', 'value': 'cams', 'disabled': False}, {'label': 'DREAM8-CAMS', 'value': 'dream8-macc', 'disabled': False}, {'label': 'NASA-GEOS', 'value': 'nasa-geos', 'disabled': False}, {'label': 'MetOffice-UM', 'value': 'metoffice', 'disabled': False}, {'label': 'NCEP-GEFS', 'value': 'ncep-gefs', 'disabled': False}, {'label': 'EMA-RegCM4', 'value': 'ema-regcm4', 'disabled': False}, {'label': 'SILAM', 'value': 'silam', 'disabled': False}, {'label': 'LOTOS-EUROS', 'value': 'lotos-euros', 'disabled': False}, {'label': 'ICON-ART', 'value': 'icon-art', 'disabled': False}, {'label': 'NOA-WRF-CHEM', 'value': 'noa', 'disabled': False}, {'label': 'WRF-NEMO', 'value': 'wrf-nemo', 'disabled': False}, {'label': 'ALADIN', 'value': 'aladin', 'disabled': False}, {'label': 'ZAMG-WRF-CHEM', 'value': 'zamg', 'disabled': False}, {'label': 'MOCAGE', 'value': 'mocage', 'disabled': False}], ['median', 'monarch'], {'display': 'none'})" + + assert str(code.update_models_dropdown('SCONC_DUST', ['cams', 'silam'])) == "([{'label': 'MULTI-MODEL', 'value': 'median', 'disabled': False}, {'label': 'MONARCH', 'value': 'monarch', 'disabled': False}, {'label': 'CAMS-IFS', 'value': 'cams', 'disabled': False}, {'label': 'DREAM8-CAMS', 'value': 'dream8-macc', 'disabled': False}, {'label': 'NASA-GEOS', 'value': 'nasa-geos', 'disabled': False}, {'label': 'MetOffice-UM', 'value': 'metoffice', 'disabled': False}, {'label': 'NCEP-GEFS', 'value': 'ncep-gefs', 'disabled': False}, {'label': 'EMA-RegCM4', 'value': 'ema-regcm4', 'disabled': False}, {'label': 'SILAM', 'value': 'silam', 'disabled': False}, {'label': 'LOTOS-EUROS', 'value': 'lotos-euros', 'disabled': False}, {'label': 'ICON-ART', 'value': 'icon-art', 'disabled': False}, {'label': 'NOA-WRF-CHEM', 'value': 'noa', 'disabled': False}, {'label': 'WRF-NEMO', 'value': 'wrf-nemo', 'disabled': False}, {'label': 'ALADIN', 'value': 'aladin', 'disabled': False}, {'label': 'ZAMG-WRF-CHEM', 'value': 'zamg', 'disabled': 
False}, {'label': 'MOCAGE', 'value': 'mocage', 'disabled': False}], ['cams', 'silam'], {'display': 'none'})" +# =======================END update_models_dropdown test =========================== + + +# =======================START CARET TESTS =========================== +def test_rotate_models_caret(): + assert code.rotate_models_caret(True) == None + assert code.rotate_models_caret(False) == {'top': '.05rem', 'transform': 'rotate(180deg)', '-ms-transform': 'rotate(180deg)', '-webkit-transform': 'rotate(180deg)'} + +def test_rotate_prob_caret(): + assert code.rotate_prob_caret(False) == None + assert code.rotate_prob_caret(True) == {'transform': 'rotate(0deg)', '-ms-transform': 'rotate(0deg)', '-webkit-transform': 'rotate(0deg)'} + +def test_rotate_was_caret(): + assert code.rotate_was_caret(False) == None + assert code.rotate_was_caret(True) == {'transform': 'rotate(0deg)', '-ms-transform': 'rotate(0deg)', '-webkit-transform': 'rotate(0deg)'} +# =======================END CARET TESTS =========================== + +# =======================START SIDEBAR_BOTTOM TESTS =========================== +def test_sidebar_bottom_info(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "info-button.n_clicks"},{"prop_id": "download-button.n_clicks"},{"prop_id": "info-collapse.is_open"},{"prop_id": "download_collapse.is_open"}]})) + return code.sidebar_bottom(1,0,True,False) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (False, False) + +def test_sidebar_bottom_info2(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "info-button.n_clicks"},{"prop_id": "download-button.n_clicks"},{"prop_id": "info-collapse.is_open"},{"prop_id": "download_collapse.is_open"}]})) + return code.sidebar_bottom(1,0,True,False) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (False, False) + +def test_sidebar_bottom_download_clicked(): + def run_callback(): 
+ context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "info-button.n_clicks"},{"prop_id": "download-button.n_clicks"},{"prop_id": "info-collapse.is_open"},{"prop_id": "download_collapse.is_open"}]})) + return code.sidebar_bottom(0,1,True,False) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (False, False) + +def test_sidebar_bottom_download_clicked2(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "info-button.n_clicks"},{"prop_id": "download-button.n_clicks"},{"prop_id": "info-collapse.is_open"},{"prop_id": "download_collapse.is_open"}]})) + return code.sidebar_bottom(0,1,False,False) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (True, False) +# =======================END SIDEBAR_BOTTOM TESTS =========================== + + +# =======================START DOWNLOAD_ANIM_LINK TESTS=========================== +def test_download_anim_link(): + assert code.download_anim_link(['monarch'], 'SCONC_DUST', '20220831', 3) == 'assets/comparison/monarch/sconc_dust/2022/08/20220831_monarch_loop.gif' + assert code.download_anim_link(['median','monarch'], 'SCONC_DUST', '20220831', 3) == 'assets/comparison/all/sconc_dust/2022/08/20220831_all_loop.gif' + assert code.download_anim_link(['monarch'], 'OD550_DUST', '20220831', 3) == 'assets/comparison/monarch/od550_dust/2022/08/20220831_monarch_loop.gif' + assert code.download_anim_link(['median','monarch'], 'OD550_DUST', '20220831', 3) == 'assets/comparison/all/od550_dust/2022/08/20220831_all_loop.gif' +# =======================END DOWNLOAD_ANIM_LINK TESTS=========================== + +def test_zoom_country(): + assert code.zoom_country(1, ['monarch'], 2, 45, 35) == ([2.0], [[45.0, 35.0]]) + assert code.zoom_country(1, ['median', 'monarch'], 2, 45, 35) == ([2.0, 2.0], [[45.0, 35.0], [45.0, 35.0]]) + +# def test_zooms(): +# def run_callback(): +# context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": 
"{'tag': 'model-map', 'index': 'median', 'n_clicks': 0}"},{"prop_id": "{'tag': 'model-map', 'index': 'median', 'n_clicks': 0}"}]})) +# return code.zooms([None, None],[{'tag': 'model-map', 'index': 'median', 'n_clicks': 0}, {'tag': 'model-map', 'index': 'monarch', 'n_clicks': 1}]) +# ctx = copy_context() +# output = ctx.run(run_callback) +# assert output == (True, False) +# +# +# ORJSON ERROR +# def test_update_was_styles_button(): +# def run_callback(): +# context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "view-style.n_clicks"},{"prop_id": "was-tile-layer.url"},{"prop_id": "view-style.active"}]})) +# return code.update_was_styles_button.uncached([None, None, 1, None], ['https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}{r}.png'], [True, False, False, False]) +# +# ctx = copy_context() +# output = ctx.run(run_callback) +# assert output == (True, False) + +# ORJSON ERROR +# def test_models_popup(): +# def run_callback(): +# context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "model-map.click_lat_lng"},{"prop_id": "model-map.id"},{"prop_id": "model-map.children"},{"prop_id": "model-date-picker.date"},{"prop_id": "slider-graph.value"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "model-clicked-coords.data"},{"prop_id": "current-popups-stored.data"}]})) +# return code.models_popup([[56.739260373724775, 91.93359375]], [], [], '20230404', 0, 'OD550_DUST', None, None) +# +# ctx = copy_context() +# output = ctx.run(run_callback) +# assert output == (True, False) + + +# =======================Start start_stop_autoslider tests=========================== +def test_start_stop_autoslider_pause(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "btn-play.n_clicks"},{"prop_id": "slider-interval.disabled"},{"prop_id": "slider-graph.value"}]})) + return code.start_stop_autoslider(1, False, 3) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (True, 1, 
{'display': 'block'}, 'fa fa-play text-center') + +def test_start_stop_autoslider_play(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "btn-play.n_clicks"},{"prop_id": "slider-interval.disabled"},{"prop_id": "slider-graph.value"}]})) + return code.start_stop_autoslider(1, True, 9) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (False, 3, {'display': 'none'}, 'fa fa-pause text-center') +# =======================END start_stop_autoslider tests=========================== + +# =======================START update_slider tests=========================== +def test_update_slider(): + assert code.update_slider.uncached(1) == 3 + assert code.update_slider.uncached(2) == 6 + assert code.update_slider.uncached(3) == 9 + assert code.update_slider.uncached(4) == 12 + assert code.update_slider.uncached(24) == 0 + assert code.update_slider.uncached(33) == 27 + assert code.update_slider.uncached(72) == 0 +# =======================END update_slider tests=========================== + +# =======================START UPDATE WAS FIGURE TESTS=========================== +def test_update_was_figure(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "was-apply.n_clicks"},{"prop_id": "was-date-picker.date"},{"prop_id": "was-slider-graph.value"},{"prop_id": "was-dropdown.value"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "was-previous.data"},{"prop_id": "view-style.active"},{"prop_id": "was-map.zoom"},{"prop_id": "was-map.center"}]})) + return code.update_was_figure.uncached(1, '20230404', 1, 'burkinafaso', None, [True, False, False, False], [True], [], []) + + ctx = copy_context() + output = ctx.run(run_callback) + assert "url='https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}{r}.png'), FullscreenControl(position='topright'), None, None, None, GeoJSON(hideout={'colorscale': ['green', 'gold', 'darkorange', 'red'], 'bounds': [0, 1, 2, 3], 'style': 
{'weight': 1, 'opacity': 1, 'color': 'white', 'dashArray': '3', 'fillOpacity': 0.7}, 'colorProp': 'value'}, hoverStyle={'arrow': {'weight': 2, 'color': 'white', 'dashArray': '', 'fillOpacity': 0.7}}, options={'style': {'variable': 'forecastTab.wasMaps.styleHandle'}}, url='/dashboard/assets/geojsons/was/burkinafaso/geojson/20230404/20230404_SCONC_DUST_1.geojson'), Div(children=[Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'green'}), Span(children='Normal', className='was-legend-label')], style={'display': 'block'}), Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'gold'}), Span(children='High', className='was-legend-label')], style={'display': 'block'}), Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'darkorange'}), Span(children='Very High', className='was-legend-label')], style={'display': 'block'}), Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'red'}), Span(children='Extremely High', className='was-legend-label')], style={'display': 'block'})], className='was-legend'), Div(children=P(B(['Barcelona Dust Regional Center - Burkina Faso WAS.', Br(None), 'Expected concentration of airborne dust.', Br(None), 'Issued: 04 Apr 2023. 
Valid: 05 Apr 2023'])), id='was-info', className='info', style={'position': 'absolute', 'top': '10px', 'left': '10px', 'zIndex': '1000', 'fontFamily': " in str(output) + +def test_update_was_figure_zooms(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "was-apply.n_clicks"},{"prop_id": "was-date-picker.date"},{"prop_id": "was-slider-graph.value"},{"prop_id": "was-dropdown.value"},{"prop_id": "variable-dropdown-forecast.value"},{"prop_id": "was-previous.data"},{"prop_id": "view-style.active"},{"prop_id": "was-map.zoom"},{"prop_id": "was-map.center"}]})) + return code.update_was_figure.uncached(2, '20230404', 1, 'cabo_verde', 'burkinafaso', [True, False, False, False],[True], [8], [[11.982397942974229, -2.6312255859375004]]) + + ctx = copy_context() + output = ctx.run(run_callback) + assert "url='https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}{r}.png'), FullscreenControl(position='topright'), None, None, None, GeoJSON(hideout={'colorscale': ['green', 'gold', 'darkorange', 'red'], 'bounds': [0, 1, 2, 3], 'style': {'weight': 1, 'opacity': 1, 'color': 'white', 'dashArray': '3', 'fillOpacity': 0.7}, 'colorProp': 'value'}, hoverStyle={'arrow': {'weight': 2, 'color': 'white', 'dashArray': '', 'fillOpacity': 0.7}}, options={'style': {'variable': 'forecastTab.wasMaps.styleHandle'}}, url='/dashboard/assets/geojsons/was/cabo_verde/geojson/20230404/20230404_SCONC_DUST_1.geojson'), Div(children=[Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'green'}), Span(children='Normal', className='was-legend-label')], style={'display': 'block'}), Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'gold'}), Span(children='High', className='was-legend-label')], style={'display': 'block'}), Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'darkorange'}), Span(children='Very High', className='was-legend-label')], 
style={'display': 'block'}), Div(children=[Span(children='', className='was-legend-point', style={'backgroundColor': 'red'}), Span(children='Extremely High', className='was-legend-label')], style={'display': 'block'})], className='was-legend'), Div(children=P(B(['Barcelona Dust Regional Center - Cape Verde WAS.', Br(None), 'Expected concentration of airborne dust.', Br(None), 'Issued: 04 Apr 2023. Valid: 05 Apr 2023'])), id='was-info', className='info', style={'position': 'absolute', 'top': '10px', 'left': '10px', 'zIndex': '1000', 'fontFamily':" in str(output) +# =======================END UPDATE WAS FIGURE TESTS=========================== + + + + + + + + + + + + + + + + + + + + + + + + +#some comment diff --git a/tests/test_observations.py b/tests/test_observations.py index a9e8c4f69524bcee9902f0aa3798cc49313bada3..9a1eecb4fd8519502413ce668ddf6a2f70836e0a 100644 --- a/tests/test_observations.py +++ b/tests/test_observations.py @@ -12,13 +12,17 @@ code = importlib.import_module('tabs.observations') #=================TEST OBS_TIME_SLIDER=========================== def test_obs_time_slider(): #TEST DIV=OBS - assert "NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: '13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: '23'}, value=0, id='obs-slider-graph'), 
className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)" in str (code.obs_time_slider(div='obs', start=0, end=23, step=1)) + assert "NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: '13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: '23'}, value=0, id='obs-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)" in str(code.obs_time_slider(div='obs', start=0, end=23, step=1, start_date='20120120', end_date='20220831')) #TEST DIV=OBS-VIS - assert "NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-vis-date-picker'), className='timesliderline'), None, Span(children=Slider(min=0, max=23, step=1, marks={0: {'label': '00-01'}, 1: {'label': '01-02'}, 2: {'label': '02-03'}, 3: {'label': '03-04'}, 4: {'label': '04-05'}, 5: {'label': 
'05-06'}, 6: {'label': '06-07'}, 7: {'label': '07-08'}, 8: {'label': '08-09'}, 9: {'label': '09-10'}, 10: {'label': '10-11'}, 11: {'label': '11-12'}, 12: {'label': '12-13'}, 13: {'label': '13-14'}, 14: {'label': '14-15'}, 15: {'label': '15-16'}, 16: {'label': '16-17'}, 17: {'label': '17-18'}, 18: {'label': '18-19'}, 19: {'label': '19-20'}, 20: {'label': '20-21'}, 21: {'label': '21-22'}, 22: {'label': '22-23'}, 23: {'label': '23-24', 'style': {'left': '', 'right': '-32px'}}}, value=6, id='obs-vis-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)" in str (code.obs_time_slider(div='obs-vis', start=0, end=23, step=1)) + from utils import get_vis_edate + from datetime import datetime + hour = datetime.now().hour + _, default_tstep = get_vis_edate('20220831', hour=hour) + assert "NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-vis-date-picker'), className='timesliderline'), None, Span(children=Slider(min=0, max=18, step=6, marks={0: {'label': '00-06'}, 6: {'label': '06-12'}, 12: {'label': '12-18'}, 18: {'label': '18-24', 'style': {'left': '', 'right': '-32px'}}}, value=%(default_tstep)s, id='obs-vis-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)" % { 'default_tstep': default_tstep } in str(code.obs_time_slider(div='obs-vis', start=0, end=18, step=6, start_date='20120120', end_date='20220831')) #TEST DIV = OBS-AOD - assert 
"NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20210318', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2021, 3, 18, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2021, 3, 18, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-aod-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-aod-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: '13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: '23'}, value=0, id='obs-aod-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)" in str (code.obs_time_slider(div='obs-aod', start=0, end=23, step=1)) +# assert "NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20210318', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2021, 3, 18, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2021, 3, 18, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-aod-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-aod-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: '13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: '23'}, value=0, id='obs-aod-slider-graph'), 
className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True)" in str (code.obs_time_slider(div='obs-aod', start=0, end=23, step=1)) @@ -31,12 +35,11 @@ def test_sidebar_observations(): #=================TEST TAB_OBSERVATIONS=========================== def test_tab_observations(): #TEST RGB - assert "Tab(children=[Span(children=P('EUMETSAT RGB'), className='description-title'), Span(children=P([B('\\n You can explore key observations that can be used to track dust events. \\n '), ' All observations are kindly offered by Partners of the WMO Barcelona Dust Regional Center. RGB is a qualitative satellite product that indicates desert dust in the entire atmospheric column (represented by pink colour).']), className='description-body'), Div(children=[Button(children='HEMISPHERIC', id='btn-fulldisc', active=True), Button(children='MIDDLE EAST', id='btn-middleeast', active=False)], id='rgb-buttons'), Div(children=[Img(id='rgb-image', alt='EUMETSAT RGB - NOT AVAILABLE', src='./assets/eumetsat/FullDiscHD/archive/20220831/FRAME_OIS_RGB-dust-all_202208310000.gif'), Div(children=NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: '13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: 
'23'}, value=0, id='obs-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True), className='layout-dropdown')], className='centered-image'), Div(Interval(id='obs-slider-interval', disabled=True, interval=1000, n_intervals=0))], id='observations-tab', className='horizontal-menu', label='Observations', value='observations-tab')" in str(code.tab_observations('rgb')) + assert "Tab(children=[Span(children=P('EUMETSAT RGB'), className='description-title'), Span(children=P([B('\\n You can explore key observations that can be used to track dust events. \\n '), ' All observations are kindly offered by Partners of the WMO Barcelona Dust Regional Center. RGB is a qualitative satellite product that indicates desert dust in the entire atmospheric column (represented by pink colour).']), className='description-body'), Div(children=[Button(children='HEMISPHERIC', id='btn-fulldisc', active=True), Button(children='MIDDLE EAST', id='btn-middleeast', active=False)], id='rgb-buttons'), Div(children=[Img(id='rgb-image', alt='EUMETSAT RGB - NOT AVAILABLE', src='./assets/eumetsat/FullDiscHD/archive/20220831/FRAME_OIS_RGB-dust-all_202208310000.gif'), Div(children=NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: 
'13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: '23'}, value=0, id='obs-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True), className='layout-dropdown')], className='centered-image'), Div(Interval(id='obs-slider-interval', disabled=True, interval=1000, n_intervals=0))], id='observations-tab', className='horizontal-menu', label='Observations', value='observations-tab')" in str(code.tab_observations('rgb', start_date='20120120', end_date='20220831')) #TEST VISIBILITY - ots = code.obs_time_slider(div='obs-vis', - start=0, - end=18, - step=6) - default_tstep = ots.children[0].children[2].children.value - assert "Tab(children=[Span(children=P('Visibility'), className='description-title'), Span(children=P([B('You can explore key observations that can be used to track dust events. '), 'All observations are kindly offered by Partners of the WMO Barcelona Dust Regional Center. 
The reduction of VISIBILITY is an indirect measure of the occurrence of sand and dust storms on the surface.']), className='description-body'), Div(children=[], id='obs-vis-graph', className='graph-with-slider'), Div(children=[NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-vis-date-picker'), className='timesliderline'), None, Span(children=Slider(min=0, max=18, step=6, marks={0: {'label': '00-06'}, 6: {'label': '06-12'}, 12: {'label': '12-18'}, 18: {'label': '18-24', 'style': {'left': '', 'right': '-32px'}}}, value=%(default_tstep)s, id='obs-vis-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True), Br(None), Br(None), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer')], className='layout-dropdown rgb-layout-dropdown')], id='observations-tab', className='horizontal-menu', label='Observations', value='observations-tab')" % { 'default_tstep': default_tstep } in str(code.tab_observations('visibility')) + from utils import get_vis_edate + from datetime import datetime + hour = datetime.now().hour + _, default_tstep = get_vis_edate('20220831', hour=hour) + assert "Tab(children=[Span(children=P('Visibility'), className='description-title'), Span(children=P([B('You can explore key observations that can be used to track dust events. '), 'All observations are kindly offered by Partners of the WMO Barcelona Dust Regional Center. 
The reduction of VISIBILITY is an indirect measure of the occurrence of sand and dust storms on the surface.']), className='description-body'), Div(children=[], id='obs-vis-graph', className='graph-with-slider'), Div(children=[NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-vis-date-picker'), className='timesliderline'), None, Span(children=Slider(min=0, max=18, step=6, marks={0: {'label': '00-06'}, 6: {'label': '06-12'}, 12: {'label': '12-18'}, 18: {'label': '18-24', 'style': {'left': '', 'right': '-32px'}}}, value=%(default_tstep)s, id='obs-vis-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True), Br(None), Br(None), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer')], className='layout-dropdown rgb-layout-dropdown')], id='observations-tab', className='horizontal-menu', label='Observations', value='observations-tab')" % { 'default_tstep': default_tstep } in str(code.tab_observations('visibility', start_date='20120120', end_date='20220831')) diff --git a/tests/test_observations_callbacks.py b/tests/test_observations_callbacks.py index 2a2ce1d8366a43ef13d63a02c5d2b9c195def72d..bb7b3982cffcde82f962404c134b6d4930551200 100644 --- a/tests/test_observations_callbacks.py +++ b/tests/test_observations_callbacks.py @@ -11,7 +11,9 @@ code = importlib.import_module('tabs.observations_callbacks') def test_render_observations_tab(): # CASE 1, RGB 1 CLICK, VISIBILITY NO CLICK def run_callback(): - context_value.set(AttributeDict(**{"triggered_inputs": 
[{"prop_id": "rgb.n_clicks"},{"prop_id": "visibility.n_clicks"}]})) + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "rgb.n_clicks"}, {"prop_id": "visibility.n_clicks"}]})) + code.START_DATE = '20120120' + code.END_DATE = '20220831' return code.render_observations_tab(1, 0) ctx = copy_context() @@ -23,12 +25,70 @@ def test_render_observations_tab(): def test_render_observations_tab_visibility(): # CASE 2, RGB NO CLICK, VISIBILITY 1 CLICK def run_callback(): - context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "rgb.n_clicks"},{"prop_id": "visibility.n_clicks"}]})) - return code.render_observations_tab(1, 1) + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "visibility.n_clicks"}]})) + code.START_DATE = '20120120' + code.END_DATE = '20220831' + return code.render_observations_tab(0, 1) + from utils import get_vis_edate + from datetime import datetime + hour = datetime.now().hour + _, default_tstep = get_vis_edate('20220831', hour=hour) ctx = copy_context() output = ctx.run(run_callback) - assert "' All observations are kindly offered by Partners of the WMO Barcelona Dust Regional Center. 
RGB is a qualitative satellite product that indicates desert dust in the entire atmospheric column (represented by pink colour).']), className='description-body'), Div(children=[Button(children='HEMISPHERIC', id='btn-fulldisc', active=True), Button(children='MIDDLE EAST', id='btn-middleeast', active=False)], id='rgb-buttons'), Div(children=[Img(id='rgb-image', alt='EUMETSAT RGB - NOT AVAILABLE', src='./assets/eumetsat/FullDiscHD/archive/20220831/FRAME_OIS_RGB-dust-all_202208310000.gif'), Div(children=NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-date-picker'), className='timesliderline'), Span(children=[Button(id='btn-obs-play', className='fa fa-play', n_clicks=0, title='Play')], className='timesliderline anim-buttons'), Span(children=Slider(min=0, max=23, step=1, marks={0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: '10', 11: '11', 12: '12', 13: '13', 14: '14', 15: '15', 16: '16', 17: '17', 18: '18', 19: '19', 20: '20', 21: '21', 22: '22', 23: '23'}, value=0, id='obs-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True), className='layout-dropdown')], className='centered-image'), Div(Interval(id='obs-slider-interval', disabled=True, interval=1000, n_intervals=0))], id='observations-tab', className='horizontal-menu', label='Observations', value='observations-tab')" in str(output[0]) - assert output[1:] == ({ 'font-weight': 'bold' }, { 'font-weight': 'normal' }, 'rgb') + print("*********************") + print(output[0]) + print(output[1:]) + print("*********************") + assert "'All 
observations are kindly offered by Partners of the WMO Barcelona Dust Regional Center. The reduction of VISIBILITY is an indirect measure of the occurrence of sand and dust storms on the surface.']), className='description-body'), Div(children=[], id='obs-vis-graph', className='graph-with-slider'), Div(children=[NavbarSimple(children=[Div(children=[Span(children=DatePickerSingle(date='20220831', min_date_allowed=datetime.datetime(2012, 1, 20, 0, 0), max_date_allowed=datetime.datetime(2022, 8, 31, 0, 0), placeholder='DD MON YYYY', initial_visible_month=datetime.datetime(2022, 8, 31, 0, 0), clearable=True, reopen_calendar_on_clear=True, display_format='DD MMM YYYY', id='obs-vis-date-picker'), className='timesliderline'), None, Span(children=Slider(min=0, max=18, step=6, marks={0: {'label': '00-06'}, 6: {'label': '06-12'}, 12: {'label': '12-18'}, 18: {'label': '18-24', 'style': {'left': '', 'right': '-32px'}}}, value=%(default_tstep)s, id='obs-vis-slider-graph'), className='timesliderline')], className='timeslider')], id='rgb-navbar', className='fixed-bottom navbar-timebar', dark=True, expand='lg', fixed='bottom', fluid=True), Br(None), Br(None), Div(children=[Span(children=P('Dust data ©2023 WMO Barcelona Dust Regional Center.'), id='forecast-disclaimer')], className='disclaimer')], className='layout-dropdown rgb-layout-dropdown')], id='observations-tab', className='horizontal-menu', label='Observations', value='observations-tab')" % { 'default_tstep': default_tstep }in str(output[0]) + assert output[1:] == ({ 'font-weight': 'normal' }, { 'font-weight': 'bold' }, 'visibility') + + + +#NEED TO MOCK FIRST RETURN OBJECT +# def test_update_image_src(): +# def run_callback(): +# context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "btn_fulldisc.n_clicks"},{"prop_id": "btn_middleeast.n_clicks"},{"prop_id": "obs-date-picker.date"},{"prop_id": "obs-slider-graph.value"},{"prop_id": "btn_fulldisc.active"},{"prop_id": "btn_middleeast.active"}]})) +# return 
code.update_image_src.uncached(0, 0, '20220808', 0, True, False) +# +# ctx = copy_context() +# output = ctx.run(run_callback) +# assert output == (True, 3, 'fa fa-play text-center') + +def test_start_stop_obs_autoslider_pause(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "btn-obs-play.n_clicks"},{"prop_id": "slider-interval.disabled"},{"prop_id": "slider-graph.value"}]})) + return code.start_stop_obs_autoslider.uncached(1, False, 3) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (True, 3, 'fa fa-play text-center') + +def test_start_stop_obs_autoslider_play(): + def run_callback(): + context_value.set(AttributeDict(**{"triggered_inputs": [{"prop_id": "btn-obs-play.n_clicks"},{"prop_id": "slider-interval.disabled"},{"prop_id": "slider-graph.value"}]})) + return code.start_stop_obs_autoslider.uncached(1, True, 9) + + ctx = copy_context() + output = ctx.run(run_callback) + assert output == (False, 9, 'fa fa-pause text-center') + +def test_update_obs_slider(): + assert code.update_obs_slider.uncached(1) == 1 + assert code.update_obs_slider.uncached(9) == 9 + assert code.update_obs_slider.uncached(25) == 1 + assert code.update_obs_slider.uncached(72) == 0 + assert code.update_obs_slider.uncached(56) == 8 + +def test_update_vis_figure(): + run = code.update_vis_figure.uncached('20220808', 1, [5], [45,45]) + assert "Map(children=[TileLayer(id={'tag': 'obs-vis-tile-layer', 'index': 'None'}" in str(run) + + assert "id='vis-info', className='info', style={'position': 'absolute', 'top': '10px', 'left': '10px', 'zIndex': '1000', 'fontFamily'" in str(run) + +def test_update_vis_figure_date_none(): + run = code.update_vis_figure.uncached(None, 1, [5], [45,45]) + assert "id={'tag': 'obs-vis-map', 'index': 'None'}, animate=False, center=45, inertia=True, minZoom=2, preferCanvas=True, wheelDebounceTime=80, wheelPxPerZoomLevel=120, zoom=5, zoomSnap=0.1)" in str(run) + diff --git a/tests/test_router.py 
b/tests/test_router.py index d47859590772e94a4ebd9575009c423c765f3b50..34bf61160190aad696bd2fc6d230873d7fc52a1b 100644 --- a/tests/test_router.py +++ b/tests/test_router.py @@ -31,10 +31,10 @@ def test_eval_section_query(): #============ TEST get_url_queries================================= def test_get_url_queries(): - url = '/dashboard/?var=aod&model=cams&date=20230110' + url = 'http://localhost/?var=aod&model=cams&date=20230110' assert code.get_url_queries(url, ROUTE_DEFAULTS) == {'date': ['20230110'], 'eval_option': ['nrt'], 'for_option': ['models'], 'model': ['cams'], 'obs_option': ['rgb'], 'tab': ['forecast-tab'], 'var': ['OD550_DUST'], 'country':['burkinafaso'], 'download': [None]} - url = "http://localhost/?tab=forecast§ion=models&model=multi-model&var=concentration&country=chad" + url = "http://localhost/?tab=forecast§ion=models&model=multi-model&var=concentration&country=chad&date=20220831" result = code.get_url_queries(url) expected = {'tab': ['forecast-tab'], 'var': ['SCONC_DUST'], diff --git a/tests/test_tools.py b/tests/test_tools.py new file mode 100644 index 0000000000000000000000000000000000000000..bacf932db358504bc304551e084c08d5e674d31c --- /dev/null +++ b/tests/test_tools.py @@ -0,0 +1,64 @@ +import pytest +import importlib +code = importlib.import_module('tools') + +def test_download_image_link(): + assert code.download_image_link(['median', 'monarch'], 'OD550_DUST', '20220808', 1, False) == 'assets/comparison/all/od550_dust/2022/08/20220808_all_01.png' + + assert code.download_image_link(['median'], 'DEPW_DUST', '20220808', 6, True) == 'assets/comparison/median/depw_dust/2022/08/20220808_median_loop.gif' + +def test_get_timeseries(): + assert code.get_timeseries('median', '20220808', 'OD550_DUST', 45, 45, forecast=False).layout.title.text == 'Dust Optical Depth @ lat = 45 and lon = 45' + + assert code.get_timeseries('monarch', '20220808', 'SCONC_DUST', 5, 44, forecast=True).layout.title.text == 'Dust Surface Conc. 
(µg/m³) @ lat = 5 and lon = 44' + +def test_get_single_point(): + assert float(code.get_single_point('median', '20220808', 1, 'SCONC_DUST', 5, 44)) == 6.047785205964828e-08 + assert float(code.get_single_point('median', '20220808', 3, 'OD550_DUST', 25, 40)) == 0.47600093483924866 + assert float(code.get_single_point('cams', '20220808', 12, 'SCONC_DUST', 15, 4)) == 1.8746418106729834e-07 + +def test_get_scores_figure(): + run1 = code.get_scores_figure('aeronet', 'monarch', 'bias', '202208') + assert str(run1.data[0]) == 'Scattermapbox()' + assert str(run1.data[1].name) == 'MBE score' + + run2 = code.get_scores_figure('modis', 'median', 'bias', '202208') + assert str(run2.data[0]) == 'Scattermapbox()' + assert list(run2.data[1].lat[1:5]) ==[0.25, 0.25, 0.25, 0.25] + +def test_get_prob_figure(): + assert str(code.get_prob_figure('OD550_DUST', prob=0.5, day=0, selected_date='20220808')[2].children) == "P(B(['Daily Mean of Dust AOD', Br(None), 'Probability of exceeding 0.5', Br(None), 'ENS members: 10 Run: 08 Aug 2022 Valid: 08 Aug 2022']))" + assert code.get_prob_figure('OD550_DUST', prob=0.8, day=1, selected_date='20220808')[1].classes ==[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + +def test_get_was_figure(): + result = "(GeoJSON(hideout={'colorscale': ['green', 'gold', 'darkorange', 'red'], 'bounds': [0, 1, 2, 3], 'style': {'weight': 1, 'opacity': 1, 'color': 'white', 'dashArray': '3', 'fillOpacity': 0.7}, 'colorProp': 'value'}, hoverStyle={'arrow': {'weight': 2, 'color': 'white', 'dashArray': '', 'fillOpacity': 0.7}}, options={'style': {'variable': 'forecastTab.wasMaps.styleHandle'}}, url='/dashboard/assets/geojsons/was/burkinafaso/geojson/20220808/20220808_SCONC_DUST_1.geojson'" + assert result in str(code.get_was_figure(was='burkinafaso', day=1, selected_date='20220808')) + + result2 = "(GeoJSON(hideout={'colorscale': ['green', 'gold', 'darkorange', 'red'], 'bounds': [0, 1, 2, 3], 'style': {'weight': 1, 'opacity': 1, 'color': 'white', 'dashArray': '3', 'fillOpacity': 
0.7}, 'colorProp': 'value'}, hoverStyle={'arrow': {'weight': 2, 'color': 'white', 'dashArray': '', 'fillOpacity': 0.7}}, options={'style': {'variable': 'forecastTab.wasMaps.styleHandle'}}, url='/dashboard/assets/geojsons/was/chad/geojson/20220808/20220808_SCONC_DUST_2.geojson')" + assert result2 in str(code.get_was_figure(was='chad', day=2, selected_date='20220808')) + +def test_get_vis_figure(): + result = "Div(children=['Visibility reduced by airborne dust', Br(None), '08 August 2022 00-06 UTC'], id='vis-info', className='info', style={'position': 'absolute', 'top': '10px', 'left': '10px', 'zIndex': '1000', 'fontFamily': " + result2 = "Div(children=[Div(children=[Span(children='', className='vis-legend-point', style={'backgroundColor': '#714921'}), Span(children='<1 km', className='vis-legend-label')], style={'display': 'block'}), Div(children=[Span(children='', className='vis-legend-point', style={'backgroundColor': '#da7230'}), Span(children='1 - 2 km', className='vis-legend-label')]," + run1 = str(code.get_vis_figure(tstep=0, selected_date='20220808')[1][1]) + run2 = str(code.get_vis_figure(tstep=0, selected_date='20220808')[1][2]) + assert result in run1 + assert result2 in run2 + +def test_get_figure(): + result = "/dashboard/assets/geojsons/median/geojson/20220808/00_20220808_OD550_DUST.geojson" + run1 = str(code.get_figure(model='median', var='OD550_DUST', selected_date='20220808', tstep=0, + hour=3, static=True, aspect=(1, 1), center=None, + view='carto-positron', zoom=None, layer=None, tag='empty').children[2].url) + assert result in run1 + + result2 = "{'tag': 'model-tile-layer', 'index': 'monarch'}" + run2 = str(code.get_figure(model='monarch', var='SCONC_DUST', selected_date='20220809', tstep=1, + hour=3, static=True, aspect=(1, 1), center=None, + view='carto-positron', zoom=None, layer=None, tag='empty').children[0].id) + assert result2 in run2 + + + + + diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 
0000000000000000000000000000000000000000..ac5aecec9c49b0c9b23378288fd581f506a10681 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,103 @@ +import pytest +import importlib +code = importlib.import_module('utils') +import numpy as np + +@pytest.fixture +def netcdf_data(): + return '/data/products/median/netcdf/20220801_3H_MEDIAN.nc' + +def test_concat_dataframes(): + assert code.concat_dataframes('expect_fail', ['AUG', 'SEP'], 'OD550_DUST', None, None) == (None, None) +#needs more tests + +def test_retrieve_single_point_1(netcdf_data): + result = np.float32(0.031516124) + assert code.retrieve_single_point(netcdf_data, 1, 45, 45, 'OD550_DUST') == result + +def test_retrieve_single_point_2(netcdf_data): + result = np.float32(0.00066542445) + assert code.retrieve_single_point(netcdf_data, 9, 145, -45, 'OD550_DUST') == result + +def test_retrieve_single_point_3(netcdf_data): + result = np.float32(0.00046437158) + assert code.retrieve_single_point(netcdf_data, 1, -5, 175, 'OD550_DUST') == result + +def test_retrieve_timeseries_1(netcdf_data): + run = code.retrieve_timeseries(netcdf_data, 0, 0, 'OD550_DUST' , method='netcdf', forecast=False) + assert run[0] == 0.25 + +def test_retrieve_timeseries_2(netcdf_data): + run = code.retrieve_timeseries(netcdf_data, 45, 35, 'OD550_DUST' , method='netcdf', forecast=False) + assert run[0] == 45.25 + +def test_retrieve_timeseries_3(netcdf_data): + run = code.retrieve_timeseries(netcdf_data, 0, 0, 'OD550_DUST' , method='netcdf', forecast=False) + assert run[-1][1].description == 'Dust AOD at 550 nm' + +def test_find_nearest(): + lst = np.array([-7.75, -6.25, -3, -1, 0, 1, 3.35, 5, 7]) + target = 2.5 + assert code.find_nearest(lst, target) == 3.35 + + lst = np.array([-7.75, -6.25, -3, -1, 0, 1, 3.35, 5, 7]) + target = -5 + assert code.find_nearest(lst, target) == -6.25 + + target = -50 + assert code.find_nearest(lst, target) == -7.75 + +# NOT USED +# def find_nearest2(array, value): + +def test_calc_matrix(): + assert 
code.calc_matrix(0) == (1, 0) + assert code.calc_matrix(1) == (1, 1) + assert code.calc_matrix(2) == (2, 1) + assert code.calc_matrix(3) == (2, 2) + assert code.calc_matrix(4) == (2, 2) + assert code.calc_matrix(5) == (3, 2) + assert code.calc_matrix(6) == (3, 2) + assert code.calc_matrix(9) == (3, 3) + assert code.calc_matrix(12) == (4, 3) + +def test_magnitude(): + assert code.magnitude(0)==0 + assert code.magnitude(1)==0 + assert code.magnitude(12)==1 + assert code.magnitude(20)==1 + assert code.magnitude(21)==1 + assert code.magnitude(211)==2 + assert code.magnitude(3112)==3 + assert code.magnitude(31124)==4 + +def test_normalize_vals(): + bounds = [-0.1, -0.08, -0.06, -0.04, -0.02, 0., 0.02, 0.04, 0.06, 0.08, 0.1] + bounds = np.array(bounds).astype('float32') + result = [0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ] + magn = code.magnitude(bounds[-1]) + n_bounds = code.normalize_vals(bounds, bounds[0], bounds[-1], magn) + assert np.array_equiv(n_bounds, np.array(result).astype('float32')) + + bounds = [0., 0.2, 0.4, 0.6, 0.8, 1., 1.2, 1.4, 1.6, 1.8, 2.] + bounds = np.array(bounds).astype('float32') + result = [0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ] + magn = code.magnitude(bounds[-1]) + n_bounds = code.normalize_vals(bounds, bounds[0], bounds[-1], magn) + assert np.array_equiv(n_bounds, np.array(result).astype('float32')) + +def test_get_colorscale(): + bounds1 = [-0.1, -0.08, -0.06, -0.04, -0.02, 0., 0.02, 0.04, 0.06, 0.08, 0.1] + bounds2 = [0., 0.2, 0.4, 0.6, 0.8, 1., 1.2, 1.4, 1.6, 1.8, 2.] 
+ color1 = 'viridis' + color2 = 'RdBu_r' + assert code.get_colorscale(bounds1, color1, True)[0] == [0.0, 'rgba(68, 1, 84, 255)'] + assert code.get_colorscale(bounds2, color2, True)[0] == [0.0, 'rgba(5, 48, 97, 255)'] + +def test_get_vis_edate(): + assert code.get_vis_edate('20220808', hour=0) == ('20220807', 12) + assert code.get_vis_edate('20220808', hour=1) == ('20220807', 12) + assert code.get_vis_edate('20220809', hour=3) == ('20220808', 18) + assert code.get_vis_edate('20220609', hour=12) == ('20220609', 0) + assert code.get_vis_edate('20220609', hour=33) == ('20220610', 0) + assert code.get_vis_edate('20220609', hour=72) == ('20220611', 12) diff --git a/tools.py b/tools.py index 7242004440defc6eb56e3e8c076fd81f1a337ae6..56ab34156c969d05b303730e2286517e35c81602 100644 --- a/tools.py +++ b/tools.py @@ -2,8 +2,6 @@ # -*- coding: utf-8 -*- """ Tools module with functions related to plots """ -from dash import dcc - from data_handler import FigureHandler from data_handler import WasFigureHandler from data_handler import ProbFigureHandler @@ -13,27 +11,16 @@ from data_handler import TimeSeriesHandler from data_handler import ObsTimeSeriesHandler from data_handler import Observations1dHandler from data_handler import DEBUG -from data_handler import DATES from data_handler import MODELS -from data_handler import FREQ -#from dash_server import cache -from utils import calc_matrix +from data_handler import END_DATE, DELAY, DELAY_DATE from datetime import datetime as dt from datetime import timedelta -from PIL import Image -import tempfile -import gif -import os.path -import subprocess -import base64 - -start_date = DATES['start_date'] -end_date = DATES['end_date'] or (dt.now() - timedelta(days=1)).strftime("%Y%m%d") +import os -def download_image_link(models, variable, curdate, tstep=0, anim=False, allmodels=False): - """ """ +def download_image_link(models, variable, curdate, tstep=0, anim=False): + """ Generates links to animated gifs """ if DEBUG: print('CURRDIR', 
os.getcwd()) filepath = "assets/comparison/{model}/{variable}/{year}/{month}/{curdate}_{model}_{tstep}.{ext}" @@ -64,39 +51,6 @@ def download_image_link(models, variable, curdate, tstep=0, anim=False, allmodel if DEBUG: print('DOWNLOAD FILENAME', filename) return filename -def download_image(models, variable, curdate, tstep=0, anim=False, allmodels=False): - """ """ - if DEBUG: print('CURRDIR', os.getcwd()) - filepath = "/data/daily_dashboard/comparison/{model}/{variable}/{year}/{month}/{curdate}_{model}_{tstep}.{ext}" - - if len(models) == 1: - model = models[0] - if DEBUG: print('DOWNLOAD MODELS', model) - else: - if DEBUG: print('DOWNLOAD ALL MODELS') - model = "all" - if anim: - tstep = "loop" - ext = "gif" - if DEBUG: print('DOWNLOAD LOOP') - else: - tstep = "%02d" % tstep - ext = "png" - if DEBUG: print('DOWNLOAD PNG', tstep) - filename = filepath.format( - model=model, - variable=variable.lower(), - year=curdate[:4], - month=curdate[4:6], - curdate=curdate, - tstep=tstep, - ext=ext - ) - if DEBUG: print('DOWNLOAD FILENAME', filename) - if os.path.exists(filename): - return dcc.send_file( - filename, - ) def get_eval_timeseries(obs, start_date, end_date, var, idx, name, model): """ Retrieve timeseries """ @@ -114,12 +68,12 @@ def get_timeseries(model, date, var, lat, lon, forecast=False): return th.retrieve_timeseries(lat, lon, method='feather', forecast=forecast) -def get_single_point(model, date, tstep, var, lat, lon, forecast=True): - """ Retrieve timeseries """ +def get_single_point(model, date, tstep, var, lat, lon): + """ Retrieve sigle point """ if DEBUG: print('SERVER: SINGLE POINT init for models {} ... '.format(str(model))) th = TimeSeriesHandler(model, date, var) if DEBUG: print('SERVER: SINGLE POINT generation ... 
') - return th.retrieve_single_point(tstep, lat, lon, method='netcdf', forecast=forecast) + return th.retrieve_single_point(tstep, lat, lon) def get_obs1d(sdate, edate, obs, var): @@ -127,12 +81,14 @@ def get_obs1d(sdate, edate, obs, var): obs_handler = Observations1dHandler(sdate, edate, obs) return obs_handler.generate_obs1d_tstep_trace(var) -def get_scores_figure(network, model, statistic, selection=end_date): + +def get_scores_figure(network, model, statistic, selection=END_DATE): """ Retrieve 1D observation """ fh = ScoresFigureHandler(network, statistic, selection) return fh.retrieve_scores(model) -def get_prob_figure(var, prob=None, day=0, selected_date=end_date): + +def get_prob_figure(var, prob=None, day=0, selected_date=END_DATE): """ Retrieve figure """ if DEBUG: print(prob, day, selected_date) try: @@ -149,7 +105,8 @@ def get_prob_figure(var, prob=None, day=0, selected_date=end_date): if DEBUG: print('SERVER: NO PROB Figure') return ProbFigureHandler().retrieve_var_tstep() -def get_was_figure(was=None, day=1, selected_date=end_date): + +def get_was_figure(was=None, day=0, selected_date=END_DATE): """ Retrieve figure """ if DEBUG: print(was, day, selected_date) try: @@ -166,7 +123,8 @@ def get_was_figure(was=None, day=1, selected_date=end_date): if DEBUG: print('SERVER: NO WAS Figure') return WasFigureHandler().retrieve_var_tstep() -def get_vis_figure(tstep=0, selected_date=end_date): + +def get_vis_figure(tstep=0, selected_date=END_DATE): """ Retrieve figure """ if DEBUG: print(tstep, selected_date) try: @@ -183,9 +141,12 @@ def get_vis_figure(tstep=0, selected_date=end_date): if DEBUG: print('SERVER: NO VIS Figure') return VisFigureHandler().retrieve_var_tstep() -def get_figure(model=None, var=None, selected_date=end_date, tstep=0, hour=None, static=True, aspect=(1, 1), center=None, view='carto-positron', zoom=None, layer=None, tag='empty'): + +def get_figure(model=None, var=None, selected_date=END_DATE, tstep=0, + hour=None, static=True, aspect=(1, 
1), center=None, + view='carto-positron', zoom=None, layer=None, tag='empty'): """ Retrieve figure """ - if DEBUG: print("***", model, var, selected_date, tstep, hour, 'None' and layer == None or len(layer), "***") + if DEBUG: print("***", model, var, selected_date, tstep, hour, "***") try: selected_date = dt.strptime( selected_date, "%Y-%m-%d %H:%M:%S").strftime("%Y%m%d") @@ -195,15 +156,26 @@ def get_figure(model=None, var=None, selected_date=end_date, tstep=0, hour=None, if model: if DEBUG: print('SERVER: Figure init ... ') if model in MODELS and MODELS[model]['start'] == 12: - if tstep < 4: - selected_date = (dt.strptime(selected_date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d") - tstep = 4 + int(tstep) + if DELAY: + # OLD: for models that starts at 12h considering end_date = current_date - 1 + if tstep < 4: + selected_date = (dt.strptime(selected_date, + "%Y%m%d") - + timedelta(days=1)).strftime("%Y%m%d") + tstep = int(tstep) + 4 + else: + tstep = int(tstep) - 4 else: - tstep = int(tstep) - 4 + # NEW: for models that starts at 12h considering end_date = current_date + selected_date = (dt.strptime(selected_date, + "%Y%m%d") - + timedelta(days=1)).strftime("%Y%m%d") + tstep = int(tstep) + 4 if DEBUG: print('SERVER: Figure generation ... 
') fh = FigureHandler(model, selected_date) - return fh.retrieve_var_tstep(var, tstep, hour, static, aspect, center, view, zoom, layer, tag) + return fh.retrieve_var_tstep(var, tstep, hour, static, aspect, center, + view, zoom, layer, tag) if DEBUG: print('SERVER: No Figure') return FigureHandler().retrieve_var_tstep(layer=layer, center=center, selected_tiles=view, zoom=zoom, tag=tag) diff --git a/utils.py b/utils.py index faad11307df95eae4da9012661d3aab88d123d8b..13ddd3175b6bd17721ad013ed7ede699694d7bea 100644 --- a/utils.py +++ b/utils.py @@ -2,23 +2,16 @@ # -*- coding: utf-8 -*- """ Utils module with utility functions """ +import math +import os.path +from datetime import datetime +from datetime import timedelta import matplotlib as mpl from matplotlib import cm import xarray as xr import numpy as np import pandas as pd -import math import feather -import os.path -from datetime import datetime -from datetime import timedelta - - -TIMES = { - 'animation': 900, - 'transition': 500, - 'slider_transition': 500 -} def concat_dataframes(fname_tpl, months, variable, rename_from=None, notnans=None): @@ -48,7 +41,7 @@ def concat_dataframes(fname_tpl, months, variable, rename_from=None, notnans=Non return notnans, mon_dfs[mon_dfs['station'].isin(notnans)] -def retrieve_single_point(fname, tstep, lat, lon, variable, method='netcdf', forecast=True): +def retrieve_single_point(fname, tstep, lat, lon, variable): """ """ from data_handler import DEBUG if DEBUG: print(fname, tstep, lat, lon, variable) @@ -58,12 +51,8 @@ def retrieve_single_point(fname, tstep, lat, lon, variable, method='netcdf', for # print('TIMESERIES', fname, variable, lon, lat) if 'lat' in ds.variables: da = ds[variable].sel(lon=lon, lat=lat, method='nearest') - clat = 'lat' - clon = 'lon' else: da = ds[variable].sel(longitude=lon, latitude=lat, method='nearest') - clat = 'latitude' - clon = 'longitude' if DEBUG: print(da) return da.values[tstep] @@ -83,12 +72,13 @@ def retrieve_timeseries(fname, lat, lon, 
variable, method='netcdf', forecast=Fal variable = variable.lower() if variable not in df.columns: - return + return None, None, None, None n_lon = find_nearest(df[lon_col].values, lon) n_lat = find_nearest(df[lat_col].values, lat) ts = df[(df[lat_col] == n_lat) & (df[lon_col] == n_lon)][['time', variable]].set_index('time') + return n_lat, n_lon, ts.index, ts[variable] def preprocess(ds, n=8): @@ -124,8 +114,8 @@ def find_nearest2(array, value): if idx > 0 and (idx == len(array) or math.fabs(value - array[idx-1]) < math.fabs(value - array[idx])): return array[idx-1] - else: - return array[idx] + + return array[idx] def calc_matrix(n): @@ -155,7 +145,6 @@ def normalize_vals(vals, valsmin, valsmax, rnd=2): def get_colorscale(bounds, colormap, discrete=True): """ Create colorscale """ - from data_handler import DEBUG if isinstance(colormap, str): colormap = cm.get_cmap(colormap) @@ -202,6 +191,7 @@ def get_vis_edate(end_date, hour=None): edate = datetime.strptime(end_date, fmt_date) + timedelta(hours=now_hour) cdate = edate.strftime(fmt_date) + curr = None for idx, h in enumerate(hours[:-1]): curr = datetime.strptime("{} {:02d}:00".format(cdate, h), fmt_full) curr1 = datetime.strptime("{} {:02d}:00".format(cdate, hours[idx+1]), fmt_full) @@ -213,4 +203,7 @@ def get_vis_edate(end_date, hour=None): if DEBUG: print("NOW", edate, "CURR", curr, "H", curr.hour) break - return curr.strftime(fmt_date), curr.hour + if curr is not None: + return curr.strftime(fmt_date), curr.hour + + return cdate, edate.hour