diff --git a/.gitignore b/.gitignore index 5fd939dc721781d3ba503c57bf301d07b72f3f0d..63be71a286469f2aca018c967cf0152b9217fda5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,7 @@ .obsidian *.orig +run-nord3.sh +__pycache__ +grids/__pycache__ +util/__pycache__ +tests/__pycache__ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..9d58e0a4f3b7a884b846e1e42577745c3ba39ca6 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
+ +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MAPIES.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MAPIES.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/MAPIES" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MAPIES" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." 
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..216fcdb4566caed19d6f59e6033a32bb32043a3d --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +# +# MAPIES documentation build configuration file, created by +# sphinx-quickstart on Thu Aug 22 15:30:56 2024. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'MAPIES' +copyright = u'2024, cmeikle' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.0.1' +# The full version, including alpha/beta/rc tags. 
+release = '0.0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. 
Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'MAPIESdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'MAPIES.tex', u'MAPIES Documentation', + u'cmeikle', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'mapies', u'MAPIES Documentation', + [u'cmeikle'], 1) +] + +# If true, show URL addresses after external links. 
+#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'MAPIES', u'MAPIES Documentation', + u'cmeikle', 'MAPIES', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..387079bd6edb91470ced3b16271d2e87882a0c5a --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,22 @@ +.. MAPIES documentation master file, created by + sphinx-quickstart on Thu Aug 22 15:30:56 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to MAPIES's documentation! +================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..734c2f2091753ac3ee200d14628ca1903cd697ee --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,242 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . 
+if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
+ goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\MAPIES.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\MAPIES.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. 
+ echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/project-management/Guillaume draft.md b/docs/project-management/Guillaume draft.md similarity index 100% rename from project-management/Guillaume draft.md rename to docs/project-management/Guillaume draft.md diff --git a/project-management/MAPIES scope.md b/docs/project-management/MAPIES scope.md similarity index 100% rename from project-management/MAPIES scope.md rename to docs/project-management/MAPIES scope.md diff --git a/project-management/Meetings minutes.md b/docs/project-management/Meetings minutes.md similarity index 100% rename from project-management/Meetings minutes.md rename to docs/project-management/Meetings minutes.md diff --git a/project-management/Notes.md b/docs/project-management/Notes.md similarity index 100% rename from project-management/Notes.md rename to docs/project-management/Notes.md diff --git a/project-management/Plume_emission_estimation_application.md b/docs/project-management/Plume_emission_estimation_application.md similarity index 100% rename from project-management/Plume_emission_estimation_application.md rename to docs/project-management/Plume_emission_estimation_application.md diff --git a/project-management/Review of existent libraries.md b/docs/project-management/Review of existent libraries.md similarity index 100% rename from project-management/Review of existent libraries.md rename to docs/project-management/Review of existent libraries.md diff --git a/mapies/__init__.py 
b/mapies/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/mapies/config/logging_config.yaml b/mapies/config/logging_config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dffcdd1ff4d8eecc3895d0d237b9a2c9faf5d6be --- /dev/null +++ b/mapies/config/logging_config.yaml @@ -0,0 +1,41 @@ +version: 1 +disable_existing_loggers: False + +formatters: + simple: + format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s' + datefmt: '%Y-%m-%d %H:%M:%S' + +handlers: + console: + class: logging.StreamHandler + level: DEBUG + formatter: simple + stream: ext://sys.stdout + + file: + class: logging.FileHandler + level: INFO + formatter: simple + filename: mapies.log + mode: a + +loggers: + development: + level: DEBUG + handlers: [console] + propagate: no + + staging: + level: INFO + handlers: [console, file] + propagate: no + + production: + level: WARNING + handlers: [file] + propagate: no + +root: + level: DEBUG + handlers: [console] \ No newline at end of file diff --git a/mapies/config/satellite_config.yaml b/mapies/config/satellite_config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0e6b54b595bd288dc2a23faddb7d07230066aa55 --- /dev/null +++ b/mapies/config/satellite_config.yaml @@ -0,0 +1,23 @@ +version: 1 + +viirs: + variables: + time_variable: "Scan_Start_Time" + lon_variable: "Longitude" + lat_variable: "Latitude" + obs_variable: "Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate" # Should be able to update this if needed + da: + obsid: 34 + uncertainty_constants: [0.2, 0.05, 0.02] + grid_repr: "rotated" + +tropomi: + variables: + time_variable: "delta_time" + lon_variable: "longitude" + lat_variable: "latitude" + obs_variable: "nitrogendioxide_tropospheric_column" # Should be able to update this if needed + + + + diff --git a/mapies/grids/__init__.py b/mapies/grids/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/mapies/grids/monarch.py b/mapies/grids/monarch.py new file mode 100644 index 0000000000000000000000000000000000000000..93e49431ebd4178d9afb69f2664fe2e918e6d2b3 --- /dev/null +++ b/mapies/grids/monarch.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python +from dataclasses import dataclass +from pandas import DataFrame +import xarray as xr +import pandas as pd +import numpy as np +from typing import Tuple, Dict +from numpy import cos, sin, arctan, pi, nan +from functools import partial, wraps +import geopandas as gpd +from geopandas import GeoDataFrame +from shapely.geometry import Polygon, Point +import logging +from nes import * + +@dataclass +class Grid: + centre_lat: float + centre_lon: float + dlon: float + dlat: float + west: float = None + south: float = None + + + def calculate_grid_coords(self): + """ + Use Nes to create a grid representation + """ + nessy = create_nes(comm=None, info=False, projection=self.projection, + centre_lat=self.centre_lat, centre_lon=self.centre_lon, + west_boundary=self.west, south_boundary=self.south, + inc_rlat=self.dlon, inc_rlon=self.dlat) + nessy.create_shapefile() + self.gdf = nessy.shapefile + self.gdf["grid_cell"] = ["grid_cell_{}".format(i+1) for i in range(len(self.gdf))] + +@dataclass +class RotatedGrid(Grid): + projection: str = "rotated" + + def __post_init__(self): + self.calculate_grid_coords() + + def aggregate(self, lon, lat, obs, obserr=None): + """ + Aggregate + + Parameters: + lon, lat, obs values OPtional obserr + + Returns: + lon, lat, obs values + """ + if obserr is not None: + df = pd.DataFrame({"obs":obs, "lon":lon, "lat":lat, "obserr": obserr}) + else: + df = pd.DataFrame({"obs":obs, "lon":lon, "lat":lat}) + + df['coords'] = list(zip(df['lon'],df['lat'])) + df['coords'] = df['coords'].apply(Point) + + points = gpd.GeoDataFrame(df, geometry='coords', crs='epsg:4326') + + gdf = self.gdf.sjoin(points) + gdf = 
gdf.groupby("grid_cell").mean().reset_index() + + if obserr is not None: + return gdf["lon"].values, gdf["lat"].values, gdf["obs"].values, gdf["obserr"].values + else: + return gdf["lon"].values, gdf["lat"].values, gdf["obs"].values + + diff --git a/mapies/mapies.py b/mapies/mapies.py new file mode 100644 index 0000000000000000000000000000000000000000..5c84deea67d0c422d2fc08aa3d0a8a1b2346633d --- /dev/null +++ b/mapies/mapies.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python +# MAPIES base + +from functools import wraps +from datetime import datetime, timedelta +import sys +from util.func_tools import timeit, time_domain_selection, frequency_int, error_estimation +import time +import logging +import numpy as np +import pandas as pd +import xarray as xr +#import matplotlib +#matplotlib.use("TkAgg") # Use tinker to perform plt.show() as it is GUI supported +import matplotlib.pyplot as plt +import cartopy +import cartopy.crs as ccrs + + + + + +class MAPIES: + """ + Base class for only base functions used in every Mapies run + + For example Reading and writing functions + """ + def __init__(self, start_date, end_date, **kwargs): + """ + INITIATE the MAPIES class with the variables used in every run + """ + self.np_typetime = "datetime64[s]" + self.int_type = 'int64' + self.time_orig = np.datetime64("1900-01-01T00:00:00") + self.start_date = start_date + self.end_date = end_date + + + def preprocess_vars(self): + """ + Preprocessing of the dataset + """ + # Get all info about time columns + self.time_dims = self.ds[self.time_var].dims + self.time_shape = self.ds[self.time_var].shape + self.time_attrs = self.ds[self.time_var].attrs # Useful if we need to convert to datetime + + + # Get time values flatten and convert to datetime values + self.time_values = self.ds[self.time_var].values + self.time_values = self.time_values.flatten() + + if self.time_values.dtype == "timedelta64[ns]": + logging.info("Adding time origin to time values as the time variable is in timedelta") + 
self.time_values = np.add(self.time_orig, self.time_values) + self.time_values = pd.to_datetime(self.time_values) + else: + self.time_values = pd.to_datetime(self.time_values) + + + # Time domain selection + self.time_values, self.time_values_index = time_domain_selection(self.time_values, self.start_date, self.end_date) + + # TODO: We need to do some reindexing of the longitude and latitude variables too so that if we need them for regridding later we have them reindexed + + + + @timeit + def plot_2D_obs(self, outdir="./", **kwargs): + """ + Plotting the observations + """ + # TODO: Make these arguments + figsize = (15,10) + markersize = 2.5 + + # Set Basemap projection + proj = ccrs.PlateCarree() + + # Create the plot and add features, TODO make this adjustable in arguments + fig, ax = plt.subplots(subplot_kw={"projection": proj}, figsize=figsize) + ax.gridlines() + #ax.add_feature(cartopy.feature.BORDERS, linestyle=':', alpha=1) + #ax.add_feature(cartopy.feature.OCEAN,facecolor=("lightblue")) + #ax.add_feature(cartopy.feature.LAND) + ax.coastlines(resolution='10m') + + + + print("Plotting observations") + + + x, y = self.lon_values, self.lat_values + + im = ax.scatter(x,y,markersize,c=self.obs, transform=proj) + + fig.colorbar(im, ax=ax) + + ax.set_title(f'Observation 2D plot of {self.datatype.upper()} data from {self.start_date} to {self.end_date}') + print("Showing figure") + #plt.show() + print("Saving Figure") + plt.savefig("/esarchive/scratch/cmeikle/Projects/data/VIIRS/obslocation_regional_april23.png", format="png") + plt.close(fig) + + + @staticmethod + def reindex( + dependent_var_index, + independent_var_values, + ): + """ + Recutting the data whenever a selction of the data has been made along one of the dimenisons + + Based off of how it has been done in Providentia + """ + # Maybe add a checker try/except or assert + independent_var_values = independent_var_values[dependent_var_index] + + # return reindexed values + return independent_var_values + + 
+ + @staticmethod + def to_xarray(coords:dict, data_vars:dict, **kwargs): + """ + Method to convert numpy arrays to xarray opject + """ + attrs = kwargs.get("attrs") + if attrs is None: + attrs=dict() + + ds = xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs) + return ds + diff --git a/mapies/tests/__init__.py b/mapies/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/mapies/tests/test_func_tools.py b/mapies/tests/test_func_tools.py new file mode 100644 index 0000000000000000000000000000000000000000..94587e32f07d2b01b069610efbe6796b9ab97a67 --- /dev/null +++ b/mapies/tests/test_func_tools.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python + +import pytest +import pandas as pd +import numpy as np +from util.func_tools import time_converter, time_domain_selection + + + +@pytest.mark.parametrize( + "test_input,expected", + [ + ("2024079", pd.to_datetime("2024079", format="%Y%j")), + ("2024-06-07", pd.to_datetime("20240607", format="%Y%m%d")), + ("2024-06-(%$07", pd.to_datetime("20240607", format="%Y%m%d")), + ("202406071411", pd.to_datetime("202406071411", format="%Y%m%d%H%M")), + ] +) +def test_time_converter(test_input, expected): + assert time_converter(test_input) == expected + +def test_time_converter_error(): + with pytest.raises(ValueError): + time_converter("20240607141") + +""" +@pytest.mark.parametrize( + "test_input,expected", + [ + ([], np.array([])), + ] +) +def test_time_domain_selection(test_input, expected): + assert time_domain_selection(test_input) == expected +""" \ No newline at end of file diff --git a/mapies/tropomi.py b/mapies/tropomi.py new file mode 100644 index 0000000000000000000000000000000000000000..24b56ade6be3e7e1de13fc0670ecb24303f7e020 --- /dev/null +++ b/mapies/tropomi.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python +from dataclasses import dataclass, field +from typing import List, Tuple +from netCDF4 import Dataset, num2date, chartostring +from 
#!/usr/bin/env python
from dataclasses import dataclass, field
from typing import List, Tuple
from datetime import datetime, timedelta
from functools import wraps
import os
import time

import numpy as np
import pandas as pd
import xarray as xr
import yaml
from netCDF4 import Dataset, num2date, chartostring

from mapies import MAPIES
from util.func_tools import timeit, get_file_list, time_converter, inflate_array


class TROPOMI(MAPIES):
    """
    Class for TROPOMI specific data (S5P L2 NO2 product).
    """

    def __init__(self, start_date, end_date, **kwargs):
        """
        Inherited init class with new variables.

        :param start_date: start of the time domain (str or datetime-like)
        :param end_date: end of the time domain (str or datetime-like)
        :param obs_var: optional kwarg overriding the observation variable name
        """
        super().__init__(start_date, end_date)

        # TROPOMI delta-time values are offsets from this reference epoch
        self.time_orig = np.datetime64("1993-01-01T00:00:00")
        # BUGFIX: read_config() indexes the yaml config by self.datatype,
        # which the original never set for TROPOMI (AttributeError).
        self.datatype = "tropomi"

        # TODO: Add quality value filter number

        if isinstance(self.start_date, str):
            self.start_date = time_converter(self.start_date)
        if isinstance(self.end_date, str):
            self.end_date = time_converter(self.end_date)

        self.read_config(**kwargs)

    @timeit
    def read_config(self, **kwargs):
        """
        Read the yaml config file and set the time/lon/lat/obs variable names.
        """
        # BUGFIX: the original module used os and yaml without importing them.
        module_dir = os.path.dirname(__file__)
        with open(os.path.join(module_dir, "config/satellite_config.yaml")) as f:
            config = yaml.safe_load(f)

        variable_dict = config[self.datatype]["variables"]
        self.time_var = variable_dict["time_variable"]
        self.lon_var = variable_dict["lon_variable"]
        self.lat_var = variable_dict["lat_variable"]

        # If obs_var is passed then run the analysis on that variable,
        # otherwise fall back to the configured default.
        obs_var = kwargs.get("obs_var")
        self.obs_var = obs_var if obs_var else variable_dict["obs_variable"]

    @timeit
    def preprocess_vars(self):
        """
        Preprocessing of the dataset: flatten obs/lon/lat, inflate the time
        arrays to the observation shape, and reindex everything to the
        selected time domain.
        """
        super().preprocess_vars()

        # NO2 column shape — the time arrays are inflated to match it
        obs_shape = self.ds[self.obs_var].shape

        # Duplicate values in the time arrays to get the same shape as the
        # flattened observation array.
        # BUGFIX: the original passed an undefined `no2_shape` in the first call.
        self.time_values = inflate_array(self.time_values, self.time_shape, obs_shape)
        self.time_values_index = inflate_array(self.time_values_index, self.time_shape, obs_shape)

        # Lon values, cut down to the selected time domain
        self.lon_values = self.ds[self.lon_var].values.flatten()
        self.lon_values = self.reindex(self.time_values_index, self.lon_values)

        # Lat values, cut down to the selected time domain
        self.lat_values = self.ds[self.lat_var].values.flatten()
        self.lat_values = self.reindex(self.time_values_index, self.lat_values)

        # NO2 column values, cut down to the selected time domain
        # TODO: check the inflated values line up along the right dimensions
        self.obs = self.ds[self.obs_var].values.flatten()
        self.obs = self.reindex(self.time_values_index, self.obs)

    @timeit
    def read_nc(self, file_pattern=None):
        """
        Read netcdf files with xarray.

        :param file_pattern: optional glob pattern; defaults to the archive
            path previously hard-coded (kept for backward compatibility).
        """
        if file_pattern is None:
            file_pattern = (
                "/esarchive/obs/sentinel/tropomi/original_files/tropomi_OFFL_NO2/"
                "S5P_OFFL_L2__NO2____20230101T220249_20230101*.nc"
            )
        files = get_file_list(file_pattern)
        print(files)

        # Tropomi requires choosing the group. If we want to read anything
        # other than a single tropomi dataset we will need to do it in parallel.
        self.ds = xr.open_mfdataset(files, group="PRODUCT")

    @timeit
    def to_plumes(self):
        """
        Restructure the data to be passed to the plume calculation.

        BUGFIX: the original referenced undefined `time_values`/`data_vars`
        (and had `coords = coords = …`); build them from the preprocessed
        instance arrays instead.

        :return: xarray Dataset with time/lon/lat/obs along an "index" dim
        """
        coords = dict(time=(["index"], self.time_values))
        data_vars = dict(
            lon=(["index"], self.lon_values),
            lat=(["index"], self.lat_values),
            obs=(["index"], self.obs),
        )
        return self.to_xarray(coords=coords, data_vars=data_vars)


if __name__ == "__main__":
    start_date = "202301011430"
    end_date = "202401011531"
    c = TROPOMI(start_date, end_date)
    c.read_nc()
    c.preprocess_vars()
import time
import logging

import numpy as np
import numpy.typing  # ensure np.typing is importable for the annotations below
import pandas as pd

from functools import wraps
from numpy import cos, sin
from glob import glob
from typing import List

# NOTE: the unused imports of the original module (xarray, arctan, pi, nan,
# datetime) were removed — nothing in this file references them.


# Timing decorator
def timeit(func):
    """Decorator that prints how long the wrapped function took to run."""
    @wraps(func)
    def timeit_wrapper(*args, **kwargs):
        start_time = time.perf_counter()
        result = func(*args, **kwargs)
        end_time = time.perf_counter()
        total_time = end_time - start_time
        print(f'Function {func.__name__}{args} {kwargs} Took {total_time:.4f} seconds')
        return result
    return timeit_wrapper


# Function from Guillaume's script could be useful
# Only supported in python3.10
#def get_file_list(patterns: str | List[str]) -> List[str]:
def get_file_list(patterns) -> List[str]:
    """
    Expand one or several glob patterns into a flat list of file paths.

    :param patterns: one glob pattern (str) or a list of patterns / exact names
    :return: the matching list of files (glob order is filesystem-dependent —
        sort at the call site if reproducible ordering matters)
    """
    # 1st make sure we have a list of patterns
    if isinstance(patterns, str):
        patterns = [patterns]

    # Then decode them one by one
    files = []
    for pattern in patterns:
        logging.debug(f"getting file list matching {pattern}")
        files.extend(glob(pattern))

    return files


def exception_factory(exception, message):
    """Build an exception instance of the given class with the given message."""
    return exception(message)


def time_converter(date: str) -> pd.Timestamp:
    """
    Convert a date string to a pandas Timestamp.

    Special characters are stripped first, then the format is inferred from
    the remaining length (14=%Y%m%d%H%M%S, 12=%Y%m%d%H%M, 10=%Y%m%d%H,
    8=%Y%m%d, 7=%Y%j julian).

    :param date: date string to convert to a standardised format
    :raises ValueError: if the cleaned string matches none of the lengths
    """
    # Remove any special characters
    for ch in ['\\','`','*','_','{','}','[',']','(',')','%','>','#','+','-','.','!','$','\'', ':', ' ']:
        if ch in date:
            date = date.replace(ch, "")
            logging.info(f"Special character {ch} was replaced in the date")

    if len(date) == 14:    # Probable str format %Y%m%d%H%M%S
        date = pd.to_datetime(date, format="%Y%m%d%H%M%S")
    elif len(date) == 12:  # Probable str format %Y%m%d%H%M
        date = pd.to_datetime(date, format="%Y%m%d%H%M")
    elif len(date) == 10:  # Probable str format %Y%m%d%H
        date = pd.to_datetime(date, format="%Y%m%d%H")
    elif len(date) == 8:   # Probable str format %Y%m%d
        date = pd.to_datetime(date, format="%Y%m%d")
    elif len(date) == 7:   # Probable str format %Y%J (julian day)
        date = pd.to_datetime(date, format="%Y%j")
    else:
        raise exception_factory(ValueError, "Invalid date format")

    return date


def time_domain_selection(
        time_values,
        start_date,
        end_date,
        closed=None):
    """
    Select the indices of ``time_values`` falling inside [start_date, end_date].

    :param time_values: array of datetime-like values to cut down
    :param start_date: start of the time domain (str or datetime-like)
    :param end_date: end of the time domain (str or datetime-like)
    :param closed: like the old pandas date_range param (None, "left",
        "right"); None means both ends inclusive
    :return: tuple (selected time values, integer index array into the input)
    """
    # If a string is passed let pandas infer the format
    if isinstance(start_date, str):
        start_date = pd.to_datetime(start_date)
    if isinstance(end_date, str):
        end_date = pd.to_datetime(end_date)

    def is_between_times(array):
        # NOTE: "left"/"right" here describe which *end* is open, mirroring
        # the original implementation.
        if closed == "left":
            return (array > start_date) & (array <= end_date)
        elif closed == "right":
            return (array >= start_date) & (array < end_date)
        else:
            return (array >= start_date) & (array <= end_date)

    time_values_index = np.where(is_between_times(time_values))[0]  # np.where returns a tuple

    time_values = time_values[time_values_index]

    return time_values, time_values_index


def inflate_array(var_array, shape1, shape2):
    """
    Inflate an array by repeating it along one dimension.

    Useful when a flattened array must be repeated because the original array
    had fewer dimensions than the target array.

    NOTE(review): np.setdiff1d compares shape tuples as value sets, so this
    assumes the extra dimension sizes of shape2 are not already present in
    shape1 — confirm against the datasets this is used on.
    """
    dif1 = np.setdiff1d(shape1, shape2)
    dif2 = np.setdiff1d(shape2, shape1)

    repeats = np.concatenate((dif1, dif2))

    return np.repeat(var_array, repeats)


def frequency_int(time_interval):
    """
    Convert a frequency string to an integer value, used in the DA.

    "H" -> 1; otherwise the leading number is parsed, e.g. "3H" -> 3.
    BUGFIX: the original used only the first character, so "12H" became 1.
    """
    if time_interval == "H":
        return 1
    return int(time_interval[:-1])


def error_estimation(datatype: str, obs: np.typing.NDArray, unc_const: list) -> np.typing.NDArray:
    """
    Estimate the observation error for the given datatype.

    :param datatype: "viirs" is supported; "in-situ" is not yet implemented
    :param obs: observation values
    :param unc_const: uncertainty constants [slope, offset, floor]
    :raises NotImplementedError: for "in-situ" (original TODO retained)
    :raises ValueError: for an unknown datatype
    """
    if datatype == "viirs":
        return np.sqrt((unc_const[0] * obs + unc_const[1]) ** 2 + unc_const[2] ** 2)
    if datatype == "in-situ":
        # TODO: work out how to do this for each different element, e.g.
        # np.maximum(unc_const[e][1], obs * unc_const[e][0])
        # BUGFIX: the original fell through `pass` and returned an unbound
        # variable, raising NameError at runtime.
        raise NotImplementedError("error estimation for in-situ data is not implemented yet")
    raise ValueError(f"unknown datatype: {datatype!r}")


def geo_to_rot(lons, lats, centre_lon: float, centre_lat: float):
    """
    Rotate coordinates from cartesian lat/lon to rotated rlon/rlat (degrees).

    :param lons: longitudes in degrees
    :param lats: latitudes in degrees
    :param centre_lon: rotation centre longitude in degrees
    :param centre_lat: rotation centre latitude in degrees
    :return: tuple (rlon, rlat) in degrees
    """
    lons = np.radians(lons)
    lats = np.radians(lats)
    centre_lon = np.radians(centre_lon)
    centre_lat = np.radians(centre_lat)

    x = cos(centre_lat) * sin(lats) - sin(centre_lat) * cos(lats) * cos(lons - centre_lon)
    y = cos(lats) * sin(lons - centre_lon)
    z = cos(centre_lat) * cos(lats) * cos(lons - centre_lon) - sin(centre_lat) * sin(lats)
    rlon = np.arctan2(y, z)
    rlat = np.arcsin(x)
    # Convert back to degrees
    rlon = np.degrees(rlon)
    rlat = np.degrees(rlat)
    return rlon, rlat
#!/usr/bin/env python
from dataclasses import dataclass, field
from typing import List, Tuple
from datetime import datetime, timedelta
from functools import wraps
from pathlib import Path
import time
import logging
import os

import numpy as np
import pandas as pd
import xarray as xr
import yaml

from mapies import MAPIES
from util.func_tools import (
    timeit,
    get_file_list,
    time_converter,
    frequency_int,
    error_estimation,
    time_domain_selection,
    geo_to_rot,
)
from grids.monarch import RotatedGrid

# TODO: still need to do a load of filtering
# TODO: create yaml config entries for time, lat, lon, uncertainty const etc.
# Uncertainty constants should be able to be passed as number, array or dict


class VIIRS(MAPIES):
    """
    Class for VIIRS specific data.
    """

    def __init__(self, start_date, end_date, **kwargs):
        """
        Inherited init class with new variables.

        :param start_date: start of the time domain (str or datetime-like)
        :param end_date: end of the time domain (str or datetime-like)
        :param frequency: optional pandas offset alias (e.g. "3H") used to
            slice the period for the DA output
        :param dest: output directory for the DA files
        :param obs_var / unc_const / grid_repr: optional config overrides
        """
        super().__init__(start_date, end_date)

        # VIIRS time values are offsets from this reference epoch
        self.time_orig = np.datetime64("1993-01-01T00:00:00")
        self.datatype = "viirs"

        frequency = kwargs.get("frequency")
        # BUGFIX: the original called kwargs("dest") — a dict is not callable,
        # so every construction raised TypeError.
        self.dest = kwargs.get("dest")

        # pd.date_range raises when freq is None and only start/end are given,
        # so only build the date slices when a frequency was requested.
        if frequency:
            self.dates_slice = pd.date_range(
                self.start_date,
                self.end_date,
                freq=frequency,
            ).strftime('%Y%m%d%H%M')
            self.frequency = frequency_int(frequency)

        if isinstance(self.start_date, str):
            self.start_date = time_converter(self.start_date)
        if isinstance(self.end_date, str):
            self.end_date = time_converter(self.end_date)

        self.read_config(**kwargs)

    @timeit
    def read_config(self, **kwargs):
        """
        Read the yaml config file: variable names plus DA settings.
        """
        module_dir = os.path.dirname(__file__)
        with open(os.path.join(module_dir, "config/satellite_config.yaml")) as f:
            config = yaml.safe_load(f)

        variable_dict = config[self.datatype]["variables"]
        self.time_var = variable_dict["time_variable"]
        self.lon_var = variable_dict["lon_variable"]
        self.lat_var = variable_dict["lat_variable"]

        # If obs_var is passed then run the analysis on that variable,
        # otherwise fall back to the configured default.
        obs_var = kwargs.get("obs_var")
        self.obs_var = obs_var if obs_var else variable_dict["obs_variable"]

        da_dict = config[self.datatype]["da"]
        self.obsid_da = da_dict["obsid"]  # int

        unc_const = kwargs.get("unc_const")
        # float, int, list or dict
        self.unc_const = unc_const if unc_const else da_dict["uncertainty_constants"]

        grid_repr = kwargs.get("grid_repr")
        if grid_repr:
            self.grid_repr = grid_repr
        else:
            # BUGFIX: the original copy-pasted "uncertainty_constants" here;
            # the default grid representation lives under its own key.
            # NOTE(review): confirm the exact key name in satellite_config.yaml.
            self.grid_repr = da_dict.get("grid_repr")  # str

    @timeit
    def preprocess_vars(self):
        """
        Preprocessing of the dataset: flatten lon/lat/obs and reindex them
        to the selected time domain.
        """
        super().preprocess_vars()

        # Lon values, cut down to the selected time domain
        self.lon_values = self.ds[self.lon_var].values.flatten()
        self.lon_values = self.reindex(self.time_values_index, self.lon_values)

        # Lat values, cut down to the selected time domain
        self.lat_values = self.ds[self.lat_var].values.flatten()
        self.lat_values = self.reindex(self.time_values_index, self.lat_values)

        # AOD values, default "Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate"
        self.obs = self.ds[self.obs_var].values.flatten()
        self.obs = self.reindex(self.time_values_index, self.obs)

    # Load in unit conversion done by Dene and apply new formulas
    @timeit
    def rotate(self):
        """
        Perform rotation of the grid representation (aggregates lon/lat/obs).
        TODO: the rotated-grid parameters are hard-coded here and in to_da().
        """
        r = RotatedGrid(centre_lon=20, centre_lat=35, dlon=.1, dlat=.1, west=-51, south=-35)
        lon, lat, obs = r.aggregate(self.lon_values, self.lat_values, self.obs)
        self.lon_values = lon
        self.lat_values = lat
        self.obs = obs

    # This part needs improving with parallelization
    @timeit
    def read_nc(self, file_pattern=None):
        """
        Read netcdf files with xarray.

        :param file_pattern: optional glob pattern; defaults to the archive
            path previously hard-coded (kept for backward compatibility).
        """
        if file_pattern is None:
            # TODO: maybe change this to NETCDF4
            file_pattern = ('/esarchive/obs/nasa/viirs_noaa20_aerdb_l2_nrt/'
                            'original_files/VIIRS/2024/114/AERDB_L2_VIIRS_NOAA20.A2024114*')
        files = get_file_list(file_pattern)

        # A bit of preprocessing before reading in the files: give each
        # granule a unique "index" dimension based on its product name.
        def preprocess(ds):
            ds = ds.expand_dims(dim={"index": [ds.attrs["product_name"]]})
            return ds

        # Open dataset with xarray and dask
        self.ds = xr.open_mfdataset(files, preprocess=preprocess)

    @timeit
    def to_da(self):
        """
        Produce the netcdf files needed for the DA, one per frequency slice.

        For each date in self.dates_slice the time window is the slice's
        midpoint +/- frequency/2; obs/lon/lat/obserr are reindexed to that
        window, aggregated onto the rotated grid, and written to
        dest/obs<date>.nc.

        :return: list of the written file paths
        """
        # TODO: move this — grid parameters duplicated with rotate()
        r = RotatedGrid(centre_lon=20, centre_lat=35, dlon=.1, dlat=.1, west=-51, south=-35)
        # Calculate the observation error
        self.obserr = error_estimation(self.datatype, self.obs, self.unc_const)

        outfiles = []

        # We take each slice date as the midpoint of its window
        for date in self.dates_slice:
            l_border = (datetime.strptime(date, "%Y%m%d%H%M") - timedelta(hours=self.frequency/2)).strftime('%Y%m%d%H%M')
            r_border = (datetime.strptime(date, "%Y%m%d%H%M") + timedelta(hours=self.frequency/2)).strftime('%Y%m%d%H%M')
            filename = Path(self.dest).joinpath(f'obs{date}.nc')

            # Cut down the time interval again, then reindex obs, obserr,
            # lat and lon to that selection
            time_values, frequency_index = time_domain_selection(
                self.time_values,
                l_border,
                r_border,
            )

            obs = self.reindex(frequency_index, self.obs)
            lon = self.reindex(frequency_index, self.lon_values)
            lat = self.reindex(frequency_index, self.lat_values)
            obserr = self.reindex(frequency_index, self.obserr)

            # Aggregate onto the grid representation, then rotate
            lon, lat, obs, obserr = r.aggregate(lon, lat, obs, obserr)
            rlon, rlat = geo_to_rot(lon, lat, centre_lon=20, centre_lat=35)

            # Constant per-observation metadata arrays with the same length as obs
            # NOTE(review): self.int_type is expected to come from MAPIES — confirm
            obsid = np.full(shape=obs.shape, fill_value=self.obsid_da, dtype=self.int_type)
            obstype = np.full(shape=obs.shape, fill_value=30, dtype=self.int_type)
            obslev = np.full(shape=obs.shape, fill_value=-99999, dtype=self.int_type)
            obsn = np.full(shape=obs.shape, fill_value=1, dtype=self.int_type)
            time_values = np.full(shape=obs.shape, fill_value=date, dtype=str)

            assert obs.shape == obserr.shape
            assert lon.shape == time_values.shape

            # Coords equals index
            coords = dict(index=("index", np.indices(obs.shape)[0, :]))
            data_vars = dict(
                time=(["index",], time_values),
                lon=(["index",], lon),
                lat=(["index",], lat),
                rlon=(["index",], rlon),
                rlat=(["index",], rlat),  # Also return rlon/rlat values if available
                obs=(["index",], obs),
                obserr=(["index", ], obserr),
                obsid=(["index", ], obsid),
                obstype=(["index", ], obstype),
                obslev=(["index", ], obslev),
                obsn=(["index", ], obsn),
            )

            ds = self.to_xarray(coords=coords, data_vars=data_vars)

            ds.to_netcdf(filename, encoding={})
            outfiles.append(filename)

        return outfiles

    # Plots
    def plot_2D_obs(self, outdir="./", **kwargs):
        """
        Plot the observations specific to VIIRS: read, preprocess, rotate,
        then delegate to the base-class plotting.
        """
        self.read_nc()
        self.preprocess_vars()
        self.rotate()
        super().plot_2D_obs()


if __name__ == "__main__":
    start_date = "202404230730"
    end_date = "202404231830"
    c = VIIRS(start_date, end_date, frequency="3H", dest="/esarchive/scratch/cmeikle/Projects/data/VIIRS")
    c.plot_2D_obs()
    #c.to_da()


# ---- setup.py (a separate file in the original diff; kept verbatim) ----
# #!/usr/bin/env python
# # -*- coding: utf-8 -*-
#
# # Copyright 2024 Barcelona Supercomputing Center - Centro Nacional de
# # Supercomputación (BSC-CNS)
#
# # This file is part of MAPIES
#
# # MAPIES is currently an in-house software for processing large amounts of satellite and in-situ data
#
# from setuptools import find_packages
# from setuptools import setup
#
# # Could update this using versioneer
# version="0.0.1"
#
# setup(
#     name="mapies",
#     version=version,
#     author="Calum Meikle",
#     author_email="calum.meikle@bsc.es",
#     packages=find_packages(),
# )