diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..5e8f6be69075097306848e366b49ef7a7772a5f1
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,162 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+.vscode/
\ No newline at end of file
diff --git a/VERSION b/VERSION
index adb7b04cb2fa210fbe2ce1b09f9926f7bedea01e..c9d9681b383a1746eb074946893c6ce60c0d8090 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.0.27
+4.0.0b1
diff --git a/autosubmit_api.egg-info/PKG-INFO b/autosubmit_api.egg-info/PKG-INFO
deleted file mode 100644
index 31c238680622f81390d66c097e13a3e991728e12..0000000000000000000000000000000000000000
--- a/autosubmit_api.egg-info/PKG-INFO
+++ /dev/null
@@ -1,14 +0,0 @@
-Metadata-Version: 2.1
-Name: autosubmit-api
-Version: 1.0.27
-Summary: An extension to the Autosubmit package that serves its information as an API
-Home-page: https://earth.bsc.es/gitlab/wuruchi/autosubmit_api
-Author: Wilmer Uruchi
-Author-email: wilmer.uruchi@bsc.es
-License: GNU GPL
-Keywords: autosubmit,API
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
-Classifier: Operating System :: POSIX :: Linux
-Classifier: Programming Language :: Python :: 3.7
diff --git a/autosubmit_api.egg-info/SOURCES.txt b/autosubmit_api.egg-info/SOURCES.txt
deleted file mode 100644
index f0ccb7254c04d36adeeb15db22c11d7fdce57470..0000000000000000000000000000000000000000
--- a/autosubmit_api.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,161 +0,0 @@
-setup.py
-autosubmit_api/__init__.py
-autosubmit_api/app.py
-autosubmit_api/get-pip.py
-autosubmit_api.egg-info/PKG-INFO
-autosubmit_api.egg-info/SOURCES.txt
-autosubmit_api.egg-info/dependency_links.txt
-autosubmit_api.egg-info/requires.txt
-autosubmit_api.egg-info/top_level.txt
-autosubmit_api/autosubmit_legacy/__init__.py
-autosubmit_api/autosubmit_legacy/autosubmit.py
-autosubmit_api/autosubmit_legacy/job/__init__.py
-autosubmit_api/autosubmit_legacy/job/job.py
-autosubmit_api/autosubmit_legacy/job/job_common.py
-autosubmit_api/autosubmit_legacy/job/job_dict.py
-autosubmit_api/autosubmit_legacy/job/job_exceptions.py
-autosubmit_api/autosubmit_legacy/job/job_grouping.py
-autosubmit_api/autosubmit_legacy/job/job_list.py
-autosubmit_api/autosubmit_legacy/job/job_list_persistence.py
-autosubmit_api/autosubmit_legacy/job/job_package_persistence.py
-autosubmit_api/autosubmit_legacy/job/job_packager.py
-autosubmit_api/autosubmit_legacy/job/job_packages.py
-autosubmit_api/autosubmit_legacy/job/job_utils.py
-autosubmit_api/autosubmit_legacy/platforms/__init__.py
-autosubmit_api/autosubmit_legacy/platforms/ecmwf_adaptor.py
-autosubmit_api/autosubmit_legacy/platforms/ecplatform.py
-autosubmit_api/autosubmit_legacy/platforms/locplatform.py
-autosubmit_api/autosubmit_legacy/platforms/lsfplatform.py
-autosubmit_api/autosubmit_legacy/platforms/mn_adaptor.py
-autosubmit_api/autosubmit_legacy/platforms/paramiko_platform.py
-autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.py
-autosubmit_api/autosubmit_legacy/platforms/pbsplatform.py
-autosubmit_api/autosubmit_legacy/platforms/platform.py
-autosubmit_api/autosubmit_legacy/platforms/psplatform.py
-autosubmit_api/autosubmit_legacy/platforms/saga_platform.py
-autosubmit_api/autosubmit_legacy/platforms/saga_submitter.py
-autosubmit_api/autosubmit_legacy/platforms/sgeplatform.py
-autosubmit_api/autosubmit_legacy/platforms/slurmplatform.py
-autosubmit_api/autosubmit_legacy/platforms/submitter.py
-autosubmit_api/autosubmit_legacy/platforms/headers/__init__.py
-autosubmit_api/autosubmit_legacy/platforms/headers/ec_cca_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/ec_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/local_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/lsf_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/pbs10_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/pbs11_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/pbs12_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/ps_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/sge_header.py
-autosubmit_api/autosubmit_legacy/platforms/headers/slurm_header.py
-autosubmit_api/autosubmit_legacy/platforms/wrappers/__init__.py
-autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_builder.py
-autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_factory.py
-autosubmit_api/builders/__init__.py
-autosubmit_api/builders/basic_builder.py
-autosubmit_api/builders/configuration_facade_builder.py
-autosubmit_api/builders/experiment_history_builder.py
-autosubmit_api/builders/joblist_helper_builder.py
-autosubmit_api/builders/joblist_loader_builder.py
-autosubmit_api/builders/pkl_organizer_builder.py
-autosubmit_api/common/__init__.py
-autosubmit_api/common/utils.py
-autosubmit_api/common/utils_for_testing.py
-autosubmit_api/components/__init__.py
-autosubmit_api/components/experiment/__init__.py
-autosubmit_api/components/experiment/configuration_facade.py
-autosubmit_api/components/experiment/pkl_organizer.py
-autosubmit_api/components/experiment/test.py
-autosubmit_api/components/jobs/__init__.py
-autosubmit_api/components/jobs/job_factory.py
-autosubmit_api/components/jobs/job_support.py
-autosubmit_api/components/jobs/joblist_helper.py
-autosubmit_api/components/jobs/joblist_loader.py
-autosubmit_api/components/jobs/test.py
-autosubmit_api/components/jobs/utils.py
-autosubmit_api/components/representations/__init__.py
-autosubmit_api/components/representations/graph/__init__.py
-autosubmit_api/components/representations/graph/edge.py
-autosubmit_api/components/representations/graph/graph.py
-autosubmit_api/components/representations/graph/test.py
-autosubmit_api/components/representations/tree/__init__.py
-autosubmit_api/components/representations/tree/test.py
-autosubmit_api/components/representations/tree/tree.py
-autosubmit_api/config/__init__.py
-autosubmit_api/config/basicConfig.py
-autosubmit_api/config/config_common.py
-autosubmit_api/database/__init__.py
-autosubmit_api/database/autosubmit.py
-autosubmit_api/database/db_common.py
-autosubmit_api/database/db_jobdata.py
-autosubmit_api/database/db_manager.py
-autosubmit_api/database/db_structure.py
-autosubmit_api/experiment/__init__.py
-autosubmit_api/experiment/as_times_db_manager.py
-autosubmit_api/experiment/common_db_requests.py
-autosubmit_api/experiment/common_requests.py
-autosubmit_api/experiment/experiment_common.py
-autosubmit_api/experiment/experiment_db_manager.py
-autosubmit_api/experiment/test.py
-autosubmit_api/experiment/utils.py
-autosubmit_api/git/__init__.py
-autosubmit_api/git/autosubmit_git.py
-autosubmit_api/history/__init__.py
-autosubmit_api/history/experiment_history.py
-autosubmit_api/history/experiment_status.py
-autosubmit_api/history/experiment_status_manager.py
-autosubmit_api/history/internal_logging.py
-autosubmit_api/history/strategies.py
-autosubmit_api/history/test.py
-autosubmit_api/history/test_job_history.py
-autosubmit_api/history/test_strategies.py
-autosubmit_api/history/test_utils.py
-autosubmit_api/history/utils.py
-autosubmit_api/history/data_classes/__init__.py
-autosubmit_api/history/data_classes/experiment_run.py
-autosubmit_api/history/data_classes/job_data.py
-autosubmit_api/history/database_managers/__init__.py
-autosubmit_api/history/database_managers/database_manager.py
-autosubmit_api/history/database_managers/database_models.py
-autosubmit_api/history/database_managers/experiment_history_db_manager.py
-autosubmit_api/history/database_managers/experiment_status_db_manager.py
-autosubmit_api/history/database_managers/test.py
-autosubmit_api/history/platform_monitor/__init__.py
-autosubmit_api/history/platform_monitor/platform_monitor.py
-autosubmit_api/history/platform_monitor/platform_utils.py
-autosubmit_api/history/platform_monitor/slurm_monitor.py
-autosubmit_api/history/platform_monitor/slurm_monitor_item.py
-autosubmit_api/history/platform_monitor/test.py
-autosubmit_api/monitor/__init__.py
-autosubmit_api/monitor/diagram.py
-autosubmit_api/monitor/monitor.py
-autosubmit_api/monitor/utils.py
-autosubmit_api/notifications/__init__.py
-autosubmit_api/notifications/mail_notifier.py
-autosubmit_api/notifications/notifier.py
-autosubmit_api/performance/__init__.py
-autosubmit_api/performance/performance_metrics.py
-autosubmit_api/performance/utils.py
-autosubmit_api/statistics/__init__.py
-autosubmit_api/statistics/job_stat.py
-autosubmit_api/statistics/statistics.py
-autosubmit_api/statistics/stats_summary.py
-autosubmit_api/statistics/test.py
-autosubmit_api/statistics/utils.py
-autosubmit_api/workers/__init__.py
-autosubmit_api/workers/populate_details_db.py
-autosubmit_api/workers/populate_graph.py
-autosubmit_api/workers/populate_queue_run_times.py
-autosubmit_api/workers/populate_running_experiments.py
-autosubmit_api/workers/test.py
-autosubmit_api/workers/test_esarchive.py
-autosubmit_api/workers/verify_complete.py
-autosubmit_api/workers/business/__init__.py
-autosubmit_api/workers/business/populate_times.py
-autosubmit_api/workers/business/process_graph_drawings.py
-autosubmit_api/workers/deprecated/__init__.py
-autosubmit_api/workers/deprecated/fix_historic.py
-autosubmit_api/workers/deprecated/fix_historic_energy.py
-autosubmit_api/workers/populate_details/__init__.py
-autosubmit_api/workers/populate_details/populate.py
-autosubmit_api/workers/populate_details/test.py
\ No newline at end of file
diff --git a/autosubmit_api.egg-info/dependency_links.txt b/autosubmit_api.egg-info/dependency_links.txt
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/autosubmit_api.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/autosubmit_api.egg-info/requires.txt b/autosubmit_api.egg-info/requires.txt
deleted file mode 100644
index d8ff9e05a6fd86e529fee22dcb82571fd4c49046..0000000000000000000000000000000000000000
--- a/autosubmit_api.egg-info/requires.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-jwt==1.3.1
-requests==2.28.1
-flask_cors==3.0.10
-bscearth.utils==0.5.2
-pysqlite3==0.4.7
-numpy==1.21.6
-pydotplus==2.0.2
-portalocker==2.6.0
-networkx==2.6.3
-scipy==1.7.3
-paramiko==2.12.0
diff --git a/autosubmit_api.egg-info/top_level.txt b/autosubmit_api.egg-info/top_level.txt
deleted file mode 100644
index 7ecd916442b11be3fd54d6ad68e3a2ef05838217..0000000000000000000000000000000000000000
--- a/autosubmit_api.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-autosubmit_api
diff --git a/autosubmit_api/__init__.py b/autosubmit_api/__init__.py
index 95eb0feb76c72b5058951af785851049aa255ad3..38aa811c261c7fab40e9abcb939223d72c96aa44 100644
--- a/autosubmit_api/__init__.py
+++ b/autosubmit_api/__init__.py
@@ -1,3 +1,3 @@
-__version__ = "1.0.0"
-__author__ = 'Wilmer Uruchi'
+__version__ = "4.0.0b1"
+__author__ = 'Luiggi Tenorio, Cristian GutiƩrrez, Julian Berlin, Wilmer Uruchi'
__credits__ = 'Barcelona Supercomputing Center'
diff --git a/autosubmit_api/__init__.pyc b/autosubmit_api/__init__.pyc
deleted file mode 100644
index 224467877f8104b9b5e89aa172bda46b60e446b9..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/__init__.pyc and /dev/null differ
diff --git a/autosubmit_api/app.py b/autosubmit_api/app.py
index 8ea8659fbf2cb60a918b7d98e26480c2e5b1372d..9799534bb8c1b325694317001d8527a3ea1463d7 100644
--- a/autosubmit_api/app.py
+++ b/autosubmit_api/app.py
@@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with Autosubmit. If not, see .
+from functools import wraps
import os
import sys
import time
@@ -24,435 +25,454 @@ from datetime import datetime, timedelta
import requests
import logging
from flask_cors import CORS, cross_origin
-from flask import Flask, request, session, redirect, url_for
-from bscearth.utils.log import Log
-from .database.db_common import get_current_running_exp, update_experiment_description_owner
-from .experiment import common_requests as CommonRequests
-from .experiment import utils as Utiles
-from .performance.performance_metrics import PerformanceMetrics
-from .database.db_common import search_experiment_by_id
-from .config.basicConfig import BasicConfig
-from .builders.joblist_helper_builder import JobListHelperBuilder, JobListHelperDirector
+from flask import Flask, request, session, redirect
+
+from autosubmit_api.database.extended_db import ExtendedDB
+from autosubmit_api.database.db_common import get_current_running_exp, update_experiment_description_owner
+from autosubmit_api.experiment import common_requests as CommonRequests
+from autosubmit_api.experiment import utils as Utiles
+from autosubmit_api.performance.performance_metrics import PerformanceMetrics
+from autosubmit_api.database.db_common import search_experiment_by_id
+from autosubmit_api.config.basicConfig import APIBasicConfig
+from autosubmit_api.builders.joblist_helper_builder import JobListHelperBuilder, JobListHelperDirector
from multiprocessing import Manager, Lock
import jwt
import sys
-
-JWT_SECRET = os.environ.get("SECRET_KEY")
-JWT_ALGORITHM = "HS256"
-JWT_EXP_DELTA_SECONDS = 84000*5 # 5 days
-
-sys.path.insert(0, os.path.abspath('.'))
-
-app = Flask(__name__)
-
-D = Manager().dict()
-
-# CAS Stuff
-CAS_LOGIN_URL = os.environ.get("CAS_LOGIN_URL") # 'https://cas.bsc.es/cas/login'
-CAS_VERIFY_URL = os.environ.get("CAS_VERIFY_URL") # 'https://cas.bsc.es/cas/serviceValidate'
-
-CORS(app)
-gunicorn_logger = logging.getLogger('gunicorn.error')
-app.logger.handlers = gunicorn_logger.handlers
-app.logger.setLevel(gunicorn_logger.level)
-
-app.logger.info("PYTHON VERSION: " + sys.version)
-
-requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS += 'HIGH:!DH:!aNULL'
-try:
- requests.packages.urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST += 'HIGH:!DH:!aNULL'
-except AttributeError:
- # no pyopenssl support used / needed / available
- pass
-
-lock = Lock()
-
-CommonRequests.enforceLocal(app.logger)
-
-# CAS Login
-@app.route('/login')
-def login():
- BasicConfig.read()
- ticket = request.args.get('ticket')
- environment = request.args.get('env')
- referrer = request.referrer
- is_allowed = False
- for allowed_client in BasicConfig.ALLOWED_CLIENTS:
- if referrer.find(allowed_client) >= 0:
- referrer = allowed_client
- is_allowed = True
- if is_allowed == False:
- return {'authenticated': False, 'user': None, 'token': None, 'message': "Your client is not authorized for this operation. The API admin needs to add your URL to the list of allowed clients."}
-
- target_service = "{}{}/login".format(referrer, environment)
- if not ticket:
- route_to_request_ticket = "{}?service={}".format(CAS_LOGIN_URL, target_service)
- app.logger.info("Redirected to: " + str(route_to_request_ticket))
- return redirect(route_to_request_ticket)
- environment = environment if environment is not None else "autosubmitapp" # can be used to target the test environment
- cas_verify_ticket_route = CAS_VERIFY_URL + '?service=' + target_service + '&ticket=' + ticket
- response = requests.get(cas_verify_ticket_route)
- user = None
- if response:
- user = Utiles.get_cas_user_from_xml(response.content)
- app.logger.info('CAS verify ticket response: user %s', user)
- if not user:
- return {'authenticated': False, 'user': None, 'token': None, 'message': "Can't verify user."}
- else: # Login successful
- payload = {
- 'user_id': user,
- 'exp': datetime.utcnow() + timedelta(seconds=JWT_EXP_DELTA_SECONDS)
- }
- jwt_token = jwt.encode(payload, JWT_SECRET, JWT_ALGORITHM)
- return {'authenticated': True, 'user': user, 'token': jwt_token, 'message': "Token generated."}
-
-
-@app.route('/updatedesc', methods=['GET', 'POST'])
-@cross_origin(expose_headers="Authorization")
-def update_description():
- """
- Updates the description of an experiment. Requires authenticated user.
- """
- start_time = time.time()
- expid = None
- new_description = None
- if request.is_json:
- body_data = request.json
- expid = body_data.get("expid", None)
- new_description = body_data.get("description", None)
- current_token = request.headers.get("Authorization")
- try:
- jwt_token = jwt.decode(current_token, JWT_SECRET, JWT_ALGORITHM)
- except jwt.ExpiredSignatureError:
- jwt_token = {"user_id": None}
- except Exception as exp:
- jwt_token = {"user_id": None}
- valid_user = jwt_token.get("user_id", None)
- app.logger.info('UDESC|RECEIVED|')
- app.logger.info('UDESC|RTIME|' + str(time.time() - start_time))
- return update_experiment_description_owner(expid, new_description, valid_user)
-
-
-@app.route('/tokentest', methods=['GET', 'POST'])
-@cross_origin(expose_headers="Authorization")
-def test_token():
+from flask_apscheduler import APScheduler
+from autosubmit_api.workers import populate_details_db, populate_queue_run_times, populate_running_experiments, populate_graph, verify_complete
+from autosubmit_api.config import JWT_SECRET, JWT_ALGORITHM, JWT_EXP_DELTA_SECONDS, RUN_BACKGROUND_TASKS_ON_START, CAS_LOGIN_URL, CAS_VERIFY_URL
+
+def with_log_run_times(_logger: logging.Logger, _tag: str):
+ def decorator(func):
+ @wraps(func)
+ def inner_wrapper(*args, **kwargs):
+ start_time = time.time()
+ path = ""
+ try:
+ path = request.path
+ except:
+ pass
+ _logger.info('{}|RECEIVED|{}'.format(_tag, path))
+ response = func(*args, **kwargs)
+ _logger.info('{}|RTIME|{}|{:.3f}'.format(_tag, path,(time.time() - start_time)))
+ return response
+
+ return inner_wrapper
+ return decorator
+
+def create_app():
"""
- Tests if a token is still valid
+ Autosubmit Flask application factory
+ This function initializes the application properly
"""
- start_time = time.time()
- current_token = request.headers.get("Authorization")
- try:
- jwt_token = jwt.decode(current_token, JWT_SECRET, JWT_ALGORITHM)
- except jwt.ExpiredSignatureError:
- jwt_token = {"user_id": None}
- except Exception as exp:
- print(exp)
- jwt_token = {"user_id": None}
-
- valid_user = jwt_token.get("user_id", None)
- app.logger.info('TTEST|RECEIVED')
- app.logger.info('TTEST|RTIME|' + str(time.time() - start_time))
- return {
- "isValid": True if valid_user else False,
- "message": "Session expired" if not valid_user else None
- }
-
-
-@app.route('/cconfig/', methods=['GET'])
-@cross_origin(expose_headers="Authorization")
-def get_current_configuration(expid):
- start_time = time.time()
- current_token = request.headers.get("Authorization")
- try:
- jwt_token = jwt.decode(current_token, JWT_SECRET, JWT_ALGORITHM)
- except Exception as exp:
- jwt_token = {"user_id": None}
- valid_user = jwt_token.get("user_id", None)
- app.logger.info('CCONFIG|RECEIVED|' + str(expid))
- result = CommonRequests.get_current_configuration_by_expid(expid, valid_user, app.logger)
- app.logger.info('CCONFIG|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/expinfo/', methods=['GET'])
-def exp_info(expid):
- start_time = time.time()
- app.logger.info('EXPINFO|RECEIVED|' + str(expid))
- result = CommonRequests.get_experiment_data(expid)
- app.logger.info('EXPINFO|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/expcount/', methods=['GET'])
-def exp_counters(expid):
- start_time = time.time()
- app.logger.info('EXPCOUNT|RECEIVED|' + str(expid))
- result = CommonRequests.get_experiment_counters(expid)
- app.logger.info('EXPCOUNT|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/searchowner///', methods=['GET'])
-@app.route('/searchowner/', methods=['GET'])
-def search_owner(owner, exptype=None, onlyactive=None):
- """
- Same output format as search_expid
- """
- start_time = time.time()
- app.logger.info('SOWNER|RECEIVED|' + str(owner) + "|" + str(exptype) + "|" + str(onlyactive))
- result = search_experiment_by_id(searchString=None, owner=owner, typeExp=exptype, onlyActive=onlyactive)
- app.logger.info('SOWNER|RTIME|' + str(owner) + "|" + str(exptype) + "|" + str(onlyactive) + "|" + str(time.time() - start_time))
- return result
+ sys.path.insert(0, os.path.abspath('.'))
-@app.route('/search///', methods=['GET'])
-@app.route('/search/', methods=['GET'])
-def search_expid(expid, exptype=None, onlyactive=None):
- start_time = time.time()
- app.logger.info('SEARCH|RECEIVED|' + str(expid) + "|" + str(exptype) + "|" + str(onlyactive))
- result = search_experiment_by_id(expid, owner=None, typeExp=exptype, onlyActive=onlyactive)
- app.logger.info('SEARCH|RTIME|' + str(expid) + "|" + str(exptype) + "|" + str(onlyactive) + "|" + str(time.time() - start_time))
- return result
+ app = Flask(__name__)
+ D = Manager().dict()
-@app.route('/running/', methods=['GET'])
-def search_running():
- """
- Returns the list of all experiments that are currently running.
- """
- if 'username' in session:
- print(("USER {}".format(session['username'])))
- start_time = time.time()
- app.logger.info("Active proceses: " + str(D))
- app.logger.info('RUN|RECEIVED|')
- #app.logger.info("Received Currently Running query ")
- result = get_current_running_exp()
- app.logger.info('RUN|RTIME|' + str(time.time() - start_time))
- return result
-
-
-@app.route('/runs/', methods=['GET'])
-def get_runs(expid):
- """
- Get list of runs of the same experiment from the historical db
- """
- start_time = time.time()
- app.logger.info('ERUNS|RECEIVED|{0}'.format(expid))
- result = CommonRequests.get_experiment_runs(expid)
- app.logger.info('ERUNS|RTIME|{0}'.format(str(time.time() - start_time)))
- return result
-
-
-@app.route('/ifrun/', methods=['GET'])
-def get_if_running(expid):
- start_time = time.time()
- app.logger.info('IFRUN|RECEIVED|' + str(expid))
- result = CommonRequests.quick_test_run(expid)
- app.logger.info('IFRUN|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/logrun/', methods=['GET'])
-def get_log_running(expid):
- start_time = time.time()
- app.logger.info('LOGRUN|RECEIVED|' + str(expid))
- result = CommonRequests.get_current_status_log_plus(expid)
- app.logger.info('LOGRUN|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/summary/', methods=['GET'])
-def get_expsummary(expid):
- start_time = time.time()
- user = request.args.get("loggedUser", default="null", type=str)
- app.logger.info('SUMMARY|RECEIVED|' + str(expid))
- if user != "null": lock.acquire(); D[os.getpid()] = [user, "summary", True]; lock.release();
- result = CommonRequests.get_experiment_summary(expid, app.logger)
- app.logger.info('Process: ' + str(os.getpid()) + " workers: " + str(D))
- app.logger.info('SUMMARY|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- if user != "null": lock.acquire(); D[os.getpid()] = [user, "summary", False]; lock.release();
- if user != "null": lock.acquire(); D.pop(os.getpid(), None); lock.release();
- return result
-
-
-@app.route('/shutdown/')
-def shutdown(route):
- """
- This function is invoked from the frontend (AS-GUI) to kill workers that are no longer needed.
- This call is common in heavy parts of the GUI such as the Tree and Graph generation or Summaries fetching.
- """
- start_time = time.time()
+ CORS(app)
+ gunicorn_logger = logging.getLogger('gunicorn.error')
+ app.logger.handlers = gunicorn_logger.handlers
+ app.logger.setLevel(gunicorn_logger.level)
+
+ app.logger.info("PYTHON VERSION: " + sys.version)
+ requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS += 'HIGH:!DH:!aNULL'
try:
- user = request.args.get("loggedUser", default="null", type=str)
- expid = request.args.get("expid", default="null", type=str)
- except Exception as exp:
- app.logger.info("Bad parameters for user and expid in route.")
+ requests.packages.urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST += 'HIGH:!DH:!aNULL'
+ except AttributeError:
+ # no pyopenssl support used / needed / available
+ pass
+
+ lock = Lock()
+
+ CommonRequests.enforceLocal(app.logger)
+
+ # Background Scheduler
+ scheduler = APScheduler()
+ scheduler.init_app(app)
+ scheduler.start()
+
+ @scheduler.task('interval', id='populate_details_db', hours=4)
+ @with_log_run_times(app.logger, "WRKPOPDET")
+ def worker_populate_details_db():
+ populate_details_db.main()
+
+ @scheduler.task('interval', id='populate_queue_run_times', minutes=3)
+ @with_log_run_times(app.logger, "WRKPOPQUE")
+ def worker_populate_queue_run_times():
+ populate_queue_run_times.main()
+
+ @scheduler.task('interval', id='populate_running_experiments', minutes=5)
+ @with_log_run_times(app.logger, "WRKPOPREX")
+ def worker_populate_running_experiments():
+ populate_running_experiments.main()
+
+ @scheduler.task('interval', id='verify_complete', minutes=10)
+ @with_log_run_times(app.logger, "WRKVRFCMPT")
+ def worker_verify_complete():
+ verify_complete.main()
+
+ @scheduler.task('interval', id='populate_graph', hours=24)
+ @with_log_run_times(app.logger, "WRKPOPGRPH")
+ def worker_populate_graph():
+ populate_graph.main()
+
+ # Prepare DB
+ config = APIBasicConfig()
+ config.read()
+ ext_db = ExtendedDB(config.DB_DIR, config.DB_FILE, config.AS_TIMES_DB)
+ ext_db.prepare_db()
+
+ if RUN_BACKGROUND_TASKS_ON_START:
+ app.logger.info('Starting populate workers on init...')
+ worker_populate_details_db()
+ worker_populate_queue_run_times()
+ worker_populate_running_experiments()
+ worker_verify_complete()
+ worker_populate_graph()
+
+ # CAS Login
+ @app.route('/login')
+ def login():
+ APIBasicConfig.read()
+ ticket = request.args.get('ticket')
+ environment = request.args.get('env')
+ referrer = request.referrer
+ is_allowed = False
+ for allowed_client in APIBasicConfig.ALLOWED_CLIENTS:
+ if referrer and referrer.find(allowed_client) >= 0:
+ referrer = allowed_client
+ is_allowed = True
+ if is_allowed == False:
+ return {'authenticated': False, 'user': None, 'token': None, 'message': "Your client is not authorized for this operation. The API admin needs to add your URL to the list of allowed clients."}, 401
+
+ target_service = "{}{}/login".format(referrer, environment)
+ if not ticket:
+ route_to_request_ticket = "{}?service={}".format(CAS_LOGIN_URL, target_service)
+ app.logger.info("Redirected to: " + str(route_to_request_ticket))
+ return redirect(route_to_request_ticket)
+ environment = environment if environment is not None else "autosubmitapp" # can be used to target the test environment
+ cas_verify_ticket_route = CAS_VERIFY_URL + '?service=' + target_service + '&ticket=' + ticket
+ response = requests.get(cas_verify_ticket_route)
+ user = None
+ if response:
+ user = Utiles.get_cas_user_from_xml(response.content)
+ app.logger.info('CAS verify ticket response: user %s', user)
+ if not user:
+ return {'authenticated': False, 'user': None, 'token': None, 'message': "Can't verify user."}, 401
+ else: # Login successful
+ payload = {
+ 'user_id': user,
+ 'exp': datetime.utcnow() + timedelta(seconds=JWT_EXP_DELTA_SECONDS)
+ }
+ jwt_token = jwt.encode(payload, JWT_SECRET, JWT_ALGORITHM)
+ return {'authenticated': True, 'user': user, 'token': jwt_token, 'message': "Token generated."}
+
+
+ @app.route('/updatedesc', methods=['GET', 'POST'])
+ @cross_origin(expose_headers="Authorization")
+ @with_log_run_times(app.logger, "UDESC")
+ def update_description():
+ """
+ Updates the description of an experiment. Requires authenticated user.
+ """
+ expid = None
+ new_description = None
+ if request.is_json:
+ body_data = request.json
+ expid = body_data.get("expid", None)
+ new_description = body_data.get("description", None)
+ current_token = request.headers.get("Authorization")
+ try:
+ jwt_token = jwt.decode(current_token, JWT_SECRET, JWT_ALGORITHM)
+ except jwt.ExpiredSignatureError:
+ jwt_token = {"user_id": None}
+ except Exception as exp:
+ jwt_token = {"user_id": None}
+ valid_user = jwt_token.get("user_id", None)
+ return update_experiment_description_owner(expid, new_description, valid_user)
+
+
+ @app.route('/tokentest', methods=['GET', 'POST'])
+ @cross_origin(expose_headers="Authorization")
+ @with_log_run_times(app.logger, "TTEST")
+ def test_token():
+ """
+ Tests if a token is still valid
+ """
+ current_token = request.headers.get("Authorization")
+ try:
+ jwt_token = jwt.decode(current_token, JWT_SECRET, JWT_ALGORITHM)
+ except jwt.ExpiredSignatureError:
+ jwt_token = {"user_id": None}
+ except Exception as exp:
+ print(exp)
+ jwt_token = {"user_id": None}
+
+ valid_user = jwt_token.get("user_id", None)
+ return {
+ "isValid": True if valid_user else False,
+ "message": "Session expired" if not valid_user else None
+ }
- if user != "null":
- app.logger.info('SHUTDOWN|RECEIVED for route: ' + route + " user: " + user + " expid: " + expid)
+
+ @app.route('/cconfig/', methods=['GET'])
+ @cross_origin(expose_headers="Authorization")
+ @with_log_run_times(app.logger, "CCONFIG")
+ def get_current_configuration(expid):
+ current_token = request.headers.get("Authorization")
+ try:
+ jwt_token = jwt.decode(current_token, JWT_SECRET, JWT_ALGORITHM)
+ except Exception as exp:
+ jwt_token = {"user_id": None}
+ valid_user = jwt_token.get("user_id", None)
+ result = CommonRequests.get_current_configuration_by_expid(expid, valid_user, app.logger)
+ return result
+
+
+ @app.route('/expinfo/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "EXPINFO")
+ def exp_info(expid):
+ result = CommonRequests.get_experiment_data(expid)
+ return result
+
+
+ @app.route('/expcount/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "EXPCOUNT")
+ def exp_counters(expid):
+ result = CommonRequests.get_experiment_counters(expid)
+ return result
+
+
+ @app.route('/searchowner/<owner>/<exptype>/<onlyactive>', methods=['GET'])
+ @app.route('/searchowner/<owner>', methods=['GET'])
+ @with_log_run_times(app.logger, "SOWNER")
+ def search_owner(owner, exptype=None, onlyactive=None):
+ """
+ Same output format as search_expid
+ """
+ result = search_experiment_by_id(searchString=None, owner=owner, typeExp=exptype, onlyActive=onlyactive)
+ return result
+
+
+ @app.route('/search/<expid>/<exptype>/<onlyactive>', methods=['GET'])
+ @app.route('/search/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "SEARCH")
+ def search_expid(expid, exptype=None, onlyactive=None):
+ result = search_experiment_by_id(expid, owner=None, typeExp=exptype, onlyActive=onlyactive)
+ return result
+
+
+ @app.route('/running/', methods=['GET'])
+ @with_log_run_times(app.logger, "RUN")
+ def search_running():
+ """
+ Returns the list of all experiments that are currently running.
+ """
+ if 'username' in session:
+ print(("USER {}".format(session['username'])))
+ app.logger.info("Active proceses: " + str(D))
+ #app.logger.info("Received Currently Running query ")
+ result = get_current_running_exp()
+ return result
+
+
+ @app.route('/runs/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "ERUNS")
+ def get_runs(expid):
+ """
+ Get list of runs of the same experiment from the historical db
+ """
+ result = CommonRequests.get_experiment_runs(expid)
+ return result
+
+
+ @app.route('/ifrun/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "IFRUN")
+ def get_if_running(expid):
+ result = CommonRequests.quick_test_run(expid)
+ return result
+
+
+ @app.route('/logrun/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "LOGRUN")
+ def get_log_running(expid):
+ result = CommonRequests.get_current_status_log_plus(expid)
+ return result
+
+
+ @app.route('/summary/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "SUMMARY")
+ def get_expsummary(expid):
+ user = request.args.get("loggedUser", default="null", type=str)
+ if user != "null": lock.acquire(); D[os.getpid()] = [user, "summary", True]; lock.release();
+ result = CommonRequests.get_experiment_summary(expid, app.logger)
+ app.logger.info('Process: ' + str(os.getpid()) + " workers: " + str(D))
+ if user != "null": lock.acquire(); D[os.getpid()] = [user, "summary", False]; lock.release();
+ if user != "null": lock.acquire(); D.pop(os.getpid(), None); lock.release();
+ return result
+
+
+ @app.route('/shutdown/<route>')
+ @with_log_run_times(app.logger, "SHUTDOWN")
+ def shutdown(route):
+ """
+ This function is invoked from the frontend (AS-GUI) to kill workers that are no longer needed.
+ This call is common in heavy parts of the GUI such as the Tree and Graph generation or Summaries fetching.
+ """
+ try:
+ user = request.args.get("loggedUser", default="null", type=str)
+ expid = request.args.get("expid", default="null", type=str)
+ except Exception as exp:
+ app.logger.info("Bad parameters for user and expid in route.")
+
+ if user != "null":
+ app.logger.info('SHUTDOWN|DETAILS|route: ' + route + " user: " + user + " expid: " + expid)
+ try:
+ # app.logger.info("user: " + user)
+ # app.logger.info("expid: " + expid)
+ app.logger.info("Workers before: " + str(D))
+ lock.acquire()
+ for k,v in list(D.items()):
+ if v[0] == user and v[1] == route and v[-1] == True:
+ if v[2] == expid:
+ D[k] = [user, route, expid, False]
+ else:
+ D[k] = [user, route, False]
+ D.pop(k, None)
+ # reboot the worker
+ os.system('kill -HUP ' + str(k))
+ app.logger.info("killed worker " + str(k))
+ lock.release()
+ app.logger.info("Workers now: " + str(D))
+ except Exception as exp:
+ app.logger.info("[CRITICAL] Could not shutdown process " + expid + " by user \"" + user + "\"")
+ return ""
+
+
+ @app.route('/performance/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "PRF")
+ def get_exp_performance(expid):
+ result = {}
try:
- # app.logger.info("user: " + user)
- # app.logger.info("expid: " + expid)
- app.logger.info("Workers before: " + str(D))
- lock.acquire()
- for k,v in list(D.items()):
- if v[0] == user and v[1] == route and v[-1] == True:
- if v[2] == expid:
- D[k] = [user, route, expid, False]
- else:
- D[k] = [user, route, False]
- D.pop(k, None)
- # reboot the worker
- os.system('kill -HUP ' + str(k))
- app.logger.info("killed worker " + str(k))
- lock.release()
- app.logger.info("Workers now: " + str(D))
+ result = PerformanceMetrics(expid, JobListHelperDirector(JobListHelperBuilder(expid)).build_job_list_helper()).to_json()
except Exception as exp:
- app.logger.info("[CRITICAL] Could not shutdown process " + expid + " by user \"" + user + "\"")
- app.logger.info('SHUTDOWN|DONE|RTIME' + "|" + str(time.time() - start_time))
- return ""
+ result = {"SYPD": None,
+ "ASYPD": None,
+ "RSYPD": None,
+ "CHSY": None,
+ "JPSY": None,
+ "Parallelization": None,
+ "considered": [],
+ "error": True,
+ "error_message": str(exp),
+ "warnings_job_data": [],
+ }
+ return result
+
+
+ @app.route('/graph/<expid>/<layout>/<grouped>', methods=['GET'])
+ @with_log_run_times(app.logger, "GRAPH")
+ def get_list_format(expid, layout='standard', grouped='none'):
+ user = request.args.get("loggedUser", default="null", type=str)
+ # app.logger.info("user: " + user)
+ # app.logger.info("expid: " + expid)
+ if user != "null": lock.acquire(); D[os.getpid()] = [user, "graph", expid, True]; lock.release();
+ result = CommonRequests.get_experiment_graph(expid, app.logger, layout, grouped)
+ app.logger.info('Process: ' + str(os.getpid()) + " graph workers: " + str(D))
+ if user != "null": lock.acquire(); D[os.getpid()] = [user, "graph", expid, False]; lock.release();
+ if user != "null": lock.acquire(); D.pop(os.getpid(), None); lock.release();
+ return result
+
+
+ @app.route('/tree/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "TREE")
+ def get_exp_tree(expid):
+ user = request.args.get("loggedUser", default="null", type=str)
+ # app.logger.info("user: " + user)
+ # app.logger.info("expid: " + expid)
+ if user != "null": lock.acquire(); D[os.getpid()] = [user, "tree", expid, True]; lock.release();
+ result = CommonRequests.get_experiment_tree_structured(expid, app.logger)
+ app.logger.info('Process: ' + str(os.getpid()) + " tree workers: " + str(D))
+ if user != "null": lock.acquire(); D[os.getpid()] = [user, "tree", expid, False]; lock.release();
+ if user != "null": lock.acquire(); D.pop(os.getpid(), None); lock.release();
+ return result
-@app.route('/performance/<expid>', methods=['GET'])
-def get_exp_performance(expid):
- start_time = time.time()
- app.logger.info('PRF|RECEIVED|' + str(expid))
- result = {}
- try:
- result = PerformanceMetrics(expid, JobListHelperDirector(JobListHelperBuilder(expid)).build_job_list_helper()).to_json()
- except Exception as exp:
- result = {"SYPD": None,
- "ASYPD": None,
- "RSYPD": None,
- "CHSY": None,
- "JPSY": None,
- "Parallelization": None,
- "considered": [],
- "error": True,
- "error_message": str(exp),
- "warnings_job_data": [],
- }
- app.logger.info('PRF|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/graph/<expid>/<layout>/<grouped>', methods=['GET'])
-def get_list_format(expid, layout='standard', grouped='none'):
- start_time = time.time()
- user = request.args.get("loggedUser", default="null", type=str)
- # app.logger.info("user: " + user)
- # app.logger.info("expid: " + expid)
- app.logger.info('GRAPH|RECEIVED|' + str(expid) + "~" + str(grouped) + "~" + str(layout))
- if user != "null": lock.acquire(); D[os.getpid()] = [user, "graph", expid, True]; lock.release();
- result = CommonRequests.get_experiment_graph(expid, app.logger, layout, grouped)
- app.logger.info('Process: ' + str(os.getpid()) + " graph workers: " + str(D))
- app.logger.info('GRAPH|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- if user != "null": lock.acquire(); D[os.getpid()] = [user, "graph", expid, False]; lock.release();
- if user != "null": lock.acquire(); D.pop(os.getpid(), None); lock.release();
- return result
-
-
-@app.route('/tree/<expid>', methods=['GET'])
-def get_exp_tree(expid):
- start_time = time.time()
- user = request.args.get("loggedUser", default="null", type=str)
- # app.logger.info("user: " + user)
- # app.logger.info("expid: " + expid)
- app.logger.info('TREE|RECEIVED|' + str(expid))
- if user != "null": lock.acquire(); D[os.getpid()] = [user, "tree", expid, True]; lock.release();
- result = CommonRequests.get_experiment_tree_structured(expid, app.logger)
- app.logger.info('Process: ' + str(os.getpid()) + " tree workers: " + str(D))
- app.logger.info('TREE|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- if user != "null": lock.acquire(); D[os.getpid()] = [user, "tree", expid, False]; lock.release();
- if user != "null": lock.acquire(); D.pop(os.getpid(), None); lock.release();
- return result
-
-
-@app.route('/quick/<expid>', methods=['GET'])
-def get_quick_view_data(expid):
- start_time = time.time()
- app.logger.info('QUICK|RECEIVED|' + str(expid))
- result = CommonRequests.get_quick_view(expid)
- app.logger.info('QUICK|RTIME|{0}|{1}'.format(str(expid), str(time.time() - start_time)))
- return result
-
-
-@app.route('/exprun/<expid>', methods=['GET'])
-def get_experiment_running(expid):
- """
- Finds log and gets the last 150 lines
- """
- start_time = time.time()
- app.logger.info('LOG|RECEIVED|' + str(expid))
- result = CommonRequests.get_experiment_log_last_lines(expid)
- app.logger.info('LOG|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
+ @app.route('/quick/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "QUICK")
+ def get_quick_view_data(expid):
+ result = CommonRequests.get_quick_view(expid)
+ return result
-@app.route('/joblog/<logfile>', methods=['GET'])
-def get_job_log_from_path(logfile):
- """
- Get log from path
- """
- expid = logfile.split('_') if logfile is not None else ""
- expid = expid[0] if len(expid) > 0 else ""
- start_time = time.time()
- app.logger.info('JOBLOG|RECEIVED|{0}'.format(expid))
- result = CommonRequests.get_job_log(expid, logfile)
- app.logger.info('JOBLOG|RTIME|{0}|{1}'.format(expid, str(time.time() - start_time)))
- return result
-
-
-@app.route('/pklinfo/<expid>/<timeStamp>', methods=['GET'])
-def get_experiment_pklinfo(expid, timeStamp):
- start_time = time.time()
- app.logger.info('GPKL|RECEIVED|' + str(expid) + "~" + str(timeStamp))
- result = CommonRequests.get_experiment_pkl(expid)
- app.logger.info('GPKL|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/pkltreeinfo/<expid>/<timeStamp>', methods=['GET'])
-def get_experiment_tree_pklinfo(expid, timeStamp):
- start_time = time.time()
- app.logger.info('TPKL|RECEIVED|' + str(expid) + "~" + str(timeStamp))
- result = CommonRequests.get_experiment_tree_pkl(expid)
- app.logger.info('TPKL|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/stats/<expid>/<filter_period>/<filter_type>')
-def get_experiment_statistics(expid, filter_period, filter_type):
- start_time = time.time()
- app.logger.info('STAT|RECEIVED|' + str(expid) + "~" + str(filter_period) + "~" + str(filter_type))
- result = CommonRequests.get_experiment_stats(expid, filter_period, filter_type)
- app.logger.info('STAT|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/history/<expid>/<jobname>')
-def get_exp_job_history(expid, jobname):
- start_time = time.time()
- app.logger.info('HISTORY|RECEIVED|' + str(expid) + "~" + str(jobname))
- result = CommonRequests.get_job_history(expid, jobname)
- app.logger.info('HISTORY|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/rundetail/<expid>/<runid>')
-def get_experiment_run_job_detail(expid, runid):
- start_time = time.time()
- app.logger.info('RUNDETAIL|RECEIVED|' + str(expid) + "~" + str(runid))
- result = CommonRequests.get_experiment_tree_rundetail(expid, runid)
- app.logger.info('RUNDETAIL|RTIME|' + str(expid) + "|" + str(time.time() - start_time))
- return result
-
-
-@app.route('/filestatus/')
-def get_file_status():
- start_time = time.time()
- app.logger.info('FSTATUS|RECEIVED|')
- result = CommonRequests.get_last_test_archive_status()
- app.logger.info('FSTATUS|RTIME|' + str(time.time() - start_time))
- return result
+ @app.route('/exprun/<expid>', methods=['GET'])
+ @with_log_run_times(app.logger, "LOG")
+ def get_experiment_running(expid):
+ """
+ Finds log and gets the last 150 lines
+ """
+ result = CommonRequests.get_experiment_log_last_lines(expid)
+ return result
+
+
+ @app.route('/joblog/<logfile>', methods=['GET'])
+ @with_log_run_times(app.logger, "JOBLOG")
+ def get_job_log_from_path(logfile):
+ """
+ Get log from path
+ """
+ expid = logfile.split('_') if logfile is not None else ""
+ expid = expid[0] if len(expid) > 0 else ""
+ result = CommonRequests.get_job_log(expid, logfile)
+ return result
+
+
+ @app.route('/pklinfo/<expid>/<timeStamp>', methods=['GET'])
+ @with_log_run_times(app.logger, "GPKL")
+ def get_experiment_pklinfo(expid, timeStamp):
+ result = CommonRequests.get_experiment_pkl(expid)
+ return result
+
+
+ @app.route('/pkltreeinfo/<expid>/<timeStamp>', methods=['GET'])
+ @with_log_run_times(app.logger, "TPKL")
+ def get_experiment_tree_pklinfo(expid, timeStamp):
+ result = CommonRequests.get_experiment_tree_pkl(expid)
+ return result
+
+
+ @app.route('/stats/<expid>/<filter_period>/<filter_type>')
+ @with_log_run_times(app.logger, "STAT")
+ def get_experiment_statistics(expid, filter_period, filter_type):
+ result = CommonRequests.get_experiment_stats(expid, filter_period, filter_type)
+ return result
+
+
+ @app.route('/history/<expid>/<jobname>')
+ @with_log_run_times(app.logger, "HISTORY")
+ def get_exp_job_history(expid, jobname):
+ result = CommonRequests.get_job_history(expid, jobname)
+ return result
+
+
+ @app.route('/rundetail/<expid>/<runid>')
+ @with_log_run_times(app.logger, "RUNDETAIL")
+ def get_experiment_run_job_detail(expid, runid):
+ result = CommonRequests.get_experiment_tree_rundetail(expid, runid)
+ return result
+
+
+ @app.route('/filestatus/')
+ @with_log_run_times(app.logger, "FSTATUS")
+ def get_file_status():
+ result = CommonRequests.get_last_test_archive_status()
+ return result
+
+
+ return app
+
+app = create_app()
\ No newline at end of file
diff --git a/autosubmit_api/app.pyc b/autosubmit_api/app.pyc
deleted file mode 100644
index f9d49de8c66b7fcd5eac888dc667509e09d6ed79..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/app.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_api.egg-info/PKG-INFO b/autosubmit_api/autosubmit_api.egg-info/PKG-INFO
deleted file mode 100644
index 754166785604aece4732b5dbe08990418dfd8e2c..0000000000000000000000000000000000000000
--- a/autosubmit_api/autosubmit_api.egg-info/PKG-INFO
+++ /dev/null
@@ -1,16 +0,0 @@
-Metadata-Version: 1.1
-Name: autosubmit-api
-Version: 1.0.0
-Summary: An extension to the Autosubmit package that serves its information as an API
-Home-page: https://earth.bsc.es/gitlab/wuruchi/autosubmit_api
-Author: Wilmer Uruchi
-Author-email: wilmer.uruchi@bsc.es
-License: GNU GPL
-Description: UNKNOWN
-Keywords: autosubmit,API
-Platform: UNKNOWN
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
-Classifier: Operating System :: POSIX :: Linux
-Classifier: Programming Language :: Python :: 3.7
diff --git a/autosubmit_api/autosubmit_api.egg-info/SOURCES.txt b/autosubmit_api/autosubmit_api.egg-info/SOURCES.txt
deleted file mode 100644
index 861769ff0cfc65c3d1facd06b41e6c194716d681..0000000000000000000000000000000000000000
--- a/autosubmit_api/autosubmit_api.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,156 +0,0 @@
-.egg-info/PKG-INFO
-.egg-info/SOURCES.txt
-.egg-info/dependency_links.txt
-.egg-info/requires.txt
-.egg-info/top_level.txt
-autosubmit_legacy/__init__.py
-autosubmit_legacy/autosubmit.py
-autosubmit_legacy/job/__init__.py
-autosubmit_legacy/job/job.py
-autosubmit_legacy/job/job_common.py
-autosubmit_legacy/job/job_dict.py
-autosubmit_legacy/job/job_exceptions.py
-autosubmit_legacy/job/job_grouping.py
-autosubmit_legacy/job/job_list.py
-autosubmit_legacy/job/job_list_persistence.py
-autosubmit_legacy/job/job_package_persistence.py
-autosubmit_legacy/job/job_packager.py
-autosubmit_legacy/job/job_packages.py
-autosubmit_legacy/job/job_utils.py
-autosubmit_legacy/platforms/__init__.py
-autosubmit_legacy/platforms/ecmwf_adaptor.py
-autosubmit_legacy/platforms/ecplatform.py
-autosubmit_legacy/platforms/locplatform.py
-autosubmit_legacy/platforms/lsfplatform.py
-autosubmit_legacy/platforms/mn_adaptor.py
-autosubmit_legacy/platforms/paramiko_platform.py
-autosubmit_legacy/platforms/paramiko_submitter.py
-autosubmit_legacy/platforms/pbsplatform.py
-autosubmit_legacy/platforms/platform.py
-autosubmit_legacy/platforms/psplatform.py
-autosubmit_legacy/platforms/saga_platform.py
-autosubmit_legacy/platforms/saga_submitter.py
-autosubmit_legacy/platforms/sgeplatform.py
-autosubmit_legacy/platforms/slurmplatform.py
-autosubmit_legacy/platforms/submitter.py
-autosubmit_legacy/platforms/headers/__init__.py
-autosubmit_legacy/platforms/headers/ec_cca_header.py
-autosubmit_legacy/platforms/headers/ec_header.py
-autosubmit_legacy/platforms/headers/local_header.py
-autosubmit_legacy/platforms/headers/lsf_header.py
-autosubmit_legacy/platforms/headers/pbs10_header.py
-autosubmit_legacy/platforms/headers/pbs11_header.py
-autosubmit_legacy/platforms/headers/pbs12_header.py
-autosubmit_legacy/platforms/headers/ps_header.py
-autosubmit_legacy/platforms/headers/sge_header.py
-autosubmit_legacy/platforms/headers/slurm_header.py
-autosubmit_legacy/platforms/wrappers/__init__.py
-autosubmit_legacy/platforms/wrappers/wrapper_builder.py
-autosubmit_legacy/platforms/wrappers/wrapper_factory.py
-builders/__init__.py
-builders/basic_builder.py
-builders/configuration_facade_builder.py
-builders/experiment_history_builder.py
-builders/joblist_helper_builder.py
-builders/joblist_loader_builder.py
-builders/pkl_organizer_builder.py
-common/__init__.py
-common/utils.py
-common/utils_for_testing.py
-components/__init__.py
-components/experiment/__init__.py
-components/experiment/configuration_facade.py
-components/experiment/pkl_organizer.py
-components/experiment/test.py
-components/jobs/__init__.py
-components/jobs/job_factory.py
-components/jobs/job_support.py
-components/jobs/joblist_helper.py
-components/jobs/joblist_loader.py
-components/jobs/test.py
-components/jobs/utils.py
-components/representations/__init__.py
-components/representations/graph/__init__.py
-components/representations/graph/edge.py
-components/representations/graph/graph.py
-components/representations/graph/test.py
-components/representations/tree/__init__.py
-components/representations/tree/test.py
-components/representations/tree/tree.py
-config/__init__.py
-config/basicConfig.py
-config/config_common.py
-database/__init__.py
-database/db_common.py
-database/db_jobdata.py
-database/db_manager.py
-database/db_structure.py
-experiment/__init__.py
-experiment/as_times_db_manager.py
-experiment/common_db_requests.py
-experiment/common_requests.py
-experiment/experiment_common.py
-experiment/experiment_db_manager.py
-experiment/test.py
-experiment/utils.py
-git/__init__.py
-git/autosubmit_git.py
-history/__init__.py
-history/experiment_history.py
-history/experiment_status.py
-history/experiment_status_manager.py
-history/internal_logging.py
-history/strategies.py
-history/test.py
-history/test_job_history.py
-history/test_strategies.py
-history/test_utils.py
-history/utils.py
-history/data_classes/__init__.py
-history/data_classes/experiment_run.py
-history/data_classes/job_data.py
-history/database_managers/__init__.py
-history/database_managers/database_manager.py
-history/database_managers/database_models.py
-history/database_managers/experiment_history_db_manager.py
-history/database_managers/experiment_status_db_manager.py
-history/database_managers/test.py
-history/platform_monitor/__init__.py
-history/platform_monitor/platform_monitor.py
-history/platform_monitor/platform_utils.py
-history/platform_monitor/slurm_monitor.py
-history/platform_monitor/slurm_monitor_item.py
-history/platform_monitor/test.py
-monitor/__init__.py
-monitor/diagram.py
-monitor/monitor.py
-monitor/utils.py
-notifications/__init__.py
-notifications/mail_notifier.py
-notifications/notifier.py
-performance/__init__.py
-performance/performance_metrics.py
-performance/utils.py
-statistics/__init__.py
-statistics/job_stat.py
-statistics/statistics.py
-statistics/stats_summary.py
-statistics/test.py
-statistics/utils.py
-workers/__init__.py
-workers/populate_details_db.py
-workers/populate_graph.py
-workers/populate_queue_run_times.py
-workers/populate_running_experiments.py
-workers/test.py
-workers/test_esarchive.py
-workers/verify_complete.py
-workers/business/__init__.py
-workers/business/populate_times.py
-workers/business/process_graph_drawings.py
-workers/deprecated/__init__.py
-workers/deprecated/fix_historic.py
-workers/deprecated/fix_historic_energy.py
-workers/populate_details/__init__.py
-workers/populate_details/populate.py
-workers/populate_details/test.py
\ No newline at end of file
diff --git a/autosubmit_api/autosubmit_api.egg-info/dependency_links.txt b/autosubmit_api/autosubmit_api.egg-info/dependency_links.txt
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/autosubmit_api/autosubmit_api.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/autosubmit_api/autosubmit_api.egg-info/requires.txt b/autosubmit_api/autosubmit_api.egg-info/requires.txt
deleted file mode 100644
index d48668615a82bcef4d7bdaf81c0326244541e470..0000000000000000000000000000000000000000
--- a/autosubmit_api/autosubmit_api.egg-info/requires.txt
+++ /dev/null
@@ -1,37 +0,0 @@
-bscearth.utils==0.5.2
-argparse<2,>=1.2
-python-dateutil>2
-pydotplus>=2
-pyparsing>=2.0.1
-numpy
-matplotlib
-paramiko==2.6.0
-mock>=1.3.0
-portalocker>=0.5.7
-networkx
-bscearth.utils
-Flask==1.0.4
-Flask-Cors==3.0.8
-Flask-Jsonpify==1.5.0
-Flask-RESTful==0.3.7
-SQLAlchemy==1.3.11
-PyJWT==1.7.1
-Flask==1.1.1
-Flask-Cors==3.0.8
-Flask-Jsonpify==1.5.0
-Flask-RESTful==0.3.7
-gunicorn==19.9.0
-mock==3.0.5
-networkx==2.2
-numpy==1.16.4
-paramiko==1.15.0
-portalocker==0.5.7
-pydotplus==2.0.2
-pydot==1.4.1
-regex==2019.6.8
-requests==2.22.0
-graphviz==0.13
-enum34==1.1.6
-typing==3.7.4.3
-radical.saga==0.70.0
-scipy==1.2.2
diff --git a/autosubmit_api/autosubmit_api.egg-info/top_level.txt b/autosubmit_api/autosubmit_api.egg-info/top_level.txt
deleted file mode 100644
index 25b178aab7083c3934c3d3f0b90bdc5ac4e5988d..0000000000000000000000000000000000000000
--- a/autosubmit_api/autosubmit_api.egg-info/top_level.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-autosubmit_legacy
-builders
-common
-components
-config
-database
-experiment
-git
-history
-monitor
-notifications
-performance
-statistics
-workers
diff --git a/autosubmit_api/autosubmit_legacy/__init__.pyc b/autosubmit_api/autosubmit_legacy/__init__.pyc
deleted file mode 100644
index 49bdbde9f260188e7702232471cd86878c1b2c07..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/__init__.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/autosubmit.py b/autosubmit_api/autosubmit_legacy/autosubmit.py
index 4a7c4dba46018119fbda5636654d4babf5aac686..cf782f3bd65e03af1d66a721324c64c816ce075e 100644
--- a/autosubmit_api/autosubmit_legacy/autosubmit.py
+++ b/autosubmit_api/autosubmit_legacy/autosubmit.py
@@ -41,8 +41,8 @@ import subprocess
import argparse
sys.path.insert(0, os.path.abspath('.'))
-from ..config.basicConfig import BasicConfig
-from ..config.config_common import AutosubmitConfig
+from ..config.basicConfig import APIBasicConfig
+from ..config.config_common import AutosubmitConfigResolver
from bscearth.utils.config_parser import ConfigParserFactory
from .job.job_common import Status
from ..git.autosubmit_git import AutosubmitGit
@@ -131,7 +131,7 @@ class Autosubmit:
Parse arguments given to an executable and start execution of command given
"""
try:
- BasicConfig.read()
+ APIBasicConfig.read()
parser = argparse.ArgumentParser(
description='Main executable for autosubmit. ')
@@ -561,17 +561,17 @@ class Autosubmit:
:type expid_delete: str
:param expid_delete: identifier of the experiment to delete
"""
- if expid_delete == '' or expid_delete is None and not os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
+ if expid_delete == '' or expid_delete is None and not os.path.exists(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR,
expid_delete)):
Log.info("Experiment directory does not exist.")
else:
Log.info("Removing experiment directory...")
ret = False
- if pwd.getpwuid(os.stat(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid_delete)).st_uid).pw_name == os.getlogin():
+ if pwd.getpwuid(os.stat(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid_delete)).st_uid).pw_name == os.getlogin():
try:
shutil.rmtree(os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, expid_delete))
+ APIBasicConfig.LOCAL_ROOT_DIR, expid_delete))
except OSError as e:
Log.warning('Can not delete experiment folder: {0}', e)
return ret
@@ -603,10 +603,10 @@ class Autosubmit:
:return: experiment identifier. If method fails, returns ''.
:rtype: str
"""
- BasicConfig.read()
+ APIBasicConfig.read()
log_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, 'ASlogs', 'expid.log'.format(os.getuid()))
+ APIBasicConfig.LOCAL_ROOT_DIR, 'ASlogs', 'expid.log'.format(os.getuid()))
try:
Log.set_file(log_path)
except IOError as e:
@@ -625,10 +625,10 @@ class Autosubmit:
if exp_id == '':
return ''
try:
- os.mkdir(os.path.join(BasicConfig.LOCAL_ROOT_DIR, exp_id))
+ os.mkdir(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, exp_id))
os.mkdir(os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, exp_id, 'conf'))
+ APIBasicConfig.LOCAL_ROOT_DIR, exp_id, 'conf'))
Log.info("Copying config files...")
# autosubmit config and experiment copied from AS.
@@ -639,18 +639,18 @@ class Autosubmit:
new_filename = filename[:index] + \
"_" + exp_id + filename[index:]
- if filename == 'platforms.conf' and BasicConfig.DEFAULT_PLATFORMS_CONF != '':
+ if filename == 'platforms.conf' and APIBasicConfig.DEFAULT_PLATFORMS_CONF != '':
content = open(os.path.join(
- BasicConfig.DEFAULT_PLATFORMS_CONF, filename)).read()
- elif filename == 'jobs.conf' and BasicConfig.DEFAULT_JOBS_CONF != '':
+ APIBasicConfig.DEFAULT_PLATFORMS_CONF, filename)).read()
+ elif filename == 'jobs.conf' and APIBasicConfig.DEFAULT_JOBS_CONF != '':
content = open(os.path.join(
- BasicConfig.DEFAULT_JOBS_CONF, filename)).read()
+ APIBasicConfig.DEFAULT_JOBS_CONF, filename)).read()
else:
content = resource_string(
'autosubmit.config', 'files/' + filename)
conf_new_filename = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, exp_id, "conf", new_filename)
+ APIBasicConfig.LOCAL_ROOT_DIR, exp_id, "conf", new_filename)
Log.debug(conf_new_filename)
open(conf_new_filename, 'w').write(content)
Autosubmit._prepare_conf_files(
@@ -662,18 +662,18 @@ class Autosubmit:
return ''
else:
try:
- if os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR, copy_id)):
+ if os.path.exists(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, copy_id)):
exp_id = copy_experiment(
copy_id, description, Autosubmit.autosubmit_version, test, operational)
if exp_id == '':
return ''
dir_exp_id = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, exp_id)
+ APIBasicConfig.LOCAL_ROOT_DIR, exp_id)
os.mkdir(dir_exp_id)
os.mkdir(dir_exp_id + '/conf')
Log.info("Copying previous experiment config directories")
conf_copy_id = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, copy_id, "conf")
+ APIBasicConfig.LOCAL_ROOT_DIR, copy_id, "conf")
files = os.listdir(conf_copy_id)
for filename in files:
if os.path.isfile(os.path.join(conf_copy_id, filename)):
@@ -685,8 +685,8 @@ class Autosubmit:
Autosubmit._prepare_conf_files(
exp_id, hpc, Autosubmit.autosubmit_version, dummy)
#####
- autosubmit_config = AutosubmitConfig(
- copy_id, BasicConfig, ConfigParserFactory())
+ autosubmit_config = AutosubmitConfigResolver(
+ copy_id, APIBasicConfig, ConfigParserFactory())
if autosubmit_config.check_conf_files():
project_type = autosubmit_config.get_project_type()
if project_type == "git":
@@ -707,12 +707,12 @@ class Autosubmit:
return ''
Log.debug("Creating temporal directory...")
- exp_id_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, exp_id)
+ exp_id_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, exp_id)
tmp_path = os.path.join(exp_id_path, "tmp")
os.mkdir(tmp_path)
os.chmod(tmp_path, 0o775)
- os.mkdir(os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR))
- os.chmod(os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR), 0o775)
+ os.mkdir(os.path.join(tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR))
+ os.chmod(os.path.join(tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR), 0o775)
Log.debug("Creating temporal remote directory...")
remote_tmp_path = os.path.join(tmp_path, "LOG_" + exp_id)
os.mkdir(remote_tmp_path)
@@ -742,14 +742,14 @@ class Autosubmit:
:rtype: bool
"""
log_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'delete.log'.format(os.getuid()))
+ APIBasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'delete.log'.format(os.getuid()))
try:
Log.set_file(log_path)
except IOError as e:
Log.error("Can not create log file in path {0}: {1}".format(
log_path, e.message))
- if os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)):
+ if os.path.exists(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)):
if force or Autosubmit._user_yes_no_query("Do you want to delete " + expid + " ?"):
return Autosubmit._delete_expid(expid)
else:
@@ -787,9 +787,9 @@ class Autosubmit:
if expid is None:
Log.critical("Missing experiment id")
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
- tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
+ tmp_path = os.path.join(exp_path, APIBasicConfig.LOCAL_TMP_DIR)
if os.path.exists(os.path.join(tmp_path, 'autosubmit.lock')):
locked = True
else:
@@ -802,10 +802,10 @@ class Autosubmit:
return 1
Log.info("Starting inspect command")
Log.set_file(os.path.join(
- tmp_path, BasicConfig.LOCAL_ASLOG_DIR, 'generate.log'))
+ tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR, 'generate.log'))
os.system('clear')
signal.signal(signal.SIGINT, signal_handler)
- as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not generate scripts with invalid configuration')
return False
@@ -816,9 +816,9 @@ class Autosubmit:
safetysleeptime = as_conf.get_safetysleeptime()
Log.debug("The Experiment name is: {0}", expid)
Log.debug("Sleep: {0}", safetysleeptime)
- packages_persistence = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages_persistence = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid)
- os.chmod(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
+ os.chmod(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
"pkl", "job_packages_" + expid + ".db"), 0o664)
packages_persistence.reset_table(True)
@@ -989,10 +989,10 @@ class Autosubmit:
if expid is None:
Log.critical("Missing experiment id")
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
- tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR)
- aslogs_path = os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
+ tmp_path = os.path.join(exp_path, APIBasicConfig.LOCAL_TMP_DIR)
+ aslogs_path = os.path.join(tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR)
if not os.path.exists(aslogs_path):
os.mkdir(aslogs_path)
os.chmod(aslogs_path, 0o775)
@@ -1006,7 +1006,7 @@ class Autosubmit:
import platform
host = platform.node()
print(host)
- if BasicConfig.ALLOWED_HOSTS and host not in BasicConfig.ALLOWED_HOSTS:
+ if APIBasicConfig.ALLOWED_HOSTS and host not in APIBasicConfig.ALLOWED_HOSTS:
Log.info("\n Autosubmit run command is not allowed on this host")
return False
@@ -1021,8 +1021,8 @@ class Autosubmit:
signal.signal(signal.SIGINT, signal_handler)
- as_conf = AutosubmitConfig(
- expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not run with invalid configuration')
return False
@@ -1047,7 +1047,7 @@ class Autosubmit:
Log.info("Starting job submission...")
pkl_dir = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
+ APIBasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
job_list = Autosubmit.load_job_list(
expid, as_conf, notransitive=notransitive)
@@ -1074,11 +1074,11 @@ class Autosubmit:
job_list.check_scripts(as_conf)
- packages_persistence = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages_persistence = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid)
if as_conf.get_wrapper_type() != 'none':
- os.chmod(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
+ os.chmod(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR,
expid, "pkl", "job_packages_" + expid + ".db"), 0o664)
packages = packages_persistence.load()
for (exp_id, package_name, job_name) in packages:
@@ -1166,7 +1166,7 @@ class Autosubmit:
if prev_status != job.update_status(as_conf.get_copy_remote_logs() == 'true'):
if as_conf.get_notifications() == 'true':
if Status.VALUE_TO_KEY[job.status] in job.notify_on:
- Notifier.notify_status_change(MailNotifier(BasicConfig), expid, job.name,
+ Notifier.notify_status_change(MailNotifier(APIBasicConfig), expid, job.name,
Status.VALUE_TO_KEY[prev_status],
Status.VALUE_TO_KEY[job.status],
as_conf.get_mails_to())
@@ -1189,7 +1189,7 @@ class Autosubmit:
if prev_status != job.update_status(as_conf.get_copy_remote_logs() == 'true'):
if as_conf.get_notifications() == 'true':
if Status.VALUE_TO_KEY[job.status] in job.notify_on:
- Notifier.notify_status_change(MailNotifier(BasicConfig), expid, job.name,
+ Notifier.notify_status_change(MailNotifier(APIBasicConfig), expid, job.name,
Status.VALUE_TO_KEY[prev_status],
Status.VALUE_TO_KEY[job.status],
as_conf.get_mails_to())
@@ -1349,9 +1349,9 @@ class Autosubmit:
:param hide: hides plot window
:type hide: bool
"""
- BasicConfig.read()
+ APIBasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
if not os.path.exists(exp_path):
Log.critical(
@@ -1359,16 +1359,16 @@ class Autosubmit:
Log.warning("Does an experiment with the given id exist?")
return 1
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
- BasicConfig.LOCAL_TMP_DIR, BasicConfig.LOCAL_ASLOG_DIR, 'monitor.log'))
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
+ APIBasicConfig.LOCAL_TMP_DIR, APIBasicConfig.LOCAL_ASLOG_DIR, 'monitor.log'))
Log.info("Getting job list...")
- as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not run with invalid configuration')
return False
- pkl_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
+ pkl_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
job_list = Autosubmit.load_job_list(
expid, as_conf, notransitive=notransitive, monitor=True)
@@ -1444,9 +1444,9 @@ class Autosubmit:
job.parents = job.parents - referenced_jobs_to_remove
# WRAPPERS
if as_conf.get_wrapper_type() != 'none' and check_wrapper:
- packages_persistence = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages_persistence = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid)
- os.chmod(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
+ os.chmod(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
"pkl", "job_packages_" + expid + ".db"), 0o664)
packages_persistence.reset_table(True)
referenced_jobs_to_remove = set()
@@ -1469,7 +1469,7 @@ class Autosubmit:
packages = packages_persistence.load(True)
else:
- packages = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid).load()
# print(packages)
@@ -1511,24 +1511,24 @@ class Autosubmit:
:param hide: hides plot window
:type hide: bool
"""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
if not os.path.exists(exp_path):
Log.critical(
"The directory %s is needed and does not exist." % exp_path)
Log.warning("Does an experiment with the given id exist?")
return 1
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
- BasicConfig.LOCAL_TMP_DIR, BasicConfig.LOCAL_ASLOG_DIR, 'statistics.log'))
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
+ APIBasicConfig.LOCAL_TMP_DIR, APIBasicConfig.LOCAL_ASLOG_DIR, 'statistics.log'))
Log.info("Loading jobs...")
- as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not run with invalid configuration')
return False
- pkl_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
+ pkl_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
job_list = Autosubmit.load_job_list(
expid, as_conf, notransitive=notransitive)
Log.debug("Job list restored from {0} files", pkl_dir)
@@ -1583,8 +1583,8 @@ class Autosubmit:
:param plot: set True to delete outdated plots
:param stats: set True to delete outdated stats
"""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
if not os.path.exists(exp_path):
Log.critical(
"The directory %s is needed and does not exist." % exp_path)
@@ -1592,11 +1592,11 @@ class Autosubmit:
return 1
if create_log_file:
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
- BasicConfig.LOCAL_TMP_DIR, BasicConfig.LOCAL_ASLOG_DIR, 'clean_exp.log'))
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
+ APIBasicConfig.LOCAL_TMP_DIR, APIBasicConfig.LOCAL_ASLOG_DIR, 'clean_exp.log'))
if project:
- autosubmit_config = AutosubmitConfig(
- expid, BasicConfig, ConfigParserFactory())
+ autosubmit_config = AutosubmitConfigResolver(
+ expid, APIBasicConfig, ConfigParserFactory())
if not autosubmit_config.check_conf_files():
Log.critical(
'Can not clean project with invalid configuration')
@@ -1639,24 +1639,24 @@ class Autosubmit:
:param hide: hides plot window
:type hide: bool
"""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
if not os.path.exists(exp_path):
Log.critical(
"The directory %s is needed and does not exist." % exp_path)
Log.warning("Does an experiment with the given id exist?")
return 1
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
- BasicConfig.LOCAL_TMP_DIR, BasicConfig.LOCAL_ASLOG_DIR, 'recovery.log'))
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
+ APIBasicConfig.LOCAL_TMP_DIR, APIBasicConfig.LOCAL_ASLOG_DIR, 'recovery.log'))
- as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not run with invalid configuration')
return False
Log.info('Recovering experiment {0}'.format(expid))
- pkl_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
+ pkl_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
job_list = Autosubmit.load_job_list(
expid, as_conf, notransitive=notransitive, monitor=True)
Log.debug("Job list restored from {0} files", pkl_dir)
@@ -1724,7 +1724,7 @@ class Autosubmit:
Log.result("Recovery finalized")
- packages = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid).load()
groups_dict = dict()
@@ -1757,13 +1757,13 @@ class Autosubmit:
:param offer:
"""
log_file = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'migrate_{0}.log'.format(experiment_id))
+ APIBasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'migrate_{0}.log'.format(experiment_id))
Log.set_file(log_file)
if offer:
Log.info('Migrating experiment {0}'.format(experiment_id))
- as_conf = AutosubmitConfig(
- experiment_id, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ experiment_id, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not proceed with invalid configuration')
return False
@@ -1899,8 +1899,8 @@ class Autosubmit:
Log.critical("The experiment cannot be picked up")
return False
Log.info("Local files/dirs have been successfully picked up")
- as_conf = AutosubmitConfig(
- experiment_id, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ experiment_id, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not proceed with invalid configuration')
return False
@@ -1956,20 +1956,20 @@ class Autosubmit:
:param experiment_id: experiment identifier:
:type experiment_id: str
"""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, experiment_id)
if not os.path.exists(exp_path):
Log.critical(
"The directory {0} is needed and does not exist.", exp_path)
Log.warning("Does an experiment with the given id exist?")
return False
- log_file = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id,
- BasicConfig.LOCAL_TMP_DIR, BasicConfig.LOCAL_ASLOG_DIR, 'check_exp.log')
+ log_file = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, experiment_id,
+ APIBasicConfig.LOCAL_TMP_DIR, APIBasicConfig.LOCAL_ASLOG_DIR, 'check_exp.log')
Log.set_file(log_file)
- as_conf = AutosubmitConfig(
- experiment_id, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ experiment_id, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
return False
@@ -1984,7 +1984,7 @@ class Autosubmit:
return False
pkl_dir = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, experiment_id, 'pkl')
+ APIBasicConfig.LOCAL_ROOT_DIR, experiment_id, 'pkl')
job_list = Autosubmit.load_job_list(
experiment_id, as_conf, notransitive=notransitive)
Log.debug("Job list restored from {0} files", pkl_dir)
@@ -2015,8 +2015,8 @@ class Autosubmit:
model = ""
branch = ""
hpc = ""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, experiment_id)
if not os.path.exists(exp_path):
return user, created, model, branch, hpc
@@ -2033,8 +2033,8 @@ class Autosubmit:
os.path.getmtime(experiment_file))
try:
- as_conf = AutosubmitConfig(
- experiment_id, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ experiment_id, APIBasicConfig, ConfigParserFactory())
as_conf.reload()
project_type = as_conf.get_project_type()
@@ -2377,10 +2377,10 @@ class Autosubmit:
Creates a new database instance for autosubmit at the configured path
"""
- BasicConfig.read()
+ APIBasicConfig.read()
Log.set_file(os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'install.log'))
- if not os.path.exists(BasicConfig.DB_PATH):
+ APIBasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'install.log'))
+ if not os.path.exists(APIBasicConfig.DB_PATH):
Log.info("Creating autosubmit database...")
qry = resource_string('autosubmit.database', 'data/autosubmit.sql')
if not create_db(qry):
@@ -2404,10 +2404,10 @@ class Autosubmit:
:param expid: experiment identifier
:type expid: str
"""
- BasicConfig.read()
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
- BasicConfig.LOCAL_TMP_DIR, BasicConfig.LOCAL_ASLOG_DIR, 'refresh.log'))
- as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory())
+ APIBasicConfig.read()
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid,
+ APIBasicConfig.LOCAL_TMP_DIR, APIBasicConfig.LOCAL_ASLOG_DIR, 'refresh.log'))
+ as_conf = AutosubmitConfigResolver(expid, APIBasicConfig, ConfigParserFactory())
as_conf.reload()
if not as_conf.check_expdef_conf():
Log.critical('Can not copy with invalid configuration')
@@ -2430,17 +2430,17 @@ class Autosubmit:
:param expid: experiment identifier
:type expid: str
"""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
if not os.path.exists(exp_path):
Log.critical(
"The directory %s is needed and does not exist." % exp_path)
Log.warning("Does an experiment with the given id exist?")
return 1
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR,
"ASlogs", 'archive_{0}.log'.format(expid)))
- exp_folder = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
+ exp_folder = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
if clean:
# Cleaning to reduce file size.
@@ -2451,7 +2451,7 @@ class Autosubmit:
# Getting year of last completed. If not, year of expid folder
year = None
- tmp_folder = os.path.join(exp_folder, BasicConfig.LOCAL_TMP_DIR)
+ tmp_folder = os.path.join(exp_folder, APIBasicConfig.LOCAL_TMP_DIR)
if os.path.isdir(tmp_folder):
for filename in os.listdir(tmp_folder):
if filename.endswith("COMPLETED"):
@@ -2467,7 +2467,7 @@ class Autosubmit:
# Creating tar file
Log.info("Creating tar file ... ")
try:
- year_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, str(year))
+ year_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, str(year))
if not os.path.exists(year_path):
os.mkdir(year_path)
with tarfile.open(os.path.join(year_path, '{0}.tar.gz'.format(expid)), "w:gz") as tar:
@@ -2499,10 +2499,10 @@ class Autosubmit:
:param experiment_id: experiment identifier
:type experiment_id: str
"""
- BasicConfig.read()
- Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
+ APIBasicConfig.read()
+ Log.set_file(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR,
"ASlogs", 'unarchive_{0}.log'.format(experiment_id)))
- exp_folder = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id)
+ exp_folder = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, experiment_id)
if os.path.exists(exp_folder):
Log.error("Experiment {0} is not archived", experiment_id)
@@ -2512,7 +2512,7 @@ class Autosubmit:
year = datetime.datetime.today().year
archive_path = None
while year > 2000:
- archive_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, str(
+ archive_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, str(
year), '{0}.tar.gz'.format(experiment_id))
if os.path.exists(archive_path):
break
@@ -2594,11 +2594,11 @@ class Autosubmit:
:type output: str
"""
- BasicConfig.read()
+ APIBasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
- tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR)
- aslogs_path = os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR)
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
+ tmp_path = os.path.join(exp_path, APIBasicConfig.LOCAL_TMP_DIR)
+ aslogs_path = os.path.join(tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR)
if not os.path.exists(aslogs_path):
os.mkdir(aslogs_path)
os.chmod(aslogs_path, 0o775)
@@ -2614,10 +2614,10 @@ class Autosubmit:
Log.info(
"Preparing .lock file to avoid multiple instances with same expid.")
Log.set_file(os.path.join(
- tmp_path, BasicConfig.LOCAL_ASLOG_DIR, 'create_exp.log'))
+ tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR, 'create_exp.log'))
- as_conf = AutosubmitConfig(
- expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not create with invalid configuration')
return False
@@ -2626,7 +2626,7 @@ class Autosubmit:
if not Autosubmit._copy_code(as_conf, expid, project_type, False):
return False
- update_job = not os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl",
+ update_job = not os.path.exists(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl",
"job_list_" + expid + ".pkl"))
Autosubmit._create_project_associated_conf(
as_conf, False, update_job)
@@ -2652,7 +2652,7 @@ class Autosubmit:
rerun = as_conf.get_rerun()
Log.info("\nCreating the jobs list...")
- job_list = JobList(expid, BasicConfig, ConfigParserFactory(),
+ job_list = JobList(expid, APIBasicConfig, ConfigParserFactory(),
Autosubmit._get_job_list_persistence(expid, as_conf))
date_format = ''
@@ -2676,7 +2676,7 @@ class Autosubmit:
job_list.remove_rerun_only_jobs(notransitive)
Log.info("\nSaving the jobs list...")
job_list.save()
- JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid).reset_table()
groups_dict = dict()
@@ -2695,7 +2695,7 @@ class Autosubmit:
# WRAPPERS
if as_conf.get_wrapper_type() != 'none' and check_wrappers:
packages_persistence = JobPackagePersistence(
- os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"), "job_packages_" + expid)
+ os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"), "job_packages_" + expid)
packages_persistence.reset_table(True)
referenced_jobs_to_remove = set()
job_list_wrappers = copy.deepcopy(job_list)
@@ -2755,7 +2755,7 @@ class Autosubmit:
svn_project_url = as_conf.get_svn_project_url()
svn_project_revision = as_conf.get_svn_project_revision()
project_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, expid, BasicConfig.LOCAL_PROJ_DIR)
+ APIBasicConfig.LOCAL_ROOT_DIR, expid, APIBasicConfig.LOCAL_PROJ_DIR)
if os.path.exists(project_path):
Log.info("Using project folder: {0}", project_path)
if not force:
@@ -2781,7 +2781,7 @@ class Autosubmit:
elif project_type == "local":
local_project_path = as_conf.get_local_project_path()
project_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, expid, BasicConfig.LOCAL_PROJ_DIR)
+ APIBasicConfig.LOCAL_ROOT_DIR, expid, APIBasicConfig.LOCAL_PROJ_DIR)
local_destination = os.path.join(project_path, project_destination)
if os.path.exists(project_path):
@@ -2861,9 +2861,9 @@ class Autosubmit:
:param hide: hides plot window
:type hide: bool
"""
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
- tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR)
+ APIBasicConfig.read()
+ exp_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid)
+ tmp_path = os.path.join(exp_path, APIBasicConfig.LOCAL_TMP_DIR)
if not os.path.exists(exp_path):
Log.critical(
"The directory %s is needed and does not exist." % exp_path)
@@ -2877,7 +2877,7 @@ class Autosubmit:
"Preparing .lock file to avoid multiple instances with same expid.")
Log.set_file(os.path.join(
- tmp_path, BasicConfig.LOCAL_ASLOG_DIR, 'set_status.log'))
+ tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR, 'set_status.log'))
Log.debug('Exp ID: {0}', expid)
Log.debug('Save: {0}', save)
Log.debug('Final status: {0}', final)
@@ -2886,8 +2886,8 @@ class Autosubmit:
Log.debug('Status of jobs to change: {0}', filter_status)
Log.debug('Sections to change: {0}', filter_section)
wrongExpid = 0
- as_conf = AutosubmitConfig(
- expid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(
+ expid, APIBasicConfig, ConfigParserFactory())
if not as_conf.check_conf_files():
Log.critical('Can not run with invalid configuration')
return False
@@ -2997,9 +2997,9 @@ class Autosubmit:
"Save disabled due invalid expid, please check or/and jobs expid name")
if as_conf.get_wrapper_type() != 'none' and check_wrapper:
- packages_persistence = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages_persistence = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid)
- os.chmod(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
+ os.chmod(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR,
expid, "pkl", "job_packages_" + expid + ".db"), 0o664)
packages_persistence.reset_table(True)
referenced_jobs_to_remove = set()
@@ -3023,7 +3023,7 @@ class Autosubmit:
packages = packages_persistence.load(True)
else:
- packages = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ packages = JobPackagePersistence(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_packages_" + expid).load()
if not noplot:
groups_dict = dict()
@@ -3083,7 +3083,7 @@ class Autosubmit:
:param dummy: if True, creates a dummy experiment adding some default values
:type dummy: bool
"""
- as_conf = AutosubmitConfig(exp_id, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(exp_id, APIBasicConfig, ConfigParserFactory())
as_conf.set_version(autosubmit_version)
as_conf.set_expid(exp_id)
as_conf.set_platform(hpc)
@@ -3209,7 +3209,7 @@ class Autosubmit:
if storage_type == 'pkl':
return JobListPersistencePkl()
elif storage_type == 'db':
- return JobListPersistenceDb(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
+ return JobListPersistenceDb(os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
"job_list_" + expid)
# communications library not known
@@ -3319,7 +3319,7 @@ class Autosubmit:
@staticmethod
def _change_conf(testid, hpc, start_date, member, chunks, branch, random_select=False):
- as_conf = AutosubmitConfig(testid, BasicConfig, ConfigParserFactory())
+ as_conf = AutosubmitConfigResolver(testid, APIBasicConfig, ConfigParserFactory())
exp_parser = as_conf.get_parser(
ConfigParserFactory(), as_conf.experiment_file)
if exp_parser.get_bool_option('rerun', "RERUN", True):
@@ -3379,9 +3379,9 @@ class Autosubmit:
"""
# print("Load Job List Start")
try:
- BasicConfig.read()
+ APIBasicConfig.read()
rerun = as_conf.get_rerun()
- job_list = JobList(expid, BasicConfig, ConfigParserFactory(),
+ job_list = JobList(expid, APIBasicConfig, ConfigParserFactory(),
Autosubmit._get_job_list_persistence(expid, as_conf))
date_list = as_conf.get_date_list()
date_format = ''
@@ -3403,7 +3403,7 @@ class Autosubmit:
if not monitor:
job_list.rerun(chunk_list, notransitive)
else:
- rerun_list = JobList(expid, BasicConfig, ConfigParserFactory(),
+ rerun_list = JobList(expid, APIBasicConfig, ConfigParserFactory(),
Autosubmit._get_job_list_persistence(expid, as_conf))
rerun_list.generate(date_list, as_conf.get_member_list(), as_conf.get_num_chunks(),
as_conf.get_chunk_ini(),
diff --git a/autosubmit_api/autosubmit_legacy/autosubmit.pyc b/autosubmit_api/autosubmit_legacy/autosubmit.pyc
deleted file mode 100644
index f8cfe1132f1d4ded653b4c2c646d0ff8d14ca596..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/autosubmit.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/__init__.pyc b/autosubmit_api/autosubmit_legacy/job/__init__.pyc
deleted file mode 100644
index 325c7874fe54b1de27c286e0f053df817ffd2faa..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/__init__.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job.py b/autosubmit_api/autosubmit_legacy/job/job.py
index 59ca6164a8fcedf65f9669851c17e7971307b0d5..f5c9c09a6fa6bcf65959149797a08a4df5a4f217 100644
--- a/autosubmit_api/autosubmit_legacy/job/job.py
+++ b/autosubmit_api/autosubmit_legacy/job/job.py
@@ -30,7 +30,7 @@ from collections import OrderedDict
from ...monitor.monitor import Monitor
from .job_common import Status, Type
-from ...config.basicConfig import BasicConfig
+from ...config.basicConfig import APIBasicConfig
from .job_common import StatisticsSnippetBash, StatisticsSnippetPython
from .job_common import StatisticsSnippetR, StatisticsSnippetEmpty
#from autosubmitAPIwu.config.config_common import AutosubmitConfig
@@ -104,7 +104,7 @@ class Job(object):
self.expid = name.split('_')[0]
self.parameters = dict()
self._tmp_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, self.expid, BasicConfig.LOCAL_TMP_DIR)
+ APIBasicConfig.LOCAL_ROOT_DIR, self.expid, APIBasicConfig.LOCAL_TMP_DIR)
self.write_start = False
self._platform = None
self.check = 'True'
@@ -728,7 +728,7 @@ class Job(object):
parameters['CURRENT_LOGDIR'] = job_platform.get_files_path()
parameters['ROOTDIR'] = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, self.expid)
+ APIBasicConfig.LOCAL_ROOT_DIR, self.expid)
parameters['PROJDIR'] = as_conf.get_project_dir()
parameters['NUMMEMBERS'] = len(as_conf.get_member_list())
diff --git a/autosubmit_api/autosubmit_legacy/job/job.pyc b/autosubmit_api/autosubmit_legacy/job/job.pyc
deleted file mode 100644
index ae608934554368795529ad102160520af40b4a98..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_common.pyc b/autosubmit_api/autosubmit_legacy/job/job_common.pyc
deleted file mode 100644
index 60a7501d0a1c9a3fd5e7da7cf30f8f9edbbd0445..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_common.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_dict.pyc b/autosubmit_api/autosubmit_legacy/job/job_dict.pyc
deleted file mode 100644
index c058fefac0e19e828d9991d3c56b870793bdff15..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_dict.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_exceptions.pyc b/autosubmit_api/autosubmit_legacy/job/job_exceptions.pyc
deleted file mode 100644
index ba4cb9e314daa9d575fd84140e2594532327f081..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_exceptions.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_grouping.pyc b/autosubmit_api/autosubmit_legacy/job/job_grouping.pyc
deleted file mode 100644
index 124384bd4d54e37d5aaaab0fe330bbbe3f7e344d..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_grouping.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py
index 2ec4716062945e7d8b3a69308de5a58e47cd34ec..02e23242366693f7c91467c7bddd73e1547f7822 100644
--- a/autosubmit_api/autosubmit_legacy/job/job_list.py
+++ b/autosubmit_api/autosubmit_legacy/job/job_list.py
@@ -50,7 +50,7 @@ from random import shuffle
from dateutil.relativedelta import *
from .job import Job
-from ...config.config_common import AutosubmitConfig
+from ...config.config_common import AutosubmitConfigResolver
from bscearth.utils.log import Log
from .job_dict import DicJobs
from .job_utils import Dependency
@@ -109,7 +109,7 @@ class JobList:
self.sections_checked = set()
self._wrapper_queue = None
try:
- as_conf = AutosubmitConfig(
+ as_conf = AutosubmitConfigResolver(
self.expid, config, ConfigParserFactory())
as_conf.reload()
self._wrapper_queue = as_conf.get_wrapper_queue()
@@ -3026,7 +3026,7 @@ class JobList:
# Getting information
path_local_root = basic_config.LOCAL_ROOT_DIR
path_structure = basic_config.STRUCTURES_DIR
- db_file = os.path.join(path_local_root, "ecearth.db")
+ db_file = os.path.join(path_local_root, basic_config.DB_FILE)
conn = DbRequests.create_connection(db_file)
# job_data = None
# Job information from worker database
diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.pyc b/autosubmit_api/autosubmit_legacy/job/job_list.pyc
deleted file mode 100644
index b7c1a0681b5f1b58b8205e53c781a8a8ef650a21..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_list.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_list_persistence.pyc b/autosubmit_api/autosubmit_legacy/job/job_list_persistence.pyc
deleted file mode 100644
index d3a3d5d5a4492947b21a39d919f9ed26060c4086..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_list_persistence.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_package_persistence.pyc b/autosubmit_api/autosubmit_legacy/job/job_package_persistence.pyc
deleted file mode 100644
index 83c8873d6fa7175cfb71b6858695cf81ffa85f3e..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_package_persistence.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_packager.pyc b/autosubmit_api/autosubmit_legacy/job/job_packager.pyc
deleted file mode 100644
index 69ae92fd04e3311bb9303bb0f9aab4bc3f31814f..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_packager.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_packages.pyc b/autosubmit_api/autosubmit_legacy/job/job_packages.pyc
deleted file mode 100644
index c5894a181ffc59481e96af25e42dca05271297ee..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_packages.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/job_utils.py b/autosubmit_api/autosubmit_legacy/job/job_utils.py
index fd80be04f1c63d46247e6cc0042b6f1c6d6f25ac..65b464c3937a7c25c9d31cae5a2acc0170c0f29e 100644
--- a/autosubmit_api/autosubmit_legacy/job/job_utils.py
+++ b/autosubmit_api/autosubmit_legacy/job/job_utils.py
@@ -27,7 +27,7 @@ from networkx import DiGraph
from networkx import dfs_edges
from networkx import NetworkXError
from .job_package_persistence import JobPackagePersistence
-from ...config.basicConfig import BasicConfig
+from ...config.basicConfig import APIBasicConfig
def transitive_reduction(graph):
@@ -61,7 +61,7 @@ def get_job_package_code(expid, job_name):
:rtype: int or None
"""
try:
- basic_conf = BasicConfig()
+ basic_conf = APIBasicConfig()
basic_conf.read()
packages_wrapper = JobPackagePersistence(os.path.join(basic_conf.LOCAL_ROOT_DIR, expid, "pkl"),"job_packages_" + expid).load(wrapper=True)
packages_wrapper_plus = JobPackagePersistence(os.path.join(basic_conf.LOCAL_ROOT_DIR, expid, "pkl"),"job_packages_" + expid).load(wrapper=False)
diff --git a/autosubmit_api/autosubmit_legacy/job/job_utils.pyc b/autosubmit_api/autosubmit_legacy/job/job_utils.pyc
deleted file mode 100644
index c2e68e53adcc9e578bd95350ebdc15b28c4c524a..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/job_utils.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/job/tree.pyc b/autosubmit_api/autosubmit_legacy/job/tree.pyc
deleted file mode 100644
index 7653d608b704d17781ec27ea28c25f5853887735..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/job/tree.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/__init__.pyc b/autosubmit_api/autosubmit_legacy/platforms/__init__.pyc
deleted file mode 100644
index e19ab6bbc3f392d6c279be2e83c4bd12108a9cc0..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/__init__.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/ecmwf_adaptor.pyc b/autosubmit_api/autosubmit_legacy/platforms/ecmwf_adaptor.pyc
deleted file mode 100644
index 69e9dec946c6d64914c2b15d5ee82ea0e82f7149..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/ecmwf_adaptor.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/ecplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/ecplatform.pyc
deleted file mode 100644
index c93c83f385cf8d33e774e1ce5d2b7dc2db9950ff..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/ecplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/__init__.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/__init__.pyc
deleted file mode 100644
index 9fd954a2a3d9c2b945c67b52bb63da939b73639c..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/__init__.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/ec_cca_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/ec_cca_header.pyc
deleted file mode 100644
index cd001ae52cf62d862be142ece3def116df8d539b..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/ec_cca_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/ec_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/ec_header.pyc
deleted file mode 100644
index e324bc0a7d9836e19035d629da4f1abf8c2263bb..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/ec_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/local_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/local_header.pyc
deleted file mode 100644
index daeddc94b247145bf10f977dc2bba99eac31fce8..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/local_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/lsf_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/lsf_header.pyc
deleted file mode 100644
index ffb6ec747b282fd3b3275967bfd34b1695cd4593..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/lsf_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/pbs10_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/pbs10_header.pyc
deleted file mode 100644
index 62a925d9f0a85cc729a99a20f1872770619154b1..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/pbs10_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/pbs11_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/pbs11_header.pyc
deleted file mode 100644
index 3081967dc161d1d62789c9a4a53adb8344cb7f21..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/pbs11_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/pbs12_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/pbs12_header.pyc
deleted file mode 100644
index eb960f9c8e978a82b78db95b6e1d1adfe4b7fc52..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/pbs12_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/ps_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/ps_header.pyc
deleted file mode 100644
index e76bc5ffcb3777a25da78e75ad082a46b25636b3..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/ps_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/sge_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/sge_header.pyc
deleted file mode 100644
index b74a22bb3265c58d69aeb77beb79c838d4d0c0ba..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/sge_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/headers/slurm_header.pyc b/autosubmit_api/autosubmit_legacy/platforms/headers/slurm_header.pyc
deleted file mode 100644
index 592852ba36080ec12f5dc1cf98ac91cad35e10c2..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/headers/slurm_header.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/locplatform.py b/autosubmit_api/autosubmit_legacy/platforms/locplatform.py
index 043f1c5b0b394bdea8bc3a59f45c039cebd65587..92fdf78bc2de03d856bbd3f05461e222f742fae3 100644
--- a/autosubmit_api/autosubmit_legacy/platforms/locplatform.py
+++ b/autosubmit_api/autosubmit_legacy/platforms/locplatform.py
@@ -24,7 +24,7 @@ import subprocess
from .paramiko_platform import ParamikoPlatform
from .headers.local_header import LocalHeader
-from ...config.basicConfig import BasicConfig
+from ...config.basicConfig import APIBasicConfig
from bscearth.utils.log import Log
@@ -52,7 +52,7 @@ class LocalPlatform(ParamikoPlatform):
"""
Updates commands for platforms
"""
- self.root_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, self.expid)
+ self.root_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, self.expid)
self.remote_log_dir = os.path.join(
self.root_dir, "tmp", 'LOG_' + self.expid)
self.cancel_cmd = "kill -SIGINT"
diff --git a/autosubmit_api/autosubmit_legacy/platforms/locplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/locplatform.pyc
deleted file mode 100644
index d01dc7cab7a9a0076550e7f328a12d2c522d84a2..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/locplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/lsfplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/lsfplatform.pyc
deleted file mode 100644
index d4c08f54ac4ad112b069a136b1b611e506830ef1..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/lsfplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/mn_adaptor.pyc b/autosubmit_api/autosubmit_legacy/platforms/mn_adaptor.pyc
deleted file mode 100644
index 8f77b751b4f9a9368163501cb152dd7322e0fd79..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/mn_adaptor.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/paramiko_platform.pyc b/autosubmit_api/autosubmit_legacy/platforms/paramiko_platform.pyc
deleted file mode 100644
index 095b8b8402fe7411f8eff751bddf3ff89d1c4eb5..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/paramiko_platform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.py b/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.py
index 1290b9641bc947d9b80a0fc1462a03f07dba1f2f..c2121f20c25d6dbb9b986650b8cdb5272d094cec 100644
--- a/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.py
+++ b/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.py
@@ -24,8 +24,8 @@ import os
from bscearth.utils.log import Log
-from ...config.basicConfig import BasicConfig
-from ...config.config_common import AutosubmitConfig
+from ...config.basicConfig import APIBasicConfig
+from ...config.config_common import AutosubmitConfigResolver
from .submitter import Submitter
from .psplatform import PsPlatform
from .lsfplatform import LsfPlatform
@@ -65,14 +65,14 @@ class ParamikoSubmitter(Submitter):
parser = asconf.platforms_parser
platforms = dict()
- local_platform = LocalPlatform(asconf.expid, 'local', BasicConfig)
+ local_platform = LocalPlatform(asconf.expid, 'local', APIBasicConfig)
local_platform.max_wallclock = asconf.get_max_wallclock()
local_platform.max_processors = asconf.get_max_processors()
local_platform.max_waiting_jobs = asconf.get_max_waiting_jobs()
local_platform.total_jobs = asconf.get_total_jobs()
- local_platform.scratch = os.path.join(BasicConfig.LOCAL_ROOT_DIR, asconf.expid, BasicConfig.LOCAL_TMP_DIR)
- local_platform.temp_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, 'ASlogs')
- local_platform.root_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, local_platform.expid)
+ local_platform.scratch = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, asconf.expid, APIBasicConfig.LOCAL_TMP_DIR)
+ local_platform.temp_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, 'ASlogs')
+ local_platform.root_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, local_platform.expid)
local_platform.host = 'localhost'
platforms['local'] = local_platform
platforms['LOCAL'] = local_platform
@@ -86,17 +86,17 @@ class ParamikoSubmitter(Submitter):
platform_version = parser.get_option(section, 'VERSION', '')
try:
if platform_type == 'pbs':
- remote_platform = PBSPlatform(asconf.expid, section.lower(), BasicConfig, platform_version)
+ remote_platform = PBSPlatform(asconf.expid, section.lower(), APIBasicConfig, platform_version)
elif platform_type == 'sge':
- remote_platform = SgePlatform(asconf.expid, section.lower(), BasicConfig)
+ remote_platform = SgePlatform(asconf.expid, section.lower(), APIBasicConfig)
elif platform_type == 'ps':
- remote_platform = PsPlatform(asconf.expid, section.lower(), BasicConfig)
+ remote_platform = PsPlatform(asconf.expid, section.lower(), APIBasicConfig)
elif platform_type == 'lsf':
- remote_platform = LsfPlatform(asconf.expid, section.lower(), BasicConfig)
+ remote_platform = LsfPlatform(asconf.expid, section.lower(), APIBasicConfig)
elif platform_type == 'ecaccess':
- remote_platform = EcPlatform(asconf.expid, section.lower(), BasicConfig, platform_version)
+ remote_platform = EcPlatform(asconf.expid, section.lower(), APIBasicConfig, platform_version)
elif platform_type == 'slurm':
- remote_platform = SlurmPlatform(asconf.expid, section.lower(), BasicConfig)
+ remote_platform = SlurmPlatform(asconf.expid, section.lower(), APIBasicConfig)
else:
raise Exception("Queue type not specified on platform {0}".format(section))
diff --git a/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.pyc b/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.pyc
deleted file mode 100644
index b56f668024f120a9455cb8fabfbf10987a2d2dad..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/paramiko_submitter.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/pbsplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/pbsplatform.pyc
deleted file mode 100644
index 99f206314ad773100ad50c86a958e0d05f322695..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/pbsplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/platform.pyc b/autosubmit_api/autosubmit_legacy/platforms/platform.pyc
deleted file mode 100644
index 12dcdfa4a5267dfa05f4feb6079b0c508f61990f..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/platform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/psplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/psplatform.pyc
deleted file mode 100644
index 13b26c82a77ccd22e91eeec18e3a3fca2dffaa87..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/psplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/saga_platform.pyc b/autosubmit_api/autosubmit_legacy/platforms/saga_platform.pyc
deleted file mode 100644
index 61ae638d8f0de2a0db2f5ed7d61b0a83344ff030..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/saga_platform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.py b/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.py
index 8bccf9d089511888ad9fd5907a69be9d808742db..f89f63f2779e23c02953f24d4f8d56b40c28e2ed 100644
--- a/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.py
+++ b/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.py
@@ -22,8 +22,8 @@ import time
import os
-from ...config.basicConfig import BasicConfig
-from ...config.config_common import AutosubmitConfig
+from ...config.basicConfig import APIBasicConfig
+from ...config.config_common import AutosubmitConfigResolver
from .saga_platform import SagaPlatform
from .submitter import Submitter
@@ -71,7 +71,7 @@ class SagaSubmitter(Submitter):
session = None
platforms = dict()
- local_platform = SagaPlatform(asconf.expid, 'local', BasicConfig)
+ local_platform = SagaPlatform(asconf.expid, 'local', APIBasicConfig)
local_platform.service = None
retry = retries
while local_platform.service is None and retry >= 0:
@@ -86,13 +86,13 @@ class SagaSubmitter(Submitter):
local_platform.max_processors = asconf.get_max_processors()
local_platform.max_waiting_jobs = asconf.get_max_waiting_jobs()
local_platform.total_jobs = asconf.get_total_jobs()
- local_platform.scratch = os.path.join(BasicConfig.LOCAL_ROOT_DIR, asconf.expid, BasicConfig.LOCAL_TMP_DIR)
+ local_platform.scratch = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, asconf.expid, APIBasicConfig.LOCAL_TMP_DIR)
local_platform.project = ''
local_platform.budget = ''
local_platform.reservation = ''
local_platform.exclusivity = ''
local_platform.user = ''
- local_platform.root_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, local_platform.expid)
+ local_platform.root_dir = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, local_platform.expid)
local_platform.transfer = "file"
local_platform.host = 'localhost'
platforms['local'] = local_platform
@@ -105,7 +105,7 @@ class SagaSubmitter(Submitter):
platform_type = parser.get_option(section, 'TYPE', '').lower()
- remote_platform = SagaPlatform(asconf.expid, section.lower(), BasicConfig)
+ remote_platform = SagaPlatform(asconf.expid, section.lower(), APIBasicConfig)
remote_platform.type = platform_type
platform_version = parser.get_option(section, 'VERSION', '')
diff --git a/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.pyc b/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.pyc
deleted file mode 100644
index a06901982eea2f87835817cc89160e15088da0e2..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/saga_submitter.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/sgeplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/sgeplatform.pyc
deleted file mode 100644
index 35fb5ba153e47709d7707a025de6c701f1da7642..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/sgeplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.py b/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.py
index 25d803ecc310b4eccc8007402261499ec8a72b26..02f919fe82fd3fa4ac861c408bd154cdf3b49511 100644
--- a/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.py
+++ b/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.py
@@ -24,7 +24,7 @@ from xml.dom.minidom import parseString
from .paramiko_platform import ParamikoPlatform
from .headers.slurm_header import SlurmHeader
from .wrappers.wrapper_factory import SlurmWrapperFactory
-from ...config.basicConfig import BasicConfig
+from ...config.basicConfig import APIBasicConfig
class SlurmPlatform(ParamikoPlatform):
@@ -50,10 +50,10 @@ class SlurmPlatform(ParamikoPlatform):
self._allow_wrappers = True
self.update_cmds()
- exp_id_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, self.expid)
+ exp_id_path = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, self.expid)
tmp_path = os.path.join(exp_id_path, "tmp")
self._submit_script_path = os.path.join(
- tmp_path, BasicConfig.LOCAL_ASLOG_DIR, "submit_" + self.name + ".sh")
+ tmp_path, APIBasicConfig.LOCAL_ASLOG_DIR, "submit_" + self.name + ".sh")
# No need to write from API
# self._submit_script_file = open(self._submit_script_path, 'w').close()
diff --git a/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.pyc b/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.pyc
deleted file mode 100644
index e06a8d820c3b99a8b902cb79a6637df788831fe5..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/slurmplatform.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/submitter.py b/autosubmit_api/autosubmit_legacy/platforms/submitter.py
index 89d4523e71b155e764c6692ca3676a87bf74badc..408148070231cad30af3ba07e170fccd55faa556 100644
--- a/autosubmit_api/autosubmit_legacy/platforms/submitter.py
+++ b/autosubmit_api/autosubmit_legacy/platforms/submitter.py
@@ -18,7 +18,7 @@
# along with Autosubmit. If not, see .
-from ...config.config_common import AutosubmitConfig
+from ...config.config_common import AutosubmitConfigResolver
class Submitter:
diff --git a/autosubmit_api/autosubmit_legacy/platforms/submitter.pyc b/autosubmit_api/autosubmit_legacy/platforms/submitter.pyc
deleted file mode 100644
index a6457b9b3f1aa54dbc8c75117d7b2833f68d70e5..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/submitter.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/wrappers/__init__.pyc b/autosubmit_api/autosubmit_legacy/platforms/wrappers/__init__.pyc
deleted file mode 100644
index 7cdcbb3f33a32e1423681f466f99897db9326a32..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/wrappers/__init__.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_builder.pyc b/autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_builder.pyc
deleted file mode 100644
index 8d6581211c0b4bcacd394070b3f873d748f0a448..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_builder.pyc and /dev/null differ
diff --git a/autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_factory.pyc b/autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_factory.pyc
deleted file mode 100644
index a8853878c5776519f36a9c4d4afd6bec46419f76..0000000000000000000000000000000000000000
Binary files a/autosubmit_api/autosubmit_legacy/platforms/wrappers/wrapper_factory.pyc and /dev/null differ
diff --git a/autosubmit_api/build/lib/autosubmit_legacy/autosubmit.py b/autosubmit_api/build/lib/autosubmit_legacy/autosubmit.py
deleted file mode 100644
index 4a7c4dba46018119fbda5636654d4babf5aac686..0000000000000000000000000000000000000000
--- a/autosubmit_api/build/lib/autosubmit_legacy/autosubmit.py
+++ /dev/null
@@ -1,3498 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2017 Earth Sciences Department, BSC-CNS
-
-# This file is part of Autosubmit.
-
-# Autosubmit is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# Autosubmit is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with Autosubmit. If not, see .
-# pipeline_test
-
-
-import traceback
-from pyparsing import nestedExpr
-from collections import defaultdict
-from distutils.util import strtobool
-from pkg_resources import require, resource_listdir, resource_exists, resource_string
-import portalocker
-import datetime
-import signal
-import random
-import re
-import shutil
-import sys
-import pwd
-import os
-import copy
-import time
-import tarfile
-import json
-import subprocess
-import argparse
-
-sys.path.insert(0, os.path.abspath('.'))
-from ..config.basicConfig import BasicConfig
-from ..config.config_common import AutosubmitConfig
-from bscearth.utils.config_parser import ConfigParserFactory
-from .job.job_common import Status
-from ..git.autosubmit_git import AutosubmitGit
-from .job.job_list import JobList
-from .job.job_packages import JobPackageThread
-from .job.job_package_persistence import JobPackagePersistence
-from .job.job_list_persistence import JobListPersistenceDb
-from .job.job_list_persistence import JobListPersistencePkl
-from .job.job_grouping import JobGrouping
-from bscearth.utils.log import Log
-from ..database.db_common import create_db
-from ..experiment.experiment_common import new_experiment
-from ..experiment.experiment_common import copy_experiment
-from ..database.db_common import delete_experiment
-from ..database.db_common import get_autosubmit_version
-from ..monitor.monitor import Monitor
-from bscearth.utils.date import date2str
-from ..notifications.mail_notifier import MailNotifier
-from ..notifications.notifier import Notifier
-from .platforms.saga_submitter import SagaSubmitter
-from .platforms.paramiko_submitter import ParamikoSubmitter
-from .job.job_exceptions import WrongTemplateException
-from .job.job_packager import JobPackager
-from .platforms.paramiko_platform import ParamikoTimeout
-"""
-Main module for autosubmit. Only contains an interface class to all functionality implemented on autosubmit
-"""
-
-try:
- # noinspection PyCompatibility
- from configparser import SafeConfigParser
-except ImportError:
- # noinspection PyCompatibility
- from configparser import SafeConfigParser
-
-# It is Python dialog available? (optional dependency)
-try:
- import dialog
-except Exception:
- dialog = None
-
-
-# noinspection PyPackageRequirements
-# noinspection PyPackageRequirements
-# noinspection PyPackageRequirements
-
-# noinspection PyUnusedLocal
-
-
-def signal_handler(signal_received, frame):
- """
- Used to handle interrupt signals, allowing autosubmit to clean before exit
-
- :param signal_received:
- :param frame:
- """
- Log.info('Autosubmit will interrupt at the next safe occasion')
- Autosubmit.exit = True
-
-
-class Autosubmit:
- """
- Interface class for autosubmit.
- """
- # sys.setrecursionlimit(500000)
- # # Get the version number from the relevant file. If not, from autosubmit package
- # scriptdir = os.path.abspath(os.path.dirname(__file__))
-
- # if not os.path.exists(os.path.join(scriptdir, 'VERSION')):
- # scriptdir = os.path.join(scriptdir, os.path.pardir)
-
- # version_path = os.path.join(scriptdir, 'VERSION')
- # readme_path = os.path.join(scriptdir, 'README')
- # changes_path = os.path.join(scriptdir, 'CHANGELOG')
- # if os.path.isfile(version_path):
- # with open(version_path) as f:
- # autosubmit_version = f.read().strip()
- # else:
- # autosubmit_version = require("autosubmitAPIwu")[0].version
-
- exit = False
-
- @staticmethod
- def parse_args():
- """
- Parse arguments given to an executable and start execution of command given
- """
- try:
- BasicConfig.read()
-
- parser = argparse.ArgumentParser(
- description='Main executable for autosubmit. ')
- parser.add_argument('-v', '--version', action='version', version=Autosubmit.autosubmit_version,
- help="returns autosubmit's version number and exit")
- parser.add_argument('-lf', '--logfile', choices=('EVERYTHING', 'DEBUG', 'INFO', 'RESULT', 'USER_WARNING',
- 'WARNING', 'ERROR', 'CRITICAL', 'NO_LOG'),
- default='DEBUG', type=str,
- help="sets file's log level.")
- parser.add_argument('-lc', '--logconsole', choices=('EVERYTHING', 'DEBUG', 'INFO', 'RESULT', 'USER_WARNING',
- 'WARNING', 'ERROR', 'CRITICAL', 'NO_LOG'),
- default='INFO', type=str,
- help="sets console's log level")
-
- subparsers = parser.add_subparsers(dest='command')
-
- # Run
- subparser = subparsers.add_parser(
- 'run', description="runs specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
-
- # Expid
- subparser = subparsers.add_parser(
- 'expid', description="Creates a new experiment")
- group = subparser.add_mutually_exclusive_group()
- group.add_argument(
- '-y', '--copy', help='makes a copy of the specified experiment')
- group.add_argument('-dm', '--dummy', action='store_true',
- help='creates a new experiment with default values, usually for testing')
- group.add_argument('-op', '--operational', action='store_true',
- help='creates a new experiment with operational experiment id')
- subparser.add_argument('-H', '--HPC', required=True,
- help='specifies the HPC to use for the experiment')
- subparser.add_argument('-d', '--description', type=str, required=True,
- help='sets a description for the experiment to store in the database.')
-
- # Delete
- subparser = subparsers.add_parser(
- 'delete', description="delete specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument(
- '-f', '--force', action='store_true', help='deletes experiment without confirmation')
-
- # Monitor
- subparser = subparsers.add_parser(
- 'monitor', description="plots specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument('-o', '--output', choices=('pdf', 'png', 'ps', 'svg'), default='pdf',
- help='chooses type of output for generated plot')
- subparser.add_argument('-group_by', choices=('date', 'member', 'chunk', 'split', 'automatic'), default=None,
- help='Groups the jobs automatically or by date, member, chunk or split')
- subparser.add_argument('-expand', type=str,
- help='Supply the list of dates/members/chunks to filter the list of jobs. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- subparser.add_argument(
- '-expand_status', type=str, help='Select the statuses to be expanded')
- subparser.add_argument('--hide_groups', action='store_true',
- default=False, help='Hides the groups from the plot')
- subparser.add_argument('-cw', '--check_wrapper', action='store_true',
- default=False, help='Generate possible wrapper in the current workflow')
-
- group2 = subparser.add_mutually_exclusive_group(required=False)
-
- group.add_argument('-fs', '--filter_status', type=str,
- choices=('Any', 'READY', 'COMPLETED',
- 'WAITING', 'SUSPENDED', 'FAILED', 'UNKNOWN'),
- help='Select the original status to filter the list of jobs')
- group = subparser.add_mutually_exclusive_group(required=False)
- group.add_argument('-fl', '--list', type=str,
- help='Supply the list of job names to be filtered. Default = "Any". '
- 'LIST = "b037_20101101_fc3_21_sim b037_20111101_fc4_26_sim"')
- group.add_argument('-fc', '--filter_chunks', type=str,
- help='Supply the list of chunks to filter the list of jobs. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- group.add_argument('-fs', '--filter_status', type=str,
- choices=('Any', 'READY', 'COMPLETED',
- 'WAITING', 'SUSPENDED', 'FAILED', 'UNKNOWN'),
- help='Select the original status to filter the list of jobs')
- group.add_argument('-ft', '--filter_type', type=str,
- help='Select the job type to filter the list of jobs')
- subparser.add_argument('--hide', action='store_true', default=False,
- help='hides plot window')
- group2.add_argument('--txt', action='store_true', default=False,
- help='Generates only txt status file')
-
- group2.add_argument('-txtlog', '--txt_logfiles', action='store_true', default=False,
- help='Generates only txt status file(AS < 3.12b behaviour)')
-
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
-
- # Stats
- subparser = subparsers.add_parser(
- 'stats', description="plots statistics for specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument('-ft', '--filter_type', type=str, help='Select the job type to filter '
- 'the list of jobs')
- subparser.add_argument('-fp', '--filter_period', type=int, help='Select the period to filter jobs '
- 'from current time to the past '
- 'in number of hours back')
- subparser.add_argument('-o', '--output', choices=('pdf', 'png', 'ps', 'svg'), default='pdf',
- help='type of output for generated plot')
- subparser.add_argument('--hide', action='store_true', default=False,
- help='hides plot window')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
-
- # Clean
- subparser = subparsers.add_parser(
- 'clean', description="clean specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument(
- '-pr', '--project', action="store_true", help='clean project')
- subparser.add_argument('-p', '--plot', action="store_true",
- help='clean plot, only 2 last will remain')
- subparser.add_argument('-s', '--stats', action="store_true",
- help='clean stats, only last will remain')
-
- # Recovery
- subparser = subparsers.add_parser(
- 'recovery', description="recover specified experiment")
- subparser.add_argument(
- 'expid', type=str, help='experiment identifier')
- subparser.add_argument(
- '-np', '--noplot', action='store_true', default=False, help='omit plot')
- subparser.add_argument('--all', action="store_true", default=False,
- help='Get completed files to synchronize pkl')
- subparser.add_argument(
- '-s', '--save', action="store_true", default=False, help='Save changes to disk')
- subparser.add_argument('--hide', action='store_true', default=False,
- help='hides plot window')
- subparser.add_argument('-group_by', choices=('date', 'member', 'chunk', 'split', 'automatic'), default=None,
- help='Groups the jobs automatically or by date, member, chunk or split')
- subparser.add_argument('-expand', type=str,
- help='Supply the list of dates/members/chunks to filter the list of jobs. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- subparser.add_argument(
- '-expand_status', type=str, help='Select the statuses to be expanded')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
- subparser.add_argument('-nl', '--no_recover_logs', action='store_true', default=False,
- help='Disable logs recovery')
- # Migrate
- subparser = subparsers.add_parser(
- 'migrate', description="Migrate experiments from current user to another")
- subparser.add_argument('expid', help='experiment identifier')
- group = subparser.add_mutually_exclusive_group(required=True)
- group.add_argument('-o', '--offer', action="store_true",
- default=False, help='Offer experiment')
- group.add_argument('-p', '--pickup', action="store_true",
- default=False, help='Pick-up released experiment')
-
- # Inspect
- subparser = subparsers.add_parser(
- 'inspect', description="Generate all .cmd files")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
- subparser.add_argument(
- '-f', '--force', action="store_true", help='Overwrite all cmd')
- subparser.add_argument('-cw', '--check_wrapper', action='store_true',
- default=False, help='Generate possible wrapper in the current workflow')
-
- group.add_argument('-fs', '--filter_status', type=str,
- choices=('Any', 'READY', 'COMPLETED',
- 'WAITING', 'SUSPENDED', 'FAILED', 'UNKNOWN'),
- help='Select the original status to filter the list of jobs')
- group = subparser.add_mutually_exclusive_group(required=False)
- group.add_argument('-fl', '--list', type=str,
- help='Supply the list of job names to be filtered. Default = "Any". '
- 'LIST = "b037_20101101_fc3_21_sim b037_20111101_fc4_26_sim"')
- group.add_argument('-fc', '--filter_chunks', type=str,
- help='Supply the list of chunks to filter the list of jobs. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- group.add_argument('-fs', '--filter_status', type=str,
- choices=('Any', 'READY', 'COMPLETED',
- 'WAITING', 'SUSPENDED', 'FAILED', 'UNKNOWN'),
- help='Select the original status to filter the list of jobs')
- group.add_argument('-ft', '--filter_type', type=str,
- help='Select the job type to filter the list of jobs')
-
- # Check
- subparser = subparsers.add_parser(
- 'check', description="check configuration for specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
- # Describe
- subparser = subparsers.add_parser(
- 'describe', description="Show details for specified experiment")
- subparser.add_argument('expid', help='experiment identifier')
-
- # Create
- subparser = subparsers.add_parser(
- 'create', description="create specified experiment joblist")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument(
- '-np', '--noplot', action='store_true', default=False, help='omit plot')
- subparser.add_argument('--hide', action='store_true', default=False,
- help='hides plot window')
- subparser.add_argument('-o', '--output', choices=('pdf', 'png', 'ps', 'svg'), default='pdf',
- help='chooses type of output for generated plot')
- subparser.add_argument('-group_by', choices=('date', 'member', 'chunk', 'split', 'automatic'), default=None,
- help='Groups the jobs automatically or by date, member, chunk or split')
- subparser.add_argument('-expand', type=str,
- help='Supply the list of dates/members/chunks to filter the list of jobs. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- subparser.add_argument(
- '-expand_status', type=str, help='Select the statuses to be expanded')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
- subparser.add_argument('-cw', '--check_wrapper', action='store_true',
- default=False, help='Generate possible wrapper in the current workflow')
-
- # Configure
- subparser = subparsers.add_parser('configure', description="configure database and path for autosubmit. It "
- "can be done at machine, user or local level."
- "If no arguments specified configure will "
- "display dialog boxes (if installed)")
- subparser.add_argument(
- '--advanced', action="store_true", help="Open advanced configuration of autosubmit")
- subparser.add_argument('-db', '--databasepath', default=None, help='path to database. If not supplied, '
- 'it will prompt for it')
- subparser.add_argument(
- '-dbf', '--databasefilename', default=None, help='database filename')
- subparser.add_argument('-lr', '--localrootpath', default=None, help='path to store experiments. If not '
- 'supplied, it will prompt for it')
- subparser.add_argument('-pc', '--platformsconfpath', default=None, help='path to platforms.conf file to '
- 'use by default. Optional')
- subparser.add_argument('-jc', '--jobsconfpath', default=None, help='path to jobs.conf file to use by '
- 'default. Optional')
- subparser.add_argument(
- '-sm', '--smtphostname', default=None, help='STMP server hostname. Optional')
- subparser.add_argument(
- '-mf', '--mailfrom', default=None, help='Notifications sender address. Optional')
- group = subparser.add_mutually_exclusive_group()
- group.add_argument('--all', action="store_true",
- help='configure for all users')
- group.add_argument('--local', action="store_true", help='configure only for using Autosubmit from this '
- 'path')
-
- # Install
- subparsers.add_parser(
- 'install', description='install database for autosubmit on the configured folder')
-
- # Set status
- subparser = subparsers.add_parser(
- 'setstatus', description="sets job status for an experiment")
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument(
- '-np', '--noplot', action='store_true', default=False, help='omit plot')
- subparser.add_argument(
- '-s', '--save', action="store_true", default=False, help='Save changes to disk')
-
- subparser.add_argument('-t', '--status_final',
- choices=('READY', 'COMPLETED', 'WAITING', 'SUSPENDED', 'FAILED', 'UNKNOWN',
- 'QUEUING', 'RUNNING'),
- required=True,
- help='Supply the target status')
- group = subparser.add_mutually_exclusive_group(required=True)
- group.add_argument('-fl', '--list', type=str,
- help='Supply the list of job names to be changed. Default = "Any". '
- 'LIST = "b037_20101101_fc3_21_sim b037_20111101_fc4_26_sim"')
- group.add_argument('-fc', '--filter_chunks', type=str,
- help='Supply the list of chunks to change the status. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- group.add_argument('-fs', '--filter_status', type=str,
- help='Select the status (one or more) to filter the list of jobs.'
- "Valid values = ['Any', 'READY', 'COMPLETED', 'WAITING', 'SUSPENDED', 'FAILED', 'UNKNOWN']")
- group.add_argument('-ft', '--filter_type', type=str,
- help='Select the job type to filter the list of jobs')
-
- subparser.add_argument('--hide', action='store_true', default=False,
- help='hides plot window')
- subparser.add_argument('-group_by', choices=('date', 'member', 'chunk', 'split', 'automatic'), default=None,
- help='Groups the jobs automatically or by date, member, chunk or split')
- subparser.add_argument('-expand', type=str,
- help='Supply the list of dates/members/chunks to filter the list of jobs. Default = "Any". '
- 'LIST = "[ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ]"')
- subparser.add_argument(
- '-expand_status', type=str, help='Select the statuses to be expanded')
- subparser.add_argument('-nt', '--notransitive', action='store_true',
- default=False, help='Disable transitive reduction')
- subparser.add_argument('-cw', '--check_wrapper', action='store_true',
- default=False, help='Generate possible wrapper in the current workflow')
-
- # Test Case
- subparser = subparsers.add_parser(
- 'testcase', description='create test case experiment')
- subparser.add_argument(
- '-y', '--copy', help='makes a copy of the specified experiment')
- subparser.add_argument(
- '-d', '--description', required=True, help='description of the test case')
- subparser.add_argument('-c', '--chunks', help='chunks to run')
- subparser.add_argument('-m', '--member', help='member to run')
- subparser.add_argument('-s', '--stardate', help='stardate to run')
- subparser.add_argument(
- '-H', '--HPC', required=True, help='HPC to run experiment on it')
- subparser.add_argument(
- '-b', '--branch', help='branch of git to run (or revision from subversion)')
-
- # Test
- subparser = subparsers.add_parser(
- 'test', description='test experiment')
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument(
- '-c', '--chunks', required=True, help='chunks to run')
- subparser.add_argument('-m', '--member', help='member to run')
- subparser.add_argument('-s', '--stardate', help='stardate to run')
- subparser.add_argument(
- '-H', '--HPC', help='HPC to run experiment on it')
- subparser.add_argument(
- '-b', '--branch', help='branch of git to run (or revision from subversion)')
-
- # Refresh
- subparser = subparsers.add_parser(
- 'refresh', description='refresh project directory for an experiment')
- subparser.add_argument('expid', help='experiment identifier')
- subparser.add_argument('-mc', '--model_conf', default=False, action='store_true',
- help='overwrite model conf file')
- subparser.add_argument('-jc', '--jobs_conf', default=False, action='store_true',
- help='overwrite jobs conf file')
-
- # Archive
- subparser = subparsers.add_parser(
- 'archive', description='archives an experiment')
- subparser.add_argument('expid', help='experiment identifier')
-
- # Unarchive
- subparser = subparsers.add_parser(
- 'unarchive', description='unarchives an experiment')
- subparser.add_argument('expid', help='experiment identifier')
-
- # Readme
- subparsers.add_parser('readme', description='show readme')
-
- # Changelog
- subparsers.add_parser('changelog', description='show changelog')
-
- args = parser.parse_args()
-
- Log.set_console_level(args.logconsole)
- Log.set_file_level(args.logfile)
-
- if args.command == 'run':
- return Autosubmit.run_experiment(args.expid, args.notransitive)
- elif args.command == 'expid':
- return Autosubmit.expid(args.HPC, args.description, args.copy, args.dummy, False,
- args.operational) != ''
- elif args.command == 'delete':
- return Autosubmit.delete(args.expid, args.force)
- elif args.command == 'monitor':
- return Autosubmit.monitor(args.expid, args.output, args.list, args.filter_chunks, args.filter_status,
- args.filter_type, args.hide, args.txt, args.group_by, args.expand,
- args.expand_status, args.hide_groups, args.notransitive, args.check_wrapper, args.txt_logfiles)
- elif args.command == 'stats':
- return Autosubmit.statistics(args.expid, args.filter_type, args.filter_period, args.output, args.hide,
- args.notransitive)
- elif args.command == 'clean':
- return Autosubmit.clean(args.expid, args.project, args.plot, args.stats)
- elif args.command == 'recovery':
- return Autosubmit.recovery(args.expid, args.noplot, args.save, args.all, args.hide, args.group_by,
- args.expand, args.expand_status, args.notransitive, args.no_recover_logs)
- elif args.command == 'check':
- return Autosubmit.check(args.expid, args.notransitive)
- elif args.command == 'inspect':
- return Autosubmit.inspect(args.expid, args.list, args.filter_chunks, args.filter_status,
- args.filter_type, args.notransitive, args.force, args.check_wrapper)
- elif args.command == 'describe':
- return Autosubmit.describe(args.expid)
- elif args.command == 'migrate':
- return Autosubmit.migrate(args.expid, args.offer, args.pickup)
- elif args.command == 'create':
- return Autosubmit.create(args.expid, args.noplot, args.hide, args.output, args.group_by, args.expand,
- args.expand_status, args.notransitive, args.check_wrapper)
- elif args.command == 'configure':
- if not args.advanced or (args.advanced and dialog is None):
- return Autosubmit.configure(args.advanced, args.databasepath, args.databasefilename,
- args.localrootpath, args.platformsconfpath, args.jobsconfpath,
- args.smtphostname, args.mailfrom, args.all, args.local)
- else:
- return Autosubmit.configure_dialog()
- elif args.command == 'install':
- return Autosubmit.install()
- elif args.command == 'setstatus':
- return Autosubmit.set_status(args.expid, args.noplot, args.save, args.status_final, args.list,
- args.filter_chunks, args.filter_status, args.filter_type, args.hide,
- args.group_by, args.expand, args.expand_status, args.notransitive, args.check_wrapper)
- elif args.command == 'testcase':
- return Autosubmit.testcase(args.copy, args.description, args.chunks, args.member, args.stardate,
- args.HPC, args.branch)
- elif args.command == 'test':
- return Autosubmit.test(args.expid, args.chunks, args.member, args.stardate, args.HPC, args.branch)
- elif args.command == 'refresh':
- return Autosubmit.refresh(args.expid, args.model_conf, args.jobs_conf)
- elif args.command == 'archive':
- return Autosubmit.archive(args.expid)
- elif args.command == 'unarchive':
- return Autosubmit.unarchive(args.expid)
-
- elif args.command == 'readme':
- if os.path.isfile(Autosubmit.readme_path):
- with open(Autosubmit.readme_path) as f:
- print(f.read())
- return True
- return False
- elif args.command == 'changelog':
- if os.path.isfile(Autosubmit.changes_path):
- with open(Autosubmit.changes_path) as f:
- print(f.read())
- return True
- return False
- except Exception as e:
- from traceback import format_exc
- Log.critical(
- 'Unhandled exception on Autosubmit: {0}\n{1}', e, format_exc(10))
-
- return False
-
- @staticmethod
- def _delete_expid(expid_delete):
- """
- Removes an experiment from path and database
-
- :type expid_delete: str
- :param expid_delete: identifier of the experiment to delete
- """
- if expid_delete == '' or expid_delete is None and not os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
- expid_delete)):
- Log.info("Experiment directory does not exist.")
- else:
- Log.info("Removing experiment directory...")
- ret = False
- if pwd.getpwuid(os.stat(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid_delete)).st_uid).pw_name == os.getlogin():
- try:
-
- shutil.rmtree(os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, expid_delete))
- except OSError as e:
- Log.warning('Can not delete experiment folder: {0}', e)
- return ret
- Log.info("Deleting experiment from database...")
- ret = delete_experiment(expid_delete)
- if ret:
- Log.result("Experiment {0} deleted".format(expid_delete))
- else:
- Log.warning(
- "Current User is not the Owner {0} can not be deleted!", expid_delete)
- return ret
-
- @staticmethod
- def expid(hpc, description, copy_id='', dummy=False, test=False, operational=False):
- """
- Creates a new experiment for given HPC
-
- :param operational: if true, creates an operational experiment
- :type operational: bool
- :type hpc: str
- :type description: str
- :type copy_id: str
- :type dummy: bool
- :param hpc: name of the main HPC for the experiment
- :param description: short experiment's description.
- :param copy_id: experiment identifier of experiment to copy
- :param dummy: if true, writes a default dummy configuration for testing
- :param test: if true, creates an experiment for testing
- :return: experiment identifier. If method fails, returns ''.
- :rtype: str
- """
- BasicConfig.read()
-
- log_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, 'ASlogs', 'expid.log'.format(os.getuid()))
- try:
- Log.set_file(log_path)
- except IOError as e:
- Log.error("Can not create log file in path {0}: {1}".format(
- log_path, e.message))
- exp_id = None
- if description is None:
- Log.error("Missing experiment description.")
- return ''
- if hpc is None:
- Log.error("Missing HPC.")
- return ''
- if not copy_id:
- exp_id = new_experiment(
- description, Autosubmit.autosubmit_version, test, operational)
- if exp_id == '':
- return ''
- try:
- os.mkdir(os.path.join(BasicConfig.LOCAL_ROOT_DIR, exp_id))
-
- os.mkdir(os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, exp_id, 'conf'))
- Log.info("Copying config files...")
-
- # autosubmit config and experiment copied from AS.
- files = resource_listdir('autosubmit.config', 'files')
- for filename in files:
- if resource_exists('autosubmit.config', 'files/' + filename):
- index = filename.index('.')
- new_filename = filename[:index] + \
- "_" + exp_id + filename[index:]
-
- if filename == 'platforms.conf' and BasicConfig.DEFAULT_PLATFORMS_CONF != '':
- content = open(os.path.join(
- BasicConfig.DEFAULT_PLATFORMS_CONF, filename)).read()
- elif filename == 'jobs.conf' and BasicConfig.DEFAULT_JOBS_CONF != '':
- content = open(os.path.join(
- BasicConfig.DEFAULT_JOBS_CONF, filename)).read()
- else:
- content = resource_string(
- 'autosubmit.config', 'files/' + filename)
-
- conf_new_filename = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, exp_id, "conf", new_filename)
- Log.debug(conf_new_filename)
- open(conf_new_filename, 'w').write(content)
- Autosubmit._prepare_conf_files(
- exp_id, hpc, Autosubmit.autosubmit_version, dummy)
- except (OSError, IOError) as e:
- Log.error(
- "Can not create experiment: {0}\nCleaning...".format(e))
- Autosubmit._delete_expid(exp_id)
- return ''
- else:
- try:
- if os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR, copy_id)):
- exp_id = copy_experiment(
- copy_id, description, Autosubmit.autosubmit_version, test, operational)
- if exp_id == '':
- return ''
- dir_exp_id = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, exp_id)
- os.mkdir(dir_exp_id)
- os.mkdir(dir_exp_id + '/conf')
- Log.info("Copying previous experiment config directories")
- conf_copy_id = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, copy_id, "conf")
- files = os.listdir(conf_copy_id)
- for filename in files:
- if os.path.isfile(os.path.join(conf_copy_id, filename)):
- new_filename = filename.replace(copy_id, exp_id)
- content = open(os.path.join(
- conf_copy_id, filename), 'r').read()
- open(os.path.join(dir_exp_id, "conf",
- new_filename), 'w').write(content)
- Autosubmit._prepare_conf_files(
- exp_id, hpc, Autosubmit.autosubmit_version, dummy)
- #####
- autosubmit_config = AutosubmitConfig(
- copy_id, BasicConfig, ConfigParserFactory())
- if autosubmit_config.check_conf_files():
- project_type = autosubmit_config.get_project_type()
- if project_type == "git":
- autosubmit_config.check_proj()
- autosubmit_git = AutosubmitGit(copy_id[0])
- Log.info("checking model version...")
- if not autosubmit_git.check_commit(autosubmit_config):
- return False
- #####
- else:
- Log.critical(
- "The previous experiment directory does not exist")
- return ''
- except (OSError, IOError) as e:
- Log.error(
- "Can not create experiment: {0}\nCleaning...".format(e))
- Autosubmit._delete_expid(exp_id)
- return ''
-
- Log.debug("Creating temporal directory...")
- exp_id_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, exp_id)
- tmp_path = os.path.join(exp_id_path, "tmp")
- os.mkdir(tmp_path)
- os.chmod(tmp_path, 0o775)
- os.mkdir(os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR))
- os.chmod(os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR), 0o775)
- Log.debug("Creating temporal remote directory...")
- remote_tmp_path = os.path.join(tmp_path, "LOG_" + exp_id)
- os.mkdir(remote_tmp_path)
- os.chmod(remote_tmp_path, 0o775)
-
- Log.debug("Creating pkl directory...")
- os.mkdir(os.path.join(exp_id_path, "pkl"))
-
- Log.debug("Creating plot directory...")
- os.mkdir(os.path.join(exp_id_path, "plot"))
- os.chmod(os.path.join(exp_id_path, "plot"), 0o775)
- Log.result("Experiment registered successfully")
- Log.user_warning("Remember to MODIFY the config files!")
- return exp_id
-
- @staticmethod
- def delete(expid, force):
- """
- Deletes and experiment from database and experiment's folder
-
- :type force: bool
- :type expid: str
- :param expid: identifier of the experiment to delete
- :param force: if True, does not ask for confirmation
-
- :returns: True if succesful, False if not
- :rtype: bool
- """
- log_path = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'delete.log'.format(os.getuid()))
- try:
- Log.set_file(log_path)
- except IOError as e:
- Log.error("Can not create log file in path {0}: {1}".format(
- log_path, e.message))
-
- if os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)):
- if force or Autosubmit._user_yes_no_query("Do you want to delete " + expid + " ?"):
- return Autosubmit._delete_expid(expid)
- else:
- Log.info("Quitting...")
- return False
- else:
- Log.error("The experiment does not exist")
- return True
-
- @staticmethod
- def _load_parameters(as_conf, job_list, platforms):
- # Load parameters
- Log.debug("Loading parameters...")
- parameters = as_conf.load_parameters()
- for platform_name in platforms:
- platform = platforms[platform_name]
- platform.add_parameters(parameters)
-
- platform = platforms[as_conf.get_platform().lower()]
- platform.add_parameters(parameters, True)
-
- job_list.parameters = parameters
-
- @staticmethod
- def inspect(expid, lst, filter_chunks, filter_status, filter_section, notransitive=False, force=False, check_wrapper=False):
- """
- Generates cmd files experiment.
-
- :type expid: str
- :param expid: identifier of experiment to be run
- :return: True if run to the end, False otherwise
- :rtype: bool
- """
-
- if expid is None:
- Log.critical("Missing experiment id")
-
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
- tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR)
- if os.path.exists(os.path.join(tmp_path, 'autosubmit.lock')):
- locked = True
- else:
- locked = False
-
- if not os.path.exists(exp_path):
- Log.critical(
- "The directory %s is needed and does not exist" % exp_path)
- Log.warning("Does an experiment with the given id exist?")
- return 1
- Log.info("Starting inspect command")
- Log.set_file(os.path.join(
- tmp_path, BasicConfig.LOCAL_ASLOG_DIR, 'generate.log'))
- os.system('clear')
- signal.signal(signal.SIGINT, signal_handler)
- as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory())
- if not as_conf.check_conf_files():
- Log.critical('Can not generate scripts with invalid configuration')
- return False
- project_type = as_conf.get_project_type()
- if project_type != "none":
- # Check proj configuration
- as_conf.check_proj()
- safetysleeptime = as_conf.get_safetysleeptime()
- Log.debug("The Experiment name is: {0}", expid)
- Log.debug("Sleep: {0}", safetysleeptime)
- packages_persistence = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
- "job_packages_" + expid)
- os.chmod(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid,
- "pkl", "job_packages_" + expid + ".db"), 0o664)
-
- packages_persistence.reset_table(True)
- job_list_original = Autosubmit.load_job_list(
- expid, as_conf, notransitive=notransitive)
- job_list = copy.deepcopy(job_list_original)
- job_list.packages_dict = {}
-
- Log.debug("Length of the jobs list: {0}", len(job_list))
-
- # variables to be updated on the fly
- safetysleeptime = as_conf.get_safetysleeptime()
- Log.debug("Sleep: {0}", safetysleeptime)
- # Generate
- Log.info("Starting to generate cmd scripts")
-
- if not isinstance(job_list, type([])):
- jobs = []
- jobs_cw = []
- if check_wrapper and (not locked or (force and locked)):
- Log.info("Generating all cmd script adapted for wrappers")
- jobs = job_list.get_uncompleted()
-
- jobs_cw = job_list.get_completed()
- else:
- if (force and not locked) or (force and locked):
- Log.info("Overwritting all cmd scripts")
- jobs = job_list.get_job_list()
- elif locked:
- Log.warning(
- "There is a .lock file and not -f, generating only all unsubmitted cmd scripts")
- jobs = job_list.get_unsubmitted()
- else:
- Log.info("Generating cmd scripts only for selected jobs")
- if filter_chunks:
- fc = filter_chunks
- Log.debug(fc)
- if fc == 'Any':
- jobs = job_list.get_job_list()
- else:
- # noinspection PyTypeChecker
- data = json.loads(Autosubmit._create_json(fc))
- for date_json in data['sds']:
- date = date_json['sd']
- jobs_date = [j for j in job_list.get_job_list() if date2str(
- j.date) == date]
-
- for member_json in date_json['ms']:
- member = member_json['m']
- jobs_member = [j for j in jobs_date if j.member == member]
-
- for chunk_json in member_json['cs']:
- chunk = int(chunk_json)
- jobs = jobs + \
- [job for job in [j for j in jobs_member if j.chunk == chunk]]
-
- elif filter_status:
- Log.debug(
- "Filtering jobs with status {0}", filter_status)
- if filter_status == 'Any':
- jobs = job_list.get_job_list()
- else:
- fs = Autosubmit._get_status(filter_status)
- jobs = [job for job in [j for j in job_list.get_job_list() if j.status == fs]]
-
- elif filter_section:
- ft = filter_section
- Log.debug(ft)
-
- if ft == 'Any':
- jobs = job_list.get_job_list()
- else:
- for job in job_list.get_job_list():
- if job.section == ft:
- jobs.append(job)
- elif lst:
- jobs_lst = lst.split()
-
- if jobs == 'Any':
- jobs = job_list.get_job_list()
- else:
- for job in job_list.get_job_list():
- if job.name in jobs_lst:
- jobs.append(job)
- else:
- jobs = job_list.get_job_list()
- if isinstance(jobs, type([])):
- referenced_jobs_to_remove = set()
- for job in jobs:
- for child in job.children:
- if child not in jobs:
- referenced_jobs_to_remove.add(child)
- for parent in job.parents:
- if parent not in jobs:
- referenced_jobs_to_remove.add(parent)
-
- for job in jobs:
- job.status = Status.WAITING
-
- Autosubmit.generate_scripts_andor_wrappers(
- as_conf, job_list, jobs, packages_persistence, False)
- if len(jobs_cw) > 0:
- referenced_jobs_to_remove = set()
- for job in jobs_cw:
- for child in job.children:
- if child not in jobs_cw:
- referenced_jobs_to_remove.add(child)
- for parent in job.parents:
- if parent not in jobs_cw:
- referenced_jobs_to_remove.add(parent)
-
- for job in jobs_cw:
- job.status = Status.WAITING
- Autosubmit.generate_scripts_andor_wrappers(
- as_conf, job_list, jobs_cw, packages_persistence, False)
-
- Log.info("no more scripts to generate, now proceed to check them manually")
- time.sleep(safetysleeptime)
- return True
-
- @staticmethod
- def generate_scripts_andor_wrappers(as_conf, job_list, jobs_filtered, packages_persistence, only_wrappers=False):
- """
- as_conf: AutosubmitConfig object
- job_list: JobList object, contains a list of jobs
- jobs_filtered: list of jobs
- packages_persistence: Database handler
- only_wrappers: True
- """
- job_list._job_list = jobs_filtered
- job_list.update_list(as_conf, False)
- # Identifying the submitter and loading it
- submitter = Autosubmit._get_submitter(as_conf)
- # Function depending on the submitter
- submitter.load_platforms(as_conf)
- # Identifying HPC from config files
- hpcarch = as_conf.get_platform()
- #
- Autosubmit._load_parameters(as_conf, job_list, submitter.platforms)
- platforms_to_test = set()
- for job in job_list.get_job_list():
- if job.platform_name is None:
- job.platform_name = hpcarch
- # noinspection PyTypeChecker
- job.platform = submitter.platforms[job.platform_name.lower()]
- # noinspection PyTypeChecker
- platforms_to_test.add(job.platform)
- # case setstatus
- job_list.check_scripts(as_conf)
- job_list.update_list(as_conf, False)
- Autosubmit._load_parameters(as_conf, job_list, submitter.platforms)
- while job_list.get_active():
- Autosubmit.submit_ready_jobs(
- as_conf, job_list, platforms_to_test, packages_persistence, True, only_wrappers)
-
- job_list.update_list(as_conf, False)
-
- @staticmethod
- def run_experiment(expid, notransitive=False):
- """
- Runs and experiment (submitting all the jobs properly and repeating its execution in case of failure).
-
- :type expid: str
- :param expid: identifier of experiment to be run
- :return: True if run to the end, False otherwise
- :rtype: bool
- """
- if expid is None:
- Log.critical("Missing experiment id")
-
- BasicConfig.read()
- exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid)
- tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR)
- aslogs_path = os.path.join(tmp_path, BasicConfig.LOCAL_ASLOG_DIR)
- if not os.path.exists(aslogs_path):
- os.mkdir(aslogs_path)
- os.chmod(aslogs_path, 0o775)
- if not os.path.exists(exp_path):
- Log.critical(
- "The directory %s is needed and does not exist" % exp_path)
- Log.warning("Does an experiment with the given id exist?")
- return 1
-
- # checking host whitelist
- import platform
- host = platform.node()
- print(host)
- if BasicConfig.ALLOWED_HOSTS and host not in BasicConfig.ALLOWED_HOSTS:
- Log.info("\n Autosubmit run command is not allowed on this host")
- return False
-
- # checking if there is a lock file to avoid multiple running on the same expid
- try:
- with portalocker.Lock(os.path.join(tmp_path, 'autosubmit.lock'), timeout=1):
- Log.info(
- "Preparing .lock file to avoid multiple instances with same experiment id")
-
- Log.set_file(os.path.join(aslogs_path, 'run.log'))
- os.system('clear')
-
- signal.signal(signal.SIGINT, signal_handler)
-
- as_conf = AutosubmitConfig(
- expid, BasicConfig, ConfigParserFactory())
- if not as_conf.check_conf_files():
- Log.critical('Can not run with invalid configuration')
- return False
-
- project_type = as_conf.get_project_type()
- if project_type != "none":
- # Check proj configuration
- as_conf.check_proj()
-
- hpcarch = as_conf.get_platform()
-
- safetysleeptime = as_conf.get_safetysleeptime()
- retrials = as_conf.get_retrials()
-
- submitter = Autosubmit._get_submitter(as_conf)
- submitter.load_platforms(as_conf)
-
- Log.debug("The Experiment name is: {0}", expid)
- Log.debug("Sleep: {0}", safetysleeptime)
- Log.debug("Default retrials: {0}", retrials)
-
- Log.info("Starting job submission...")
-
- pkl_dir = os.path.join(
- BasicConfig.LOCAL_ROOT_DIR, expid, 'pkl')
- job_list = Autosubmit.load_job_list(
- expid, as_conf, notransitive=notransitive)
-
- Log.debug(
- "Starting from job list restored from {0} files", pkl_dir)
-
- Log.debug("Length of the jobs list: {0}", len(job_list))
-
- Autosubmit._load_parameters(
- as_conf, job_list, submitter.platforms)
-
- # check the job list script creation
- Log.debug("Checking experiment templates...")
-
- platforms_to_test = set()
- for job in job_list.get_job_list():
- if job.platform_name is None:
- job.platform_name = hpcarch
- # noinspection PyTypeChecker
- job.platform = submitter.platforms[job.platform_name.lower(
- )]
- # noinspection PyTypeChecker
- platforms_to_test.add(job.platform)
-
- job_list.check_scripts(as_conf)
-
- packages_persistence = JobPackagePersistence(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"),
- "job_packages_" + expid)
-
- if as_conf.get_wrapper_type() != 'none':
- os.chmod(os.path.join(BasicConfig.LOCAL_ROOT_DIR,
- expid, "pkl", "job_packages_" + expid + ".db"), 0o664)
- packages = packages_persistence.load()
- for (exp_id, package_name, job_name) in packages:
- if package_name not in job_list.packages_dict:
- job_list.packages_dict[package_name] = []
- job_list.packages_dict[package_name].append(
- job_list.get_job_by_name(job_name))
-
- for package_name, jobs in list(job_list.packages_dict.items()):
- from job.job import WrapperJob
- wrapper_job = WrapperJob(package_name, jobs[0].id, Status.SUBMITTED, 0, jobs,
- None,
- None, jobs[0].platform, as_conf)
- job_list.job_package_map[jobs[0].id] = wrapper_job
- job_list.update_list(as_conf)
- job_list.save()
- #########################
- # AUTOSUBMIT - MAIN LOOP
- #########################
- # Main loop. Finishing when all jobs have been submitted
- while job_list.get_active():
- if Autosubmit.exit:
- return 2
- # reload parameters changes
- Log.debug("Reloading parameters...")
- as_conf.reload()
- Autosubmit._load_parameters(
- as_conf, job_list, submitter.platforms)
- # variables to be updated on the fly
- total_jobs = len(job_list.get_job_list())
- Log.info(
- "\n\n{0} of {1} jobs remaining ({2})".format(total_jobs - len(job_list.get_completed()),
- total_jobs,
- time.strftime("%H:%M")))
- safetysleeptime = as_conf.get_safetysleeptime()
- Log.debug("Sleep: {0}", safetysleeptime)
- default_retrials = as_conf.get_retrials()
- Log.debug("Number of retrials: {0}", default_retrials)
-
- check_wrapper_jobs_sleeptime = as_conf.get_wrapper_check_time()
- Log.debug('WRAPPER CHECK TIME = {0}'.format(
- check_wrapper_jobs_sleeptime))
-
- save = False
-
- slurm = []
- for platform in platforms_to_test:
- list_jobid = ""
- completed_joblist = []
- list_prevStatus = []
- queuing_jobs = job_list.get_in_queue_grouped_id(
- platform)
- for job_id, job in list(queuing_jobs.items()):
- if job_list.job_package_map and job_id in job_list.job_package_map:
- Log.debug(
- 'Checking wrapper job with id ' + str(job_id))
- wrapper_job = job_list.job_package_map[job_id]
- check_wrapper = True
- if wrapper_job.status == Status.RUNNING:
- check_wrapper = True if datetime.timedelta.total_seconds(datetime.datetime.now(
- ) - wrapper_job.checked_time) >= check_wrapper_jobs_sleeptime else False
- if check_wrapper:
- wrapper_job.checked_time = datetime.datetime.now()
- platform.check_job(wrapper_job)
- Log.info(
- 'Wrapper job ' + wrapper_job.name + ' is ' + str(Status.VALUE_TO_KEY[wrapper_job.new_status]))
- wrapper_job.check_status(
- wrapper_job.new_status)
- save = True
- else:
- Log.info(
- "Waiting for wrapper check time: {0}\n", check_wrapper_jobs_sleeptime)
- else:
- job = job[0]
- prev_status = job.status
- if job.status == Status.FAILED:
- continue
-
- if platform.type == "slurm":
- list_jobid += str(job_id) + ','
- list_prevStatus.append(prev_status)
- completed_joblist.append(job)
- else:
- platform.check_job(job)
- if prev_status != job.update_status(as_conf.get_copy_remote_logs() == 'true'):
- if as_conf.get_notifications() == 'true':
- if Status.VALUE_TO_KEY[job.status] in job.notify_on:
- Notifier.notify_status_change(MailNotifier(BasicConfig), expid, job.name,
- Status.VALUE_TO_KEY[prev_status],
- Status.VALUE_TO_KEY[job.status],
- as_conf.get_mails_to())
- save = True
-
- if platform.type == "slurm" and list_jobid != "":
- slurm.append(
- [platform, list_jobid, list_prevStatus, completed_joblist])
- # END LOOP
- for platform_jobs in slurm:
- platform = platform_jobs[0]
- jobs_to_check = platform_jobs[1]
- platform.check_Alljobs(
- platform_jobs[3], jobs_to_check, as_conf.get_copy_remote_logs())
-
- for j_Indx in range(0, len(platform_jobs[3])):
- prev_status = platform_jobs[2][j_Indx]
- job = platform_jobs[3][j_Indx]
-
- if prev_status != job.update_status(as_conf.get_copy_remote_logs() == 'true'):
- if as_conf.get_notifications() == 'true':
- if Status.VALUE_TO_KEY[job.status] in job.notify_on:
- Notifier.notify_status_change(MailNotifier(BasicConfig), expid, job.name,
- Status.VALUE_TO_KEY[prev_status],
- Status.VALUE_TO_KEY[job.status],
- as_conf.get_mails_to())
- save = True
-
- if job_list.update_list(as_conf) or save:
- job_list.save()
-
- if Autosubmit.submit_ready_jobs(as_conf, job_list, platforms_to_test, packages_persistence):
- job_list.save()
-
- if Autosubmit.exit:
- return 2
- time.sleep(safetysleeptime)
-
- Log.info("No more jobs to run.")
- if len(job_list.get_failed()) > 0:
- Log.info("Some jobs have failed and reached maximum retrials")
- return False
- else:
- Log.result("Run successful")
- return True
-
- except portalocker.AlreadyLocked:
- Autosubmit.show_lock_warning(expid)
-
- except WrongTemplateException:
- return False
-
- @staticmethod
- def submit_ready_jobs(as_conf, job_list, platforms_to_test, packages_persistence, inspect=False,
- only_wrappers=False):
- """
- Gets READY jobs and send them to the platforms if there is available space on the queues
-
- :param as_conf: autosubmit config object. \n
- :type as_conf: AutosubmitConfig Object. \n
- :param job_list: JobList as a single entity. \n
- :type job_list: JobList() Object. \n
- :param platforms_to_test: List of platforms that will be used in the experiment. \n
- :type platforms_to_test: Set() of Platform() Object. e.g. EcPlatform(), LsfPlatform(), etc. \n
- :return: True if at least one job was submitted, False otherwise
- :rtype: bool
- """
- save = False
-
- for platform in platforms_to_test:
- Log.debug("\nJobs ready for {1}: {0}", len(
- job_list.get_ready(platform)), platform.name)
- packages_to_submit, remote_dependencies_dict = JobPackager(
- as_conf, platform, job_list).build_packages()
- if not inspect:
- platform.open_submit_script()
- valid_packages_to_submit = []
- for package in packages_to_submit:
- try:
- if hasattr(package, "name"):
- if remote_dependencies_dict and package.name in remote_dependencies_dict['dependencies']:
- remote_dependency = remote_dependencies_dict['dependencies'][package.name]
- remote_dependency_id = remote_dependencies_dict['name_to_id'][remote_dependency]
- package.set_job_dependency(remote_dependency_id)
- if not only_wrappers:
- try:
- package.submit(
- as_conf, job_list.parameters, inspect)
- valid_packages_to_submit.append(package)
- except (IOError, OSError):
- # write error file
- continue
- if only_wrappers or inspect:
- for innerJob in package._jobs:
- innerJob.status = Status.COMPLETED
-
- if hasattr(package, "name"):
- job_list.packages_dict[package.name] = package.jobs
- from job.job import WrapperJob
- wrapper_job = WrapperJob(package.name, package.jobs[0].id, Status.READY, 0,
- package.jobs,
- package._wallclock, package._num_processors,
- package.platform, as_conf)
- job_list.job_package_map[package.jobs[0].id] = wrapper_job
- if remote_dependencies_dict and package.name in remote_dependencies_dict['name_to_id']:
- remote_dependencies_dict['name_to_id'][package.name] = package.jobs[0].id
- if isinstance(package, JobPackageThread):
- packages_persistence.save(
- package.name, package.jobs, package._expid, inspect)
- save = True
- except WrongTemplateException as e:
- Log.error(
- "Invalid parameter substitution in {0} template", e.job_name)
- raise
- except Exception:
- Log.error(
- "{0} submission failed due to Unknown error", platform.name)
- raise
-
- if platform.type == "slurm" and not inspect and not only_wrappers:
- try:
- save = True
- if len(valid_packages_to_submit) > 0:
- jobs_id = platform.submit_Script()
- if jobs_id is None:
- raise BaseException(
- "Exiting AS being unable to get jobID")
- i = 0
- for package in valid_packages_to_submit:
- for job in package.jobs:
- job.id = str(jobs_id[i])
- Log.info("{0} submitted", job.name)
- job.status = Status.SUBMITTED
- job.write_submit_time()
- if hasattr(package, "name"):
- job_list.packages_dict[package.name] = package.jobs
- from job.job import WrapperJob
- wrapper_job = WrapperJob(package.name, package.jobs[0].id, Status.SUBMITTED, 0,
- package.jobs,
- package._wallclock, package._num_processors,
- package.platform, as_conf)
- job_list.job_package_map[package.jobs[0].id] = wrapper_job
- if remote_dependencies_dict and package.name in remote_dependencies_dict[
- 'name_to_id']:
- remote_dependencies_dict['name_to_id'][package.name] = package.jobs[0].id
- if isinstance(package, JobPackageThread):
- packages_persistence.save(
- package.name, package.jobs, package._expid, inspect)
- i += 1
-
- except WrongTemplateException as e:
- Log.error(
- "Invalid parameter substitution in {0} template", e.job_name)
- raise
- except Exception:
- Log.error("{0} submission failed", platform.name)
- raise
-
- return save
-
- @staticmethod
- def monitor(expid, file_format, lst, filter_chunks, filter_status, filter_section, hide, txt_only=False,
- group_by=None, expand=list(), expand_status=list(), hide_groups=False, notransitive=False, check_wrapper=False, txt_logfiles=False):
- """
- Plots workflow graph for a given experiment with status of each job coded by node color.
- Plot is created in experiment's plot folder with name __