diff --git a/CHANGELOG.md b/CHANGELOG.md
index 76949c37c0da056cb41e244c757d2ff8b4115ff8..16437757e4e747b46d9ac69e2c123ad9085de51f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
# CHANGELOG
+### Pre-release v4.0.0b6 - Release date: TBD
+
+* Added a filter by Autosubmit version to `/v4/experiments`
+* Fixed a bug that affected wrapper information retrieval
+* Fixed the handling of several database connection file handles
+* Added `/v4/experiments/{expid}/wrappers` endpoint
+
### Pre-release v4.0.0b5 - Release date: 2024-03-18
* Fixed the graph background task to correctly select the active experiments
diff --git a/autosubmit_api/__init__.py b/autosubmit_api/__init__.py
index 7e3f8c4b24e448ffd344ba6516c0ba88ba736f00..e7eae9a30091fe0cf2d717b94857d259a82c4e09 100644
--- a/autosubmit_api/__init__.py
+++ b/autosubmit_api/__init__.py
@@ -17,6 +17,6 @@
# You should have received a copy of the GNU General Public License
# along with Autosubmit. If not, see <http://www.gnu.org/licenses/>.
-__version__ = "4.0.0b5"
+__version__ = "4.0.0b6"
__author__ = "Luiggi Tenorio, Bruno P. Kinoshita, Cristian GutiƩrrez, Julian Berlin, Wilmer Uruchi"
__credits__ = "Barcelona Supercomputing Center"
\ No newline at end of file
diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py
index c9ad0cba243cd707061de96e6ea092d122f1f8a6..3db57957da0079baa57ee0b0d47937bce2d837bb 100644
--- a/autosubmit_api/autosubmit_legacy/job/job_list.py
+++ b/autosubmit_api/autosubmit_legacy/job/job_list.py
@@ -28,7 +28,6 @@ import math
from time import time, mktime
from dateutil.relativedelta import *
-from bscearth.utils.log import Log
from autosubmit_api.autosubmit_legacy.job.job_utils import SubJob
from autosubmit_api.autosubmit_legacy.job.job_utils import SubJobManager, job_times_to_text
from autosubmit_api.config.basicConfig import APIBasicConfig
@@ -37,8 +36,6 @@ from autosubmit_api.components.jobs import utils as JUtils
from autosubmit_api.monitor.monitor import Monitor
from autosubmit_api.common.utils import Status
from bscearth.utils.date import date2str, parse_date
-from autosubmit_api.experiment import common_db_requests as DbRequests
-from autosubmit_api.autosubmit_legacy.job.job_package_persistence import JobPackagePersistence
# from autosubmit_legacy.job.tree import Tree
from autosubmit_api.database import db_structure as DbStructure
from autosubmit_api.database.db_jobdata import JobDataStructure, JobRow
@@ -590,11 +587,6 @@ class JobList:
path_local_root = basic_config.LOCAL_ROOT_DIR
path_structure = basic_config.STRUCTURES_DIR
db_file = os.path.join(path_local_root, basic_config.DB_FILE)
- conn = DbRequests.create_connection(db_file)
- # job_data = None
- # Job information from worker database
- # job_times = dict() # REMOVED: DbRequests.get_times_detail_by_expid(conn, expid)
- conn.close()
# Job information from job historic data
# print("Get current job data structure...")
experiment_history = ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)).build_reader_experiment_history()
@@ -846,72 +838,3 @@ class JobList:
int(finish_time),
None,
None)
-
- @staticmethod
- def retrieve_packages(basic_config, expid, current_jobs=None):
- """
- Retrieves dictionaries that map the collection of packages in the experiment
-
- :param basic_config: Basic configuration
- :type basic_config: Configuration Object
- :param expid: Experiment Id
- :type expid: String
- :param current_jobs: list of names of current jobs
- :type current_jobs: list
- :return: job to package, package to jobs, package to package_id, package to symbol
- :rtype: Dictionary(Job Object, Package_name), Dictionary(Package_name, List of Job Objects), Dictionary(String, String), Dictionary(String, String)
- """
- monitor = Monitor()
- packages = None
- exp_paths = ExperimentPaths(expid)
- try:
- packages = JobPackagePersistence(exp_paths.job_packages_db).load(wrapper=False)
-
- # if the main table exist but is empty, we try the other one
- if not (any(packages.keys()) or any(packages.values())):
- Log.info("Wrapper table empty, trying packages.")
- packages = JobPackagePersistence(exp_paths.job_packages_db).load(wrapper=True)
-
-
- except Exception as ex:
- print("Wrapper table not found, trying packages.")
- packages = None
- try:
- packages = JobPackagePersistence(exp_paths.job_packages_db).load(wrapper=True)
- except Exception as exp2:
- packages = None
- pass
- pass
-
- job_to_package = dict()
- package_to_jobs = dict()
- package_to_package_id = dict()
- package_to_symbol = dict()
- if (packages):
- try:
- for exp, package_name, job_name in packages:
- if len(str(package_name).strip()) > 0:
- if (current_jobs):
- if job_name in current_jobs:
- job_to_package[job_name] = package_name
- else:
- job_to_package[job_name] = package_name
- # list_packages.add(package_name)
- for name in job_to_package:
- package_name = job_to_package[name]
- package_to_jobs.setdefault(package_name, []).append(name)
- # if package_name not in package_to_jobs.keys():
- # package_to_jobs[package_name] = list()
- # package_to_jobs[package_name].append(name)
- for key in package_to_jobs:
- package_to_package_id[key] = key.split("_")[2]
- list_packages = list(job_to_package.values())
- for i in range(len(list_packages)):
- if i % 2 == 0:
- package_to_symbol[list_packages[i]] = 'square'
- else:
- package_to_symbol[list_packages[i]] = 'hexagon'
- except Exception as ex:
- print((traceback.format_exc()))
-
- return (job_to_package, package_to_jobs, package_to_package_id, package_to_symbol)
\ No newline at end of file
diff --git a/autosubmit_api/autosubmit_legacy/job/job_package_persistence.py b/autosubmit_api/autosubmit_legacy/job/job_package_persistence.py
deleted file mode 100644
index 5a39c58d3ca7a7881d280d6c154fd0f658e4b06b..0000000000000000000000000000000000000000
--- a/autosubmit_api/autosubmit_legacy/job/job_package_persistence.py
+++ /dev/null
@@ -1,54 +0,0 @@
-
-
-# Copyright 2017 Earth Sciences Department, BSC-CNS
-
-# This file is part of Autosubmit.
-
-# Autosubmit is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# Autosubmit is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with Autosubmit. If not, see <http://www.gnu.org/licenses/>.
-
-
-from autosubmit_api.database.db_manager import DbManager
-
-
-class JobPackagePersistence(object):
-
- VERSION = 1
- JOB_PACKAGES_TABLE = 'job_package'
- WRAPPER_JOB_PACKAGES_TABLE = 'wrapper_job_package'
- TABLE_FIELDS = ['exp_id', 'package_name', 'job_name']
-
- def __init__(self, persistence_path, persistence_file):
- self.db_manager = DbManager(persistence_path, persistence_file, self.VERSION)
- # self.db_manager.create_table(self.JOB_PACKAGES_TABLE, self.TABLE_FIELDS)
- # self.db_manager.create_table(self.WRAPPER_JOB_PACKAGES_TABLE, self.TABLE_FIELDS)
- def load(self,wrapper=False):
- """
- Loads package of jobs from a database
- :param persistence_file: str
- :param persistence_path: str
-
- """
- if not wrapper:
- return self.db_manager.select_all(self.JOB_PACKAGES_TABLE)
- else:
- return self.db_manager.select_all(self.WRAPPER_JOB_PACKAGES_TABLE)
- def reset(self):
- """
- Loads package of jobs from a database
- :param persistence_file: str
- :param persistence_path: str
-
- """
- self.db_manager.drop_table(self.WRAPPER_JOB_PACKAGES_TABLE)
- self.db_manager.create_table(self.WRAPPER_JOB_PACKAGES_TABLE, self.TABLE_FIELDS)
diff --git a/autosubmit_api/blueprints/v4.py b/autosubmit_api/blueprints/v4.py
index fb193334b441463c35a380e451a7c98ca68e08c5..46cfb358c2f20204c9375ca08cf7bdf197e22ced 100644
--- a/autosubmit_api/blueprints/v4.py
+++ b/autosubmit_api/blueprints/v4.py
@@ -44,6 +44,11 @@ def create_v4_blueprint():
view_func=v4_views.ExperimentJobsView.as_view("ExperimentJobsView"),
)
+ blueprint.add_url_rule(
+ "/experiments//wrappers",
+ view_func=v4_views.ExperimentWrappersView.as_view("ExperimentWrappersView"),
+ )
+
# blueprint.route("/experiments//runs")(v3_views.get_runs)
# blueprint.route("/experiments//check-running")(
# v3_views.get_if_running
diff --git a/autosubmit_api/components/jobs/joblist_helper.py b/autosubmit_api/components/jobs/joblist_helper.py
index 997b272e7c669c7728024c1aecd49998ea02eda9..14a87f726772eaf0b284187e898c0a22aef4376e 100644
--- a/autosubmit_api/components/jobs/joblist_helper.py
+++ b/autosubmit_api/components/jobs/joblist_helper.py
@@ -7,6 +7,7 @@ from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer
from autosubmit_api.config.basicConfig import APIBasicConfig
from typing import List, Dict
from autosubmit_api.components.jobs.job_factory import Job
+from autosubmit_api.persistance.job_package_reader import JobPackageReader
class JobListHelper(object):
""" Loads time (queuing runnning) and packages. Applies the fix for queue time of jobs in wrappers. """
@@ -28,8 +29,16 @@ class JobListHelper(object):
self._initialize_main_values()
def _initialize_main_values(self):
- # type: () -> None
- self.job_to_package, self.package_to_jobs, self.package_to_package_id, self.package_to_symbol = JobList.retrieve_packages(self.basic_config, self.expid)
+ job_package_reader = JobPackageReader(self.expid)
+ try:
+ job_package_reader.read()
+ self.job_to_package = job_package_reader.job_to_package
+ self.package_to_jobs = job_package_reader.package_to_jobs
+ self.package_to_package_id = job_package_reader.package_to_package_id
+ self.package_to_symbol = job_package_reader.package_to_symbol
+ except Exception:
+ self.warning_messages.append("Failed to read job_packages")
+
self.job_name_to_job_row, self.job_running_time_to_text, self.warning_messages = JobList.get_job_times_collection(
self.basic_config, self.simple_jobs, self.expid, self.job_to_package, self.package_to_jobs, timeseconds=True)
diff --git a/autosubmit_api/components/representations/graph/graph.py b/autosubmit_api/components/representations/graph/graph.py
index c107b339c3bb3e0a96ccb7fd89f2bdc68ccfd622..5e4c96cbc104acb8476c7a4f9cbeea256dd448ce 100644
--- a/autosubmit_api/components/representations/graph/graph.py
+++ b/autosubmit_api/components/representations/graph/graph.py
@@ -250,6 +250,7 @@ class GraphRepresentation(object):
"platform_name": job.platform,
"chunk": job.chunk,
"package": job.package,
+ "wrapper": job.package,
"member": job.member,
"date": ini_date,
"date_plus": end_date,
diff --git a/autosubmit_api/database/db_common.py b/autosubmit_api/database/db_common.py
index 961bba69346eac7098dea4aca257d0f19c7b8eb3..40443b2afee46b7e2bf9ce4d59ab20b45b3b719b 100644
--- a/autosubmit_api/database/db_common.py
+++ b/autosubmit_api/database/db_common.py
@@ -382,6 +382,7 @@ def get_experiment_by_id(expid):
cursor.execute(query)
headers = get_headers_sqlite(cursor)
row = cursor.fetchone()
+ close_conn(conn, cursor)
if row is not None:
obj = map_row_result_to_dict_sqlite(row, headers)
result['id'] = obj["id"]
diff --git a/autosubmit_api/database/db_structure.py b/autosubmit_api/database/db_structure.py
index 0866488dc470ff71fad8d81dc93cff953d530a1e..06ad129852a3e498e5904472343cd3f6f5752866 100644
--- a/autosubmit_api/database/db_structure.py
+++ b/autosubmit_api/database/db_structure.py
@@ -24,7 +24,6 @@ def get_structure(expid, structures_path):
os.open(db_structure_path, os.O_WRONLY | os.O_CREAT, 0o777)
# open(db_structure_path, "w")
# print(db_structure_path)
- conn = create_connection(db_structure_path)
create_table_query = textwrap.dedent(
'''CREATE TABLE
IF NOT EXISTS experiment_structure (
@@ -32,7 +31,8 @@ def get_structure(expid, structures_path):
e_to text NOT NULL,
UNIQUE(e_from,e_to)
);''')
- create_table(conn, create_table_query)
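+ # sqlite3's connection context manager handles the transaction (commit/rollback);
+ # note that it does not close the connection itself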
+ with create_connection(db_structure_path) as conn:
+ create_table(conn, create_table_query)
current_table = _get_exp_structure(db_structure_path)
# print("Current table: ")
# print(current_table)
@@ -92,12 +92,12 @@ def _get_exp_structure(path):
:rtype: 4-tuple (int, str, str, int)
"""
try:
- conn = create_connection(path)
- conn.text_factory = str
- cur = conn.cursor()
- cur.execute(
- "SELECT e_from, e_to FROM experiment_structure")
- rows = cur.fetchall()
+ with create_connection(path) as conn:
+ conn.text_factory = str
+ cur = conn.cursor()
+ cur.execute(
+ "SELECT e_from, e_to FROM experiment_structure")
+ rows = cur.fetchall()
return rows
except Exception as exp:
print((traceback.format_exc()))
diff --git a/autosubmit_api/database/queries.py b/autosubmit_api/database/queries.py
index ad3e3278658386af3b540adb1a7158981e9bc1a6..88c8acd8a58eec5594cab8a7984911e9f47b5da9 100644
--- a/autosubmit_api/database/queries.py
+++ b/autosubmit_api/database/queries.py
@@ -9,6 +9,7 @@ def generate_query_listexp_extended(
only_active: bool = False,
owner: str = None,
exp_type: str = None,
+ autosubmit_version: str = None,
order_by: str = None,
order_desc: bool = False,
):
@@ -61,7 +62,11 @@ def generate_query_listexp_extended(
filter_stmts.append(tables.experiment_table.c.name.not_like(f"t%"))
filter_stmts.append(tables.experiment_table.c.name.not_like(f"o%"))
- # logger.debug(str(filter_stmts))
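+ # Filter by exact match on the stored Autosubmit version string (e.g. "4.0.101")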
+ if autosubmit_version:
+ filter_stmts.append(
+ tables.experiment_table.c.autosubmit_version == autosubmit_version
+ )
+
statement = statement.where(*filter_stmts)
# Order by
diff --git a/autosubmit_api/database/tables.py b/autosubmit_api/database/tables.py
index 7a42573b5b774d59c7cd6ff25c84f449042f526d..a1fd39dccff3eea56bb0ffd0134829239ac5a450 100644
--- a/autosubmit_api/database/tables.py
+++ b/autosubmit_api/database/tables.py
@@ -1,51 +1,110 @@
-from sqlalchemy import Table, Column, MetaData, Integer, String, Text
+from typing import Optional
+from sqlalchemy import MetaData, Integer, String, Text, Table
+from sqlalchemy.orm import DeclarativeBase, mapped_column, Mapped
+
metadata_obj = MetaData()
-# MAIN_DB TABLES
+## SQLAlchemy ORM tables
+class BaseTable(DeclarativeBase):
+ metadata = metadata_obj
-experiment_table = Table(
- "experiment",
- metadata_obj,
- Column("id", Integer, nullable=False, primary_key=True),
- Column("name", String, nullable=False),
- Column("description", String, nullable=False),
- Column("autosubmit_version", String),
-)
-
-details_table = Table(
- "details",
- metadata_obj,
- Column("exp_id", Integer, primary_key=True),
- Column("user", Text, nullable=False),
- Column("created", Text, nullable=False),
- Column("model", Text, nullable=False),
- Column("branch", Text, nullable=False),
- Column("hpc", Text, nullable=False),
-)
+class ExperimentTable(BaseTable):
+ """
+ The main table, populated by Autosubmit. It should be read-only for the API.
+ """
-# AS_TIMES TABLES
+ __tablename__ = "experiment"
+
+ id: Mapped[int] = mapped_column(Integer, nullable=False, primary_key=True)
+ name: Mapped[str] = mapped_column(String, nullable=False)
+ description: Mapped[str] = mapped_column(String, nullable=False)
+ autosubmit_version: Mapped[Optional[str]] = mapped_column(String)
+
+
+class DetailsTable(BaseTable):
+ """
+ Stores extra information. It is populated by the API.
+ """
+
+ __tablename__ = "details"
+
+ exp_id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ user: Mapped[str] = mapped_column(Text, nullable=False)
+ created: Mapped[str] = mapped_column(Text, nullable=False)
+ model: Mapped[str] = mapped_column(Text, nullable=False)
+ branch: Mapped[str] = mapped_column(Text, nullable=False)
+ hpc: Mapped[str] = mapped_column(Text, nullable=False)
+
+
+class ExperimentStatusTable(BaseTable):
+ """
+ Stores the status of the experiments
+ """
+
+ __tablename__ = "experiment_status"
+
+ exp_id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ name: Mapped[str] = mapped_column(Text, nullable=False)
+ status: Mapped[str] = mapped_column(Text, nullable=False)
+ seconds_diff: Mapped[int] = mapped_column(Integer, nullable=False)
+ modified: Mapped[str] = mapped_column(Text, nullable=False)
+
+
+class GraphDataTable(BaseTable):
+ """
+ Stores the graph drawing coordinates; used exclusively to speed up the
+ process of generating the graph layout.
+ """
-experiment_status_table = Table(
- "experiment_status",
- metadata_obj,
- Column("exp_id", Integer, primary_key=True),
- Column("name", Text, nullable=False),
- Column("status", Text, nullable=False),
- Column("seconds_diff", Integer, nullable=False),
- Column("modified", Text, nullable=False),
-)
+ __tablename__ = "experiment_graph_draw"
+ id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ job_name: Mapped[str] = mapped_column(Text, nullable=False)
+ x: Mapped[int] = mapped_column(Integer, nullable=False)
+ y: Mapped[int] = mapped_column(Integer, nullable=False)
+
+
+class JobPackageTable(BaseTable):
+ """
+ Stores the mapping between each wrapper name and the actual jobs in Slurm.
+ """
+
+ __tablename__ = "job_package"
+
+ exp_id: Mapped[str] = mapped_column(Text)
+ package_name: Mapped[str] = mapped_column(Text, primary_key=True)
+ job_name: Mapped[str] = mapped_column(Text, primary_key=True)
+
+
+class WrapperJobPackageTable(BaseTable):
+ """
+ A replication of the job_package table. It is only created/used when using
+ inspect and create or monitor with the flag -cw in Autosubmit.
+ This replication avoids interfering with the current Autosubmit run of that
+ experiment, since wrapper_job_package will contain a preview, not the real wrapper packages.
+ """
+
+ __tablename__ = "wrapper_job_package"
+
+ exp_id: Mapped[str] = mapped_column(Text)
+ package_name: Mapped[str] = mapped_column(Text, primary_key=True)
+ job_name: Mapped[str] = mapped_column(Text, primary_key=True)
+
+
+## SQLAlchemy Core tables
+
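+# The Core Table objects behind the ORM classes are kept exposed so that the
+# existing Core-style queries keep working unchanged.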
+# MAIN_DB TABLES
+experiment_table: Table = ExperimentTable.__table__
+details_table: Table = DetailsTable.__table__
+
+# AS_TIMES TABLES
+experiment_status_table: Table = ExperimentStatusTable.__table__
# Graph Data TABLES
+graph_data_table: Table = GraphDataTable.__table__
-graph_data_table = Table(
- "experiment_graph_draw",
- metadata_obj,
- Column("id", Integer, primary_key=True),
- Column("job_name", Text, nullable=False),
- Column("x", Integer, nullable=False),
- Column("y", Integer, nullable=False),
-)
+# Job package TABLES
+job_package_table: Table = JobPackageTable.__table__
+wrapper_job_package_table: Table = WrapperJobPackageTable.__table__
\ No newline at end of file
diff --git a/autosubmit_api/experiment/common_db_requests.py b/autosubmit_api/experiment/common_db_requests.py
index 801e7e4447946140990bc2853c4f6d1b60e6f05d..187cd45f8aa8f4771383588c2f2fa25da2cd4c47 100644
--- a/autosubmit_api/experiment/common_db_requests.py
+++ b/autosubmit_api/experiment/common_db_requests.py
@@ -2,29 +2,54 @@ import os
import traceback
import sqlite3
from datetime import datetime
-
+from autosubmit_api.logger import logger
from autosubmit_api.config.basicConfig import APIBasicConfig
+from autosubmit_api.database import tables
+from autosubmit_api.database.common import create_as_times_db_engine
+
APIBasicConfig.read()
+DB_FILES_STATUS = os.path.join(
+ APIBasicConfig.LOCAL_ROOT_DIR, "as_metadata", "test", APIBasicConfig.FILE_STATUS_DB
+) # "/esarchive/autosubmit/as_metadata/test/status.db"
+
-DB_FILE_AS_TIMES = os.path.join(APIBasicConfig.DB_DIR, APIBasicConfig.AS_TIMES_DB) # "/esarchive/autosubmit/as_times.db"
-DB_FILES_STATUS = os.path.join(APIBasicConfig.LOCAL_ROOT_DIR, "as_metadata", "test", APIBasicConfig.FILE_STATUS_DB) # "/esarchive/autosubmit/as_metadata/test/status.db"
-# PATH_DB_DATA = "/esarchive/autosubmit/as_metadata/data/"
+# STATUS ARCHIVE (might be removed soon)
-# STATUS ARCHIVE
+def create_connection(db_file):
+ # type: (str) -> sqlite3.Connection
+ """
+ Create a database connection to the SQLite database specified by db_file.
+ :param db_file: database file name
+ :return: Connection object or None
+ """
+ try:
+ conn = sqlite3.connect(db_file)
+ return conn
+ except Exception as exc:
+ logger.error(exc)
+
def insert_archive_status(status, alatency, abandwidth, clatency, cbandwidth, rtime):
try:
- conn = create_connection(DB_FILES_STATUS)
- sql = ''' INSERT INTO archive_status(status, avg_latency, avg_bandwidth, current_latency, current_bandwidth, response_time, modified ) VALUES(?,?,?,?,?,?,?)'''
- # print(row_content)
- cur = conn.cursor()
- cur.execute(sql, (int(status), alatency, abandwidth, clatency,
- cbandwidth, rtime, datetime.today().strftime('%Y-%m-%d-%H:%M:%S')))
- # print(cur)
- conn.commit()
- return cur.lastrowid
+ with create_connection(DB_FILES_STATUS) as conn:
+ sql = """ INSERT INTO archive_status(status, avg_latency, avg_bandwidth, current_latency, current_bandwidth, response_time, modified ) VALUES(?,?,?,?,?,?,?)"""
+ cur = conn.cursor()
+ cur.execute(
+ sql,
+ (
+ int(status),
+ alatency,
+ abandwidth,
+ clatency,
+ cbandwidth,
+ rtime,
+ datetime.today().strftime("%Y-%m-%d-%H:%M:%S"),
+ ),
+ )
+ conn.commit()
+ return cur.lastrowid
except Exception as exp:
print((traceback.format_exc()))
print(("Error on Insert : " + str(exp)))
@@ -37,48 +62,36 @@ def get_last_read_archive_status():
:rtype: 7-tuple
"""
try:
- conn = create_connection(DB_FILES_STATUS)
- sql = "SELECT status, avg_latency, avg_bandwidth, current_latency, current_bandwidth, response_time, modified FROM archive_status order by rowid DESC LIMIT 1"
- cur = conn.cursor()
- cur.execute(sql)
- rows = cur.fetchall()
- status, alatency, abandwidth, clatency, cbandwidth, rtime, date = rows[0]
- return (status, alatency, abandwidth, clatency, cbandwidth, rtime, date)
- # print(rows)
+ with create_connection(DB_FILES_STATUS) as conn:
+ sql = "SELECT status, avg_latency, avg_bandwidth, current_latency, current_bandwidth, response_time, modified FROM archive_status order by rowid DESC LIMIT 1"
+ cur = conn.cursor()
+ cur.execute(sql)
+ rows = cur.fetchall()
+ status, alatency, abandwidth, clatency, cbandwidth, rtime, date = rows[0]
+ return (status, alatency, abandwidth, clatency, cbandwidth, rtime, date)
except Exception as exp:
print((traceback.format_exc()))
print(("Error on Get Last : " + str(exp)))
return (False, None, None, None, None, None, None)
-# INSERTIONS
-
-def create_connection(db_file):
- # type: (str) -> sqlite3.Connection
- """
- Create a database connection to the SQLite database specified by db_file.
- :param db_file: database file name
- :return: Connection object or None
- """
- try:
- conn = sqlite3.connect(db_file)
- return conn
- except Exception as exp:
- print(exp)
-
# SELECTS
+
def get_experiment_status():
"""
Gets table experiment_status as dictionary
conn is expected to reference as_times.db
"""
- # conn = create_connection(DB_FILE_AS_TIMES)
experiment_status = dict()
- current_table = _get_exp_status()
- for item in current_table:
- exp_id, name, status, seconds_diff = item
- experiment_status[name] = status
+ try:
+ with create_as_times_db_engine().connect() as conn:
+ cursor = conn.execute(tables.experiment_status_table.select())
+ for row in cursor:
+ experiment_status[row.name] = row.status
+ except Exception as exc:
+ logger.error(f"Exception while reading experiment_status: {exc}")
+ logger.error(traceback.format_exc())
return experiment_status
@@ -90,50 +103,16 @@ def get_specific_experiment_status(expid):
:return: name of experiment and status
:rtype: 2-tuple (name, status)
"""
- exp_id, name, status, seconds_diff = _get_specific_exp_status(expid)
- print(("{} {} {} {}".format(exp_id, name, status, seconds_diff)))
- return (name, status)
-
-
-def _get_exp_status():
- """
- Get all registers from experiment_status.\n
- :return: row content: exp_id, name, status, seconds_diff
- :rtype: 4-tuple (int, str, str, int)
- """
try:
- conn = create_connection(os.path.join(APIBasicConfig.DB_DIR, APIBasicConfig.AS_TIMES_DB))
- conn.text_factory = str
- cur = conn.cursor()
- cur.execute(
- "SELECT exp_id, name, status, seconds_diff FROM experiment_status")
- rows = cur.fetchall()
- return rows
- except Exception as exp:
- print((traceback.format_exc()))
- return dict()
-
-
-def _get_specific_exp_status(expid):
- """
- Get all registers from experiment_status.\n
- :return: row content: exp_id, name, status, seconds_diff
- :rtype: 4-tuple (int, str, str, int)
- """
- try:
- # print("Honk")
- conn = create_connection(os.path.join(APIBasicConfig.DB_DIR, APIBasicConfig.AS_TIMES_DB))
- cur = conn.cursor()
- cur.execute(
- "SELECT exp_id, name, status, seconds_diff FROM experiment_status WHERE name=?", (expid,))
- row = cur.fetchone()
- if row == None:
- return (0, expid, "NOT RUNNING", 0)
- # print(row)
- return row
- except Exception as exp:
- print((traceback.format_exc()))
- return (0, expid, "NOT RUNNING", 0)
-
-
-# UPDATES
+ with create_as_times_db_engine().connect() as conn:
+ row = conn.execute(
+ tables.experiment_status_table.select().where(
+ tables.experiment_status_table.c.name == expid
+ )
+ ).one_or_none()
+ except Exception as exc:
+ logger.error(f"Exception while reading experiment_status for {expid}: {exc}")
+ logger.error(traceback.format_exc())
+ row = None
+ if row:
+ return (row.name, row.status)
+ return (expid, "NOT RUNNING")
diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py
index 0028357e2741388767409e58024102091d66e7b7..f5a909502217aadfac338d06a6cd857b949a4262 100644
--- a/autosubmit_api/experiment/common_requests.py
+++ b/autosubmit_api/experiment/common_requests.py
@@ -45,6 +45,7 @@ from autosubmit_api.performance.utils import calculate_SYPD_perjob
from autosubmit_api.monitor.monitor import Monitor
from autosubmit_api.persistance.experiment import ExperimentPaths
+from autosubmit_api.persistance.job_package_reader import JobPackageReader
from autosubmit_api.statistics.statistics import Statistics
from autosubmit_api.config.basicConfig import APIBasicConfig
@@ -309,12 +310,11 @@ def get_experiment_summary(expid, log):
tmp_path = os.path.join(
APIBasicConfig.LOCAL_ROOT_DIR, expid, APIBasicConfig.LOCAL_TMP_DIR)
# Try to get packages
- job_to_package = dict()
- package_to_jobs = dict()
- package_to_package_id = dict()
- package_to_symbol = dict()
- job_to_package, package_to_jobs, package_to_package_id, package_to_symbol = JobList.retrieve_packages(
- APIBasicConfig, expid)
+ job_package_reader = JobPackageReader(expid)
+ try:
+ job_package_reader.read()
+ except Exception:
+ logger.warning("Failed to read job_packages")
# Basic data
job_running_to_seconds = dict()
job_running_to_runtext = dict()
@@ -338,9 +338,17 @@ def get_experiment_summary(expid, log):
status_code, status_color, status_text, out, err, priority, id_number)
fakeAllJobs.append(
SimpleJob(job_name, tmp_path, status_code))
-
- job_running_to_seconds, job_running_to_runtext, _ = JobList.get_job_times_collection(
- APIBasicConfig, fakeAllJobs, expid, job_to_package, package_to_jobs, timeseconds=True)
+
+ job_running_to_seconds, job_running_to_runtext, _ = (
+ JobList.get_job_times_collection(
+ APIBasicConfig,
+ fakeAllJobs,
+ expid,
+ job_package_reader.job_to_package,
+ job_package_reader.package_to_jobs,
+ timeseconds=True,
+ )
+ )
# Main Loop
if len(list(job_running_to_seconds.keys())) > 0:
@@ -824,8 +832,17 @@ def get_quick_view(expid):
# Retrieving packages
now_ = time.time()
- job_to_package, package_to_jobs, package_to_package_id, package_to_symbol = JobList.retrieve_packages(
- APIBasicConfig, expid)
+ job_to_package = {}
+ package_to_package_id = {}
+
+ job_package_reader = JobPackageReader(expid)
+ try:
+ job_package_reader.read()
+ job_to_package = job_package_reader.job_to_package
+ package_to_package_id = job_package_reader.package_to_package_id
+ except Exception:
+ logger.warning("Failed to read job_packages")
+
logger.debug(("Retrieving packages {0} seconds.".format(
str(time.time() - now_))))
diff --git a/autosubmit_api/persistance/job_package_reader.py b/autosubmit_api/persistance/job_package_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..2dbe4ba749073f0625ac4e56b5c75d7790ee99b7
--- /dev/null
+++ b/autosubmit_api/persistance/job_package_reader.py
@@ -0,0 +1,107 @@
+from typing import Dict, List
+from sqlalchemy import select
+from autosubmit_api.logger import logger
+from autosubmit_api.database import tables
+from autosubmit_api.database.common import AttachedDatabaseConnBuilder
+from autosubmit_api.persistance.experiment import ExperimentPaths
+
+
+class JobPackageReader:
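+ """
+ Reads the job packages database of an experiment and builds the lookup
+ dictionaries used by the API: job -> package, package -> jobs,
+ package -> package id, and package -> drawing symbol.
+
+ Minimal usage sketch:
+ reader = JobPackageReader("a6zj")
+ reader.read()
+ reader.package_to_jobs # {package_name: [job_name, ...]}
+ """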
+
+ def __init__(self, expid: str) -> None:
+ self.expid = expid
+ self._content: List[Dict] = []
+ self._job_to_package: Dict[str, str] = {}
+ self._package_to_jobs: Dict[str, List[str]] = {}
+ self._package_to_package_id: Dict[str, str] = {}
+ self._package_to_symbol: Dict[str, str] = {}
+
+ def read(self):
+ conn_builder = AttachedDatabaseConnBuilder()
+ conn_builder.attach_db(
+ ExperimentPaths(self.expid).job_packages_db, "job_packages"
+ )
+
+ with conn_builder.product as conn:
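+ # Read the job_package table first; if it is empty or unreadable, fall back to wrapper_job_package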
+ try:
+ statement = select(tables.JobPackageTable)
+ self._content = [x._mapping for x in conn.execute(statement).all()]
+ if len(self._content) == 0:
+ raise Warning(
+ "job_packages table empty, trying wrapper_job_packages"
+ )
+ except Exception as exc:
+ logger.warning(exc)
+ statement = select(tables.WrapperJobPackageTable)
+ self._content = [x._mapping for x in conn.execute(statement).all()]
+
+ self._build_job_to_package()
+ self._build_package_to_jobs()
+ self._build_package_to_package_id()
+ self._build_package_to_symbol()
+
+ return self
+
+ def _build_job_to_package(self):
+ try:
+ for row in self._content:
+ package_name: str = row.get("package_name")
+ job_name: str = row.get("job_name")
+
+ if len(str(package_name).strip()) > 0:
+ self._job_to_package[job_name] = package_name
+ except Exception:
+ logger.warning("Error while building job_to_package")
+
+ return self._job_to_package
+
+ def _build_package_to_jobs(self):
+ try:
+ for job_name, package_name in self._job_to_package.items():
+ self._package_to_jobs.setdefault(package_name, []).append(job_name)
+ except Exception:
+ logger.warning("Error while building package_to_jobs")
+
+ return self._package_to_jobs
+
+ def _build_package_to_package_id(self):
+ try:
+ for package_name in self._package_to_jobs:
+ splitted_name = package_name.split("_")
+ if len(splitted_name) >= 3:
+ self._package_to_package_id[package_name] = package_name.split("_")[
+ 2
+ ]
+ except Exception:
+ logger.warning("Error while building package_to_package_id")
+
+ return self._package_to_package_id
+
+ def _build_package_to_symbol(self):
+ try:
+ list_packages = list(self._job_to_package.values())
+ for i, package_name in enumerate(list_packages):
+ self._package_to_symbol[package_name] = "square" if i % 2 == 0 else "hexagon"
+ except Exception:
+ logger.warning("Error while building package_to_symbol")
+
+ return self._package_to_symbol
+
+ @property
+ def job_to_package(self):
+ return self._job_to_package
+
+ @property
+ def package_to_jobs(self):
+ return self._package_to_jobs
+
+ @property
+ def package_to_package_id(self):
+ return self._package_to_package_id
+
+ @property
+ def package_to_symbol(self):
+ return self._package_to_symbol
diff --git a/autosubmit_api/views/v4.py b/autosubmit_api/views/v4.py
index 13585f5071720e54db8d5343595374d2cf0fe7dc..d7101ce372cba4e65599e60652f25e5f1f4f4bc2 100644
--- a/autosubmit_api/views/v4.py
+++ b/autosubmit_api/views/v4.py
@@ -4,7 +4,7 @@ from enum import Enum
from http import HTTPStatus
import math
import traceback
-from typing import Optional
+from typing import Dict, List, Optional
from flask import redirect, request
from flask.views import MethodView
import jwt
@@ -19,15 +19,14 @@ from autosubmit_api.builders.experiment_history_builder import (
from autosubmit_api.common.utils import Status
from autosubmit_api.database import tables
from autosubmit_api.database.common import (
- create_autosubmit_db_engine,
create_main_db_conn,
execute_with_limit_offset,
)
-from autosubmit_api.database.models import ExperimentModel
from autosubmit_api.database.queries import generate_query_listexp_extended
from autosubmit_api.logger import logger, with_log_run_times
from cas import CASClient
from autosubmit_api import config
+from autosubmit_api.persistance.job_package_reader import JobPackageReader
from autosubmit_api.persistance.pkl_reader import PklReader
@@ -217,6 +216,7 @@ class ExperimentView(MethodView):
only_active = request.args.get("only_active") == "true"
owner = request.args.get("owner")
exp_type = request.args.get("exp_type")
+ autosubmit_version = request.args.get("autosubmit_version")
order_by = request.args.get("order_by")
order_desc = request.args.get("order_desc") == "true"
@@ -240,6 +240,7 @@ class ExperimentView(MethodView):
only_active=only_active,
owner=owner,
exp_type=exp_type,
+ autosubmit_version=autosubmit_version,
order_by=order_by,
order_desc=order_desc,
)
@@ -405,3 +406,21 @@ class ExperimentJobsView(MethodView):
pkl_jobs.appendleft(resp_job)
return {"jobs": list(pkl_jobs)}, HTTPStatus.OK
+
+
+class ExperimentWrappersView(MethodView):
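+ """Lists the wrapper packages of an experiment and the job names inside each."""
+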
+ decorators = [with_auth_token(), with_log_run_times(logger, "WRAPPERS")]
+
+ def get(self, expid: str, user_id: Optional[str] = None):
+
+ job_package_reader = JobPackageReader(expid)
+ job_package_reader.read()
+
+ wrappers_dict: Dict[str, List[str]] = job_package_reader.package_to_jobs
+
+ wrappers = []
+ for key, val in wrappers_dict.items():
+ wrappers.append({"wrapper_name": key, "job_names": val})
+
+ logger.debug(wrappers)
+ return {"wrappers": wrappers}
diff --git a/openapi.json b/openapi.json
index 453357158807c7973b56172e52539def3321020e..d5b24179325fc518a550325b286d3860813e05a3 100644
--- a/openapi.json
+++ b/openapi.json
@@ -217,6 +217,13 @@
]
}
},
+ {
+ "name": "autosubmit_version",
+ "in": "query",
+ "schema": {
+ "type": "string"
+ }
+ },
{
"name": "order_by",
"in": "query",
@@ -539,6 +546,84 @@
}
}
},
+ "/v4/experiments/{expid}/wrappers": {
+ "get": {
+ "tags": [
+ "v4"
+ ],
+ "summary": "List experiment wrappers",
+ "operationId": "get-v4-experiments-expid-wrappers",
+ "parameters": [
+ {
+ "name": "expid",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "wrappers": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "job_names": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "wrapper_name": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ },
+ "x-examples": {
+ "Example 1": {
+ "wrappers": [
+ {
+ "job_names": [
+ "a6zi_18500101_fc0_1_SIM",
+ "a6zi_18500101_fc0_2_SIM"
+ ],
+ "wrapper_name": "a6zi_ASThread_17108563159474_528_10"
+ },
+ {
+ "job_names": [
+ "a6zi_18500101_fc0_3_SIM",
+ "a6zi_18500101_fc0_4_SIM",
+ "a6zi_18500101_fc0_5_SIM",
+ "a6zi_18500101_fc0_6_SIM",
+ "a6zi_18500101_fc0_7_SIM",
+ "a6zi_18500101_fc0_8_SIM",
+ "a6zi_18500101_fc0_9_SIM",
+ "a6zi_18500101_fc0_10_SIM",
+ "a6zi_18500101_fc0_11_SIM",
+ "a6zi_18500101_fc0_12_SIM"
+ ],
+ "wrapper_name": "a6zi_ASThread_17108816522924_528_10"
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
"/v4/auth/verify-token": {
"get": {
"tags": [
diff --git a/tests/experiments/a6zj/conf/autosubmit_a6zj.yml b/tests/experiments/a6zj/conf/autosubmit_a6zj.yml
new file mode 100644
index 0000000000000000000000000000000000000000..68ba28c35efad6528fb1b089db7c819cf4927904
--- /dev/null
+++ b/tests/experiments/a6zj/conf/autosubmit_a6zj.yml
@@ -0,0 +1,19 @@
+CONFIG:
+ # Current version of Autosubmit.
+ AUTOSUBMIT_VERSION: "4.0.101"
+ # Maximum number of jobs permitted in the waiting status.
+ MAXWAITINGJOBS: 20
+ # Total number of jobs in the workflow.
+ TOTALJOBS: 20
+ SAFETYSLEEPTIME: 10
+ RETRIALS: 0
+#wrappers:
+# wrapper_sim:
+# TYPE: "vertical"
+# JOBS_IN_WRAPPER: "SIM"
+MAIL:
+ NOTIFICATIONS: False
+ TO:
+STORAGE:
+ TYPE: pkl
+ COPY_REMOTE_LOGS: true
diff --git a/tests/experiments/a6zj/conf/expdef_a6zj.yml b/tests/experiments/a6zj/conf/expdef_a6zj.yml
new file mode 100644
index 0000000000000000000000000000000000000000..14a2141b44de8073d8f6ad6b67ee7f39b21c30b4
--- /dev/null
+++ b/tests/experiments/a6zj/conf/expdef_a6zj.yml
@@ -0,0 +1,43 @@
+DEFAULT:
+ # Job experiment ID.
+ EXPID: "a6zj"
+ # Default HPC platform name.
+ HPCARCH: "MARENOSTRUM4"
+EXPERIMENT:
+ # List of start dates
+ DATELIST: '20000101'
+ # List of members.
+ MEMBERS: fc0
+ # Unit of the chunk size. Can be hour, day, month, or year.
+ CHUNKSIZEUNIT: month
+ # Size of each chunk.
+ CHUNKSIZE: '4'
+ # Number of chunks of the experiment.
+ NUMCHUNKS: '4'
+ CHUNKINI: ''
+ # Calendar used for the experiment. Can be standard or noleap.
+ CALENDAR: standard
+PROJECT:
+ # Type of the project.
+ PROJECT_TYPE: none
+ # Folder to hold the project sources.
+ PROJECT_DESTINATION: ''
+GIT:
+ PROJECT_ORIGIN: ''
+ PROJECT_BRANCH: ''
+ PROJECT_COMMIT: ''
+ PROJECT_SUBMODULES: ''
+ FETCH_SINGLE_BRANCH: true
+SVN:
+ PROJECT_URL: ''
+ PROJECT_REVISION: ''
+LOCAL:
+ PROJECT_PATH: ''
+PROJECT_FILES:
+ FILE_PROJECT_CONF: ''
+ FILE_JOBS_CONF: ''
+ JOB_SCRIPTS_TYPE: ''
+RERUN:
+ RERUN: false
+ RERUN_JOBLIST: ''
+
diff --git a/tests/experiments/a6zj/conf/jobs_a6zj.yml b/tests/experiments/a6zj/conf/jobs_a6zj.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ba803ddcb12398f3cb09374800a81a5e649beb79
--- /dev/null
+++ b/tests/experiments/a6zj/conf/jobs_a6zj.yml
@@ -0,0 +1,41 @@
+WRAPPERS:
+ WRAPPER_V:
+ TYPE: "vertical"
+ JOBS_IN_WRAPPER: "SIM"
+
+JOBS:
+ LOCAL_SETUP:
+ FILE: LOCAL_SETUP.sh
+ PLATFORM: LOCAL
+ RUNNING: once
+ REMOTE_SETUP:
+ FILE: REMOTE_SETUP.sh
+ DEPENDENCIES: LOCAL_SETUP
+ WALLCLOCK: 00:05
+ RUNNING: once
+ INI:
+ FILE: INI.sh
+ DEPENDENCIES: REMOTE_SETUP
+ RUNNING: member
+ WALLCLOCK: 00:05
+ SIM:
+ FILE: SIM.sh
+ DEPENDENCIES: INI SIM-1
+ RUNNING: chunk
+ WALLCLOCK: 00:05
+ POST:
+ FILE: POST.sh
+ DEPENDENCIES: SIM
+ RUNNING: once
+ WALLCLOCK: 00:05
+ CLEAN:
+ FILE: CLEAN.sh
+ DEPENDENCIES: POST
+ RUNNING: once
+ WALLCLOCK: 00:05
+ TRANSFER:
+ FILE: TRANSFER.sh
+ PLATFORM: LOCAL
+ DEPENDENCIES: CLEAN
+ RUNNING: member
+
diff --git a/tests/experiments/a6zj/conf/platforms_a6zj.yml b/tests/experiments/a6zj/conf/platforms_a6zj.yml
new file mode 100644
index 0000000000000000000000000000000000000000..b512b54d52fa28788fc5ca6b61b89b76700a23c9
--- /dev/null
+++ b/tests/experiments/a6zj/conf/platforms_a6zj.yml
@@ -0,0 +1,66 @@
+PLATFORMS:
+ MARENOSTRUM4:
+ TYPE: slurm
+ HOST: mn1.bsc.es
+ PROJECT: bsc32
+ USER: ltenorio
+ QUEUE: debug
+ SCRATCH_DIR: /gpfs/scratch
+ ADD_PROJECT_TO_HOST: false
+ MAX_WALLCLOCK: 48:00
+ TEMP_DIR: ''
+ MARENOSTRUM_ARCHIVE:
+ TYPE: ps
+ HOST: dt02.bsc.es
+ PROJECT: bsc32
+ USER:
+ SCRATCH_DIR: /gpfs/scratch
+ ADD_PROJECT_TO_HOST: false
+ TEST_SUITE: false
+
+ TRANSFER_NODE:
+ TYPE: ps
+ HOST: dt01.bsc.es
+ PROJECT: bsc32
+ USER:
+ ADD_PROJECT_TO_HOST: false
+ SCRATCH_DIR: /gpfs/scratch
+
+ TRANSFER_NODE_BSCEARTH000:
+ TYPE: ps
+ HOST: bscearth000
+ USER:
+ PROJECT: Earth
+ ADD_PROJECT_TO_HOST: false
+ QUEUE: serial
+ SCRATCH_DIR: /esarchive/scratch
+
+ BSCEARTH000:
+ TYPE: ps
+ HOST: bscearth000
+ USER:
+ PROJECT: Earth
+ ADD_PROJECT_TO_HOST: false
+ QUEUE: serial
+ SCRATCH_DIR: /esarchive/scratch
+ NORD3:
+ TYPE: SLURM
+ HOST: nord1.bsc.es
+ PROJECT: bsc32
+ USER:
+ QUEUE: debug
+ SCRATCH_DIR: /gpfs/scratch
+ MAX_WALLCLOCK: 48:00
+
+ ECMWF-XC40:
+ TYPE: ecaccess
+ VERSION: pbs
+ HOST: cca
+ USER:
+ PROJECT: spesiccf
+ ADD_PROJECT_TO_HOST: false
+ SCRATCH_DIR: /scratch/ms
+ QUEUE: np
+ SERIAL_QUEUE: ns
+ MAX_WALLCLOCK: 48:00
+
diff --git a/tests/experiments/a6zj/pkl/job_list_a6zj.pkl b/tests/experiments/a6zj/pkl/job_list_a6zj.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..94fb19fcf14f60698f3110d8f36ddc0bc606c897
Binary files /dev/null and b/tests/experiments/a6zj/pkl/job_list_a6zj.pkl differ
diff --git a/tests/experiments/a6zj/pkl/job_packages_a6zj.db b/tests/experiments/a6zj/pkl/job_packages_a6zj.db
new file mode 100644
index 0000000000000000000000000000000000000000..0487bd719de8cade137e9786baa4b0ac376ff96c
Binary files /dev/null and b/tests/experiments/a6zj/pkl/job_packages_a6zj.db differ
diff --git a/tests/experiments/a6zj/status/a6zj_20240411_1653.txt b/tests/experiments/a6zj/status/a6zj_20240411_1653.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5ca6337b2ac4007e9db2c5e7f69640aaccd001b0
--- /dev/null
+++ b/tests/experiments/a6zj/status/a6zj_20240411_1653.txt
@@ -0,0 +1,10 @@
+a6zj_LOCAL_SETUP READY
+a6zj_REMOTE_SETUP WAITING
+a6zj_20000101_fc0_INI WAITING
+a6zj_20000101_fc0_1_SIM WAITING
+a6zj_20000101_fc0_2_SIM WAITING
+a6zj_20000101_fc0_3_SIM WAITING
+a6zj_20000101_fc0_4_SIM WAITING
+a6zj_POST WAITING
+a6zj_CLEAN WAITING
+a6zj_20000101_fc0_TRANSFER WAITING
diff --git a/tests/experiments/a6zj/tmp/ASLOGS/submit_MARENOSTRUM4.sh b/tests/experiments/a6zj/tmp/ASLOGS/submit_MARENOSTRUM4.sh
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/experiments/as_times.db b/tests/experiments/as_times.db
index 9f643dc9204186ea18f1b182fb11f26213621036..4dcc6f86c6aa9588815e878c922870e8ad2f78a7 100644
Binary files a/tests/experiments/as_times.db and b/tests/experiments/as_times.db differ
diff --git a/tests/experiments/autosubmit.db b/tests/experiments/autosubmit.db
index a6cc8b2615306578b4a0371c179c77c7e7a17b9e..fdca9705b5a01cac55d77fb350ce34b868e060df 100644
Binary files a/tests/experiments/autosubmit.db and b/tests/experiments/autosubmit.db differ
diff --git a/tests/experiments/metadata/data/job_data_a6zj.db b/tests/experiments/metadata/data/job_data_a6zj.db
new file mode 100755
index 0000000000000000000000000000000000000000..55e7012aed31fbcb94aad3da6c1027fb7000d960
Binary files /dev/null and b/tests/experiments/metadata/data/job_data_a6zj.db differ
diff --git a/tests/experiments/metadata/data/job_data_a6zj.sql b/tests/experiments/metadata/data/job_data_a6zj.sql
new file mode 100644
index 0000000000000000000000000000000000000000..8a1820b1141bb510171ed3af7adfcfacbac11821
--- /dev/null
+++ b/tests/experiments/metadata/data/job_data_a6zj.sql
@@ -0,0 +1,58 @@
+PRAGMA foreign_keys=OFF;
+BEGIN TRANSACTION;
+CREATE TABLE experiment_run (
+ run_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ created TEXT NOT NULL,
+ modified TEXT NOT NULL,
+ start INTEGER NOT NULL,
+ finish INTEGER,
+ chunk_unit TEXT NOT NULL,
+ chunk_size INTEGER NOT NULL,
+ completed INTEGER NOT NULL,
+ total INTEGER NOT NULL,
+ failed INTEGER NOT NULL,
+ queuing INTEGER NOT NULL,
+ running INTEGER NOT NULL,
+ submitted INTEGER NOT NULL,
+ suspended INTEGER NOT NULL DEFAULT 0,
+ metadata TEXT
+ );
+INSERT INTO experiment_run VALUES(1,'2024-04-11-16:53:56','2024-04-11-16:53:56',1712847236,0,'month',4,0,10,0,0,0,0,0,'{"CONFIG": {"AUTOSUBMIT_VERSION": "4.0.101", "MAXWAITINGJOBS": 20, "TOTALJOBS": 20, "SAFETYSLEEPTIME": 10, "RETRIALS": 0}, "MAIL": {"NOTIFICATIONS": false, "TO": null}, "STORAGE": {"TYPE": "pkl", "COPY_REMOTE_LOGS": true}, "DEFAULT": {"EXPID": "a6zj", "HPCARCH": "MARENOSTRUM4"}, "EXPERIMENT": {"DATELIST": "20000101", "MEMBERS": "fc0", "CHUNKSIZEUNIT": "month", "CHUNKSIZE": 4, "NUMCHUNKS": 4, "CHUNKINI": "", "CALENDAR": "standard"}, "PROJECT": {"PROJECT_TYPE": "none", "PROJECT_DESTINATION": ""}, "GIT": {"PROJECT_ORIGIN": "", "PROJECT_BRANCH": "", "PROJECT_COMMIT": "", "PROJECT_SUBMODULES": "", "FETCH_SINGLE_BRANCH": true}, "SVN": {"PROJECT_URL": "", "PROJECT_REVISION": ""}, "LOCAL": {"PROJECT_PATH": ""}, "PROJECT_FILES": {"FILE_PROJECT_CONF": "", "FILE_JOBS_CONF": "", "JOB_SCRIPTS_TYPE": ""}, "RERUN": {"RERUN": false, "RERUN_JOBLIST": ""}, "WRAPPERS": {"WRAPPER_V": {"TYPE": "vertical", "JOBS_IN_WRAPPER": "SIM"}}, "JOBS": {"LOCAL_SETUP": {"FILE": "LOCAL_SETUP.sh", "PLATFORM": "LOCAL", "RUNNING": "once", "DEPENDENCIES": {}, "ADDITIONAL_FILES": []}, "REMOTE_SETUP": {"FILE": "REMOTE_SETUP.sh", "DEPENDENCIES": {"LOCAL_SETUP": {}}, "WALLCLOCK": "00:05", "RUNNING": "once", "ADDITIONAL_FILES": []}, "INI": {"FILE": "INI.sh", "DEPENDENCIES": {"REMOTE_SETUP": {}}, "RUNNING": "member", "WALLCLOCK": "00:05", "ADDITIONAL_FILES": []}, "SIM": {"FILE": "SIM.sh", "DEPENDENCIES": {"INI": {}, "SIM-1": {}}, "RUNNING": "chunk", "WALLCLOCK": "00:05", "ADDITIONAL_FILES": []}, "POST": {"FILE": "POST.sh", "DEPENDENCIES": {"SIM": {}}, "RUNNING": "once", "WALLCLOCK": "00:05", "ADDITIONAL_FILES": []}, "CLEAN": {"FILE": "CLEAN.sh", "DEPENDENCIES": {"POST": {}}, "RUNNING": "once", "WALLCLOCK": "00:05", "ADDITIONAL_FILES": []}, "TRANSFER": {"FILE": "TRANSFER.sh", "PLATFORM": "LOCAL", "DEPENDENCIES": {"CLEAN": {}}, "RUNNING": "member", "ADDITIONAL_FILES": []}}, "PLATFORMS": {"MARENOSTRUM4": {"TYPE": "slurm", "HOST": "mn1.bsc.es", "PROJECT": "bsc32", "USER": "ltenorio", "QUEUE": "debug", "SCRATCH_DIR": "/gpfs/scratch", "ADD_PROJECT_TO_HOST": false, "MAX_WALLCLOCK": "48:00", "TEMP_DIR": ""}, "MARENOSTRUM_ARCHIVE": {"TYPE": "ps", "HOST": "dt02.bsc.es", "PROJECT": "bsc32", "USER": null, "SCRATCH_DIR": "/gpfs/scratch", "ADD_PROJECT_TO_HOST": false, "TEST_SUITE": false}, "TRANSFER_NODE": {"TYPE": "ps", "HOST": "dt01.bsc.es", "PROJECT": "bsc32", "USER": null, "ADD_PROJECT_TO_HOST": false, "SCRATCH_DIR": "/gpfs/scratch"}, "TRANSFER_NODE_BSCEARTH000": {"TYPE": "ps", "HOST": "bscearth000", "USER": null, "PROJECT": "Earth", "ADD_PROJECT_TO_HOST": false, "QUEUE": "serial", "SCRATCH_DIR": "/esarchive/scratch"}, "BSCEARTH000": {"TYPE": "ps", "HOST": "bscearth000", "USER": null, "PROJECT": "Earth", "ADD_PROJECT_TO_HOST": false, "QUEUE": "serial", "SCRATCH_DIR": "/esarchive/scratch"}, "NORD3": {"TYPE": "SLURM", "HOST": "nord1.bsc.es", "PROJECT": "bsc32", "USER": null, "QUEUE": "debug", "SCRATCH_DIR": "/gpfs/scratch", "MAX_WALLCLOCK": "48:00"}, "ECMWF-XC40": {"TYPE": "ecaccess", "VERSION": "pbs", "HOST": "cca", "USER": null, "PROJECT": "spesiccf", "ADD_PROJECT_TO_HOST": false, "SCRATCH_DIR": "/scratch/ms", "QUEUE": "np", "SERIAL_QUEUE": "ns", "MAX_WALLCLOCK": "48:00"}}, "ROOTDIR": "/home/ltenorio/autosubmit/a6zj", "PROJDIR": "/home/ltenorio/autosubmit/a6zj/proj/"}');
+CREATE TABLE job_data (
+ id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ counter INTEGER NOT NULL,
+ job_name TEXT NOT NULL,
+ created TEXT NOT NULL,
+ modified TEXT NOT NULL,
+ submit INTEGER NOT NULL,
+ start INTEGER NOT NULL,
+ finish INTEGER NOT NULL,
+ status TEXT NOT NULL,
+ rowtype INTEGER NOT NULL,
+ ncpus INTEGER NOT NULL,
+ wallclock TEXT NOT NULL,
+ qos TEXT NOT NULL,
+ energy INTEGER NOT NULL,
+ date TEXT NOT NULL,
+ section TEXT NOT NULL,
+ member TEXT NOT NULL,
+ chunk INTEGER NOT NULL,
+ last INTEGER NOT NULL,
+ platform TEXT NOT NULL,
+ job_id INTEGER NOT NULL,
+ extra_data TEXT NOT NULL,
+ nnodes INTEGER NOT NULL DEFAULT 0,
+ run_id INTEGER,
+ MaxRSS REAL NOT NULL DEFAULT 0.0,
+ AveRSS REAL NOT NULL DEFAULT 0.0,
+ out TEXT NOT NULL,
+ err TEXT NOT NULL,
+ rowstatus INTEGER NOT NULL DEFAULT 0,
+ children TEXT,
+ platform_output TEXT,
+ UNIQUE(counter,job_name)
+ );
+DELETE FROM sqlite_sequence;
+INSERT INTO sqlite_sequence VALUES('experiment_run',1);
+CREATE INDEX ID_JOB_NAME ON job_data(job_name);
+COMMIT;
diff --git a/tests/experiments/metadata/graph/graph_data_a003.db b/tests/experiments/metadata/graph/graph_data_a003.db
index abc044818f003c3541e7589fcabd27eca7faaf41..41b2c4f84c4a700e3a35394a2aea257543e762d4 100755
Binary files a/tests/experiments/metadata/graph/graph_data_a003.db and b/tests/experiments/metadata/graph/graph_data_a003.db differ
diff --git a/tests/experiments/metadata/graph/graph_data_a6zj.db b/tests/experiments/metadata/graph/graph_data_a6zj.db
new file mode 100755
index 0000000000000000000000000000000000000000..ef961e6bc817486a37b87ead34e3328b9bd4735a
Binary files /dev/null and b/tests/experiments/metadata/graph/graph_data_a6zj.db differ
diff --git a/tests/experiments/metadata/structures/structure_a6zj.db b/tests/experiments/metadata/structures/structure_a6zj.db
new file mode 100644
index 0000000000000000000000000000000000000000..92722f8224aa9e6025ff29562837d7ae085e7d01
Binary files /dev/null and b/tests/experiments/metadata/structures/structure_a6zj.db differ
diff --git a/tests/test_endpoints_v3.py b/tests/test_endpoints_v3.py
index c4a1bdaf47a47c5b0cf06e6b80420be1996c5d7b..e38c75bce6afbf34e71df2700fca5355efe84bba 100644
--- a/tests/test_endpoints_v3.py
+++ b/tests/test_endpoints_v3.py
@@ -176,6 +176,23 @@ class TestTree:
for job in resp_obj["jobs"]:
assert job["id"][:4] == expid
+ def test_wrappers(self, fixture_client: FlaskClient):
+ expid = "a6zj"
+ response = fixture_client.get(self.endpoint.format(expid=expid))
+ resp_obj: dict = response.get_json()
+
+ assert len(resp_obj["jobs"]) == 10
+
+ for job in resp_obj["jobs"]:
+ if job["section"] == "SIM":
+ assert isinstance(job["wrapper"], str) and len(job["wrapper"]) > 0
+ else:
+ assert job["wrapper"] == None
+
+ assert (
+ resp_obj["tree"][2]["title"] == "Wrappers" and resp_obj["tree"][2]["folder"]
+ )
+
class TestRunsList:
endpoint = "/v3/runs/{expid}"
@@ -310,6 +327,26 @@ class TestGraph:
assert resp_obj["error"] == False
assert resp_obj["total_jobs"] == len(resp_obj["nodes"])
+ def test_wrappers(self, fixture_client: FlaskClient):
+ expid = "a6zj"
+ random_user = str(uuid4())
+ response = fixture_client.get(
+ self.endpoint.format(expid=expid, graph_type="standard", grouped="none"),
+ query_string={"loggedUser": random_user},
+ )
+ resp_obj: dict = response.get_json()
+
+ assert len(resp_obj["nodes"]) == 10
+
+ for node in resp_obj["nodes"]:
+ if node["section"] == "SIM":
+ assert isinstance(node["wrapper"], str) and len(node["wrapper"]) > 0
+ else:
+ assert node["wrapper"] == None
+
+ assert "packages" in list(resp_obj.keys())
+ assert len(resp_obj["packages"].keys()) > 0
+
class TestExpCount:
endpoint = "/v3/expcount/{expid}"
@@ -526,6 +563,7 @@ class TestSearchExpid:
assert isinstance(resp_obj["experiment"], list)
assert len(resp_obj["experiment"]) > 0
+
class TestRunningExps:
endpoint = "/v3/running/"
diff --git a/tests/test_endpoints_v4.py b/tests/test_endpoints_v4.py
index fec4dbab52c99bfb9f53b8cf35ccef5b0bad6a8f..dc83894f9c90849e7358a6ae11efc414c0f1695f 100644
--- a/tests/test_endpoints_v4.py
+++ b/tests/test_endpoints_v4.py
@@ -162,3 +162,24 @@ class TestExperimentJobs:
assert isinstance(job, dict) and len(job.keys()) > 2
assert isinstance(job["name"], str) and job["name"].startswith(expid)
assert isinstance(job["status"], str)
+
+
+class TestExperimentWrappers:
+ endpoint = "/v4/experiments/{expid}/wrappers"
+
+ def test_wrappers(self, fixture_client: FlaskClient):
+ expid = "a6zj"
+
+ response = fixture_client.get(self.endpoint.format(expid=expid))
+ resp_obj: dict = response.get_json()
+
+ assert isinstance(resp_obj, dict)
+ assert isinstance(resp_obj["wrappers"], list)
+ assert len(resp_obj["wrappers"]) == 1
+
+ for wrapper in resp_obj["wrappers"]:
+ assert isinstance(wrapper, dict)
+ assert isinstance(wrapper["job_names"], list)
+ assert isinstance(wrapper["wrapper_name"], str) and wrapper[
+ "wrapper_name"
+ ].startswith(expid)