From 41e612b695bf0aa2c60c0a3bec7e86152909160f Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 30 Apr 2024 16:29:38 +0200 Subject: [PATCH 01/26] extend Basic Config to handle postgres (should be reverted later) --- autosubmit_api/config/basicConfig.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/autosubmit_api/config/basicConfig.py b/autosubmit_api/config/basicConfig.py index c73a51e..30b5e1a 100644 --- a/autosubmit_api/config/basicConfig.py +++ b/autosubmit_api/config/basicConfig.py @@ -31,6 +31,8 @@ class APIBasicConfig(BasicConfig): FILE_STATUS_DIR = os.path.join(os.path.expanduser('~'), 'autosubmit', 'metadata', 'test') FILE_STATUS_DB = 'status.db' ALLOWED_CLIENTS = set([]) + DATABASE_BACKEND = "sqlite" # TODO Move to the config parser repo + DATABASE_CONN_URL = "" # TODO Move to the config parser repo @staticmethod def __read_file_config(file_path): @@ -53,6 +55,10 @@ class APIBasicConfig(BasicConfig): APIBasicConfig.FILE_STATUS_DB = parser.get('statusdb', 'filename') if parser.has_option('clients', 'authorized'): APIBasicConfig.ALLOWED_CLIENTS = set(parser.get('clients', 'authorized').split()) + if parser.has_option('database', 'backend'): + APIBasicConfig.DATABASE_BACKEND = parser.get('database', 'backend') + if parser.has_option('database', 'connection_url'): + APIBasicConfig.DATABASE_CONN_URL = parser.get('database', 'connection_url') @staticmethod -- GitLab From 272d6b8e5293ee893a4c963e0c8c6d8001d0c5fb Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Thu, 2 May 2024 09:36:29 +0200 Subject: [PATCH 02/26] add engine switch --- autosubmit_api/database/__init__.py | 27 +++++++++++++++-------- autosubmit_api/database/common.py | 34 ++++++++++++++++++++++++++--- setup.py | 3 ++- 3 files changed, 51 insertions(+), 13 deletions(-) diff --git a/autosubmit_api/database/__init__.py b/autosubmit_api/database/__init__.py index e2c02e2..03445e0 100644 --- a/autosubmit_api/database/__init__.py +++ b/autosubmit_api/database/__init__.py @@ -1,19 +1,28 @@ -from sqlalchemy import text +from sqlalchemy import Connection, Table from autosubmit_api.database.common import ( create_as_times_db_engine, create_autosubmit_db_engine, ) -from autosubmit_api.database.tables import experiment_status_table, details_table +from autosubmit_api.database import tables + + +def _create_autosubmit_db_tables(conn: Connection): + experiment_table: Table = tables.ExperimentTable.__table__ + experiment_table.create(conn, checkfirst=True) + details_table: Table = tables.DetailsTable.__table__ + details_table.create(conn, checkfirst=True) + + +def _create_as_times_db_tables(conn: Connection): + experiment_status_table: Table = tables.ExperimentStatusTable.__table__ + experiment_status_table.create(conn, checkfirst=True) def prepare_db(): with create_as_times_db_engine().connect() as conn: - experiment_status_table.create(conn, checkfirst=True) + _create_as_times_db_tables(conn) + conn.commit() with create_autosubmit_db_engine().connect() as conn: - details_table.create(conn, checkfirst=True) - - view_name = "listexp" - view_from = "select id,name,user,created,model,branch,hpc,description from experiment left join details on experiment.id = details.exp_id" - new_view_stmnt = f"CREATE VIEW IF NOT EXISTS {view_name} as {view_from}" - conn.execute(text(new_view_stmnt)) + _create_autosubmit_db_tables(conn) + conn.commit() diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index defe705..2fc7812 100644 --- a/autosubmit_api/database/common.py +++ 
b/autosubmit_api/database/common.py @@ -9,11 +9,33 @@ from sqlalchemy import ( select, text, func, + Table, ) +from sqlalchemy.orm import DeclarativeBase from autosubmit_api.builders import BaseBuilder from autosubmit_api.logger import logger from autosubmit_api.config.basicConfig import APIBasicConfig +APIBasicConfig.read() +postgres_engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) + +def get_postgres_engine(): + db = postgres_engine + if not isinstance(db, Engine): + APIBasicConfig.read() + db = create_engine(APIBasicConfig.DATABASE_CONN_URL) + return db + + +def copy_rename_table(source_table: DeclarativeBase, new_name: str): + dest_table = Table(new_name) + + core_source_table: Table = source_table.__table__ + for col in core_source_table.columns: + dest_table.append_column(col) + + return dest_table + class AttachedDatabaseConnBuilder(BaseBuilder): """ @@ -48,6 +70,9 @@ def create_main_db_conn() -> Connection: """ Connection with the autosubmit and as_times DDBB. """ + APIBasicConfig.read() + if APIBasicConfig.DATABASE_BACKEND == "postgres": + return get_postgres_engine().connect() builder = AttachedDatabaseConnBuilder() builder.attach_autosubmit_db() builder.attach_as_times_db() @@ -60,16 +85,19 @@ def create_autosubmit_db_engine() -> Engine: Create an engine for the autosubmit DDBB. Usually named autosubmit.db """ APIBasicConfig.read() - return create_engine( - f"sqlite:///{ os.path.abspath(APIBasicConfig.DB_PATH)}", poolclass=NullPool - ) + if APIBasicConfig.DATABASE_BACKEND == "postgres": + return get_postgres_engine() + return create_engine(f"sqlite:///{ os.path.abspath(APIBasicConfig.DB_PATH)}", poolclass=NullPool) def create_as_times_db_engine() -> Engine: """ Create an engine for the AS_TIMES DDBB. Usually named as_times.db """ + APIBasicConfig.read() + if APIBasicConfig.DATABASE_BACKEND == "postgres": + return get_postgres_engine() db_path = os.path.join(APIBasicConfig.DB_DIR, APIBasicConfig.AS_TIMES_DB) return create_engine(f"sqlite:///{ os.path.abspath(db_path)}", poolclass=NullPool) diff --git a/setup.py b/setup.py index 7ab919b..4bd18f4 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,8 @@ install_requires = [ "gunicorn~=22.0.0", "pydantic~=2.5.2", "SQLAlchemy~=2.0.23", - "python-cas~=1.6.0" + "python-cas>=1.6.0", + "psycopg2>=2.9.9" ] # Test dependencies -- GitLab From 14ab6831555fb3eacc67c31d0cdef4547f8216a3 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 3 May 2024 16:30:47 +0200 Subject: [PATCH 03/26] abstract experiment_graph_drawing table adapter --- .../components/experiment/graph_drawer.py | 204 +++++++++++++++++ .../components/representations/graph/graph.py | 2 +- autosubmit_api/database/adapters/__init__.py | 0 .../database/adapters/graph_draw.py | 56 +++++ autosubmit_api/database/common.py | 30 ++- autosubmit_api/database/db_jobdata.py | 214 ------------------ .../business/process_graph_drawings.py | 2 +- tests/experiments/.autosubmitrc | 1 + tests/test_graph.py | 2 +- 9 files changed, 291 insertions(+), 220 deletions(-) create mode 100644 autosubmit_api/components/experiment/graph_drawer.py create mode 100644 autosubmit_api/database/adapters/__init__.py create mode 100644 autosubmit_api/database/adapters/graph_draw.py diff --git a/autosubmit_api/components/experiment/graph_drawer.py b/autosubmit_api/components/experiment/graph_drawer.py new file mode 100644 index 0000000..cb23c59 --- /dev/null +++ b/autosubmit_api/components/experiment/graph_drawer.py @@ -0,0 +1,204 @@ +from typing import List, Optional, Tuple +import portalocker +import os 
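+# NOTE (editorial annotation, not part of the original commit): this module
+# takes over the graph-layout logic that this same patch removes from
+# db_jobdata.py, persisting node coordinates through ExpGraphDrawDBAdapter
+# instead of raw sqlite3 calls; in the API it is driven from
+# workers/business/process_graph_drawings.py. A rough synchronous usage
+# sketch, with a hypothetical expid and job list:
+#
+#     drawer = ExperimentGraphDrawing("a000")
+#     positions = drawer.get_validated_data(jobs)
+#     if positions is None:  # layout is missing or stale
+#         positions = drawer.calculate_drawing(jobs)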
+import traceback +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter +from autosubmit_api.logger import logger +from autosubmit_api.monitor.monitor import Monitor + + +class ExperimentGraphDrawing: + def __init__(self, expid): + """ + Sets and validates graph drawing. + :param expid: Name of experiment + :type expid: str + """ + APIBasicConfig.read() + self.expid = expid + self.folder_path = APIBasicConfig.LOCAL_ROOT_DIR + self.graph_data_db = ExpGraphDrawDBAdapter(expid) + self.lock_name = "calculation_in_progress.lock" + self.current_position_dictionary = None + self.current_jobs_set = set() + self.coordinates = list() + self.set_current_position() + self.should_update = False + self.locked = False + self.test_locked() + + def test_locked(self): + self.locked = True + try: + with portalocker.Lock( + os.path.join(self.folder_path, self.lock_name), timeout=1 + ) as fh: + self.locked = False + fh.flush() + os.fsync(fh.fileno()) + except portalocker.AlreadyLocked: + logger.error("It is locked") + self.locked = True + except Exception as exp: + self.locked = True + + def get_validated_data(self, allJobs): + """ + Validates if should update current graph drawing. + :return: None if graph drawing should be updated, otherwise, it returns the position data. + :rype: None or dict() + """ + job_names = {job.name for job in allJobs} + # Validating content + difference = job_names - self.current_jobs_set + if difference and len(difference) > 0: + # Intersection found. Graph Drawing database needs to be updated + self.should_update = True + # Clear database + return None + return self.current_position_dictionary + # return None if self.should_update == True else self.current_position_dictionary + + def calculate_drawing( + self, allJobs, independent=False, num_chunks=48, job_dictionary=None + ): + """ + Called in a thread. + :param allJobs: list of jobs (usually from job_list object) + :type allJobs: list() + :return: Last row Id + :rtype: int + """ + lock_name = ( + "calculation_{}_in_progress.lock".format(self.expid) + if independent == True + else self.lock_name + ) + lock_path_file = os.path.join(self.folder_path, lock_name) + try: + with portalocker.Lock(lock_path_file, timeout=1) as fh: + monitor = Monitor() + graph = monitor.create_tree_list( + self.expid, allJobs, None, dict(), False, job_dictionary + ) + if len(allJobs) > 1000: + # Logic: Start with 48 as acceptable number of chunks for Gmaxiter = 100 + # Minimum Gmaxiter will be 10 + maxiter = max(10, 148 - num_chunks) + # print("Experiment {} num_chunk {} maxiter {}".format( + # self.expid, num_chunks, maxiter)) + result = graph.create( + [ + "dot", + "-Gnslimit=2", + "-Gnslimit1=2", + "-Gmaxiter={}".format(maxiter), + "-Gsplines=none", + "-v", + ], + format="plain", + ) + else: + result = graph.create("dot", format="plain") + for u in result.split(b"\n"): + splitList = u.split(b" ") + if len(splitList) > 1 and splitList[0].decode() == "node": + + self.coordinates.append( + ( + splitList[1].decode(), + int(float(splitList[2].decode()) * 90), + int(float(splitList[3].decode()) * -90), + ) + ) + # self.coordinates[splitList[1]] = ( + # int(float(splitList[2]) * 90), int(float(splitList[3]) * -90)) + self.insert_coordinates() + fh.flush() + os.fsync(fh.fileno()) + os.remove(lock_path_file) + return self.get_validated_data(allJobs) + except portalocker.AlreadyLocked: + message = "Already calculating graph drawing." 
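+            # Another process already holds the drawing lock for this
+            # experiment, so this run skips the expensive layout step.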
+ print(message) + return None + except Exception as exc: + logger.error((traceback.format_exc())) + os.remove(lock_path_file) + logger.error( + ("Exception while calculating coordinates {}".format(str(exc))) + ) + return None + + def insert_coordinates(self) -> Optional[int]: + """ + Prepares and inserts new coordinates. + """ + try: + # Start by clearing database + self._clear_graph_database() + result = None + if self.coordinates and len(self.coordinates) > 0: + result = self._insert_many_graph_coordinates(self.coordinates) + return result + return None + except Exception as exc: + logger.error((str(exc))) + return None + + def set_current_position(self) -> None: + """ + Sets all registers in the proper variables. + current_position_dictionary: JobName -> (x, y) + current_jobs_set: JobName + """ + current_table = self._get_current_position() + if current_table and len(current_table) > 0: + self.current_position_dictionary = { + row[1]: (row[2], row[3]) for row in current_table + } + self.current_jobs_set = set(self.current_position_dictionary.keys()) + + def _get_current_position(self) -> List[Tuple[int, str, int, int]]: + """ + Get all registers from experiment_graph_draw.\n + :return: row content: id, job_name, x, y + :rtype: 4-tuple (int, str, int, int) + """ + try: + result = self.graph_data_db.get_all() + return [(item.id, item.job_name, item.x, item.y) for item in result] + except Exception as exc: + logger.error((traceback.format_exc())) + logger.error((str(exc))) + return None + + def _insert_many_graph_coordinates( + self, values: List[Tuple[str, int, int]] + ) -> Optional[int]: + """ + Create many graph coordinates + """ + try: + _vals = [ + {"job_name": item[0], "x": item[1], "y": item[2]} for item in values + ] + logger.debug(_vals) + return self.graph_data_db.insert_many(_vals) + except Exception as exc: + logger.error((traceback.format_exc())) + logger.error("Error on Insert many graph drawing : {}".format(str(exc))) + return None + + def _clear_graph_database(self): + """ + Clear all content from graph drawing database + """ + try: + self.graph_data_db.delete_all() + except Exception as exc: + logger.error((traceback.format_exc())) + logger.error(("Error on Database clear: {}".format(str(exc)))) + return False + return True diff --git a/autosubmit_api/components/representations/graph/graph.py b/autosubmit_api/components/representations/graph/graph.py index 5e4c96c..63f4901 100644 --- a/autosubmit_api/components/representations/graph/graph.py +++ b/autosubmit_api/components/representations/graph/graph.py @@ -9,7 +9,7 @@ from networkx.linalg.laplacianmatrix import laplacian_matrix from ...jobs.job_factory import Job from ...jobs.joblist_loader import JobListLoader from ....monitor.monitor import Monitor -from ....database.db_jobdata import ExperimentGraphDrawing +from ...experiment.graph_drawer import ExperimentGraphDrawing from .edge import Edge, RealEdge from typing import List, Dict, Tuple, Set, Any diff --git a/autosubmit_api/database/adapters/__init__.py b/autosubmit_api/database/adapters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/adapters/graph_draw.py new file mode 100644 index 0000000..8aed139 --- /dev/null +++ b/autosubmit_api/database/adapters/graph_draw.py @@ -0,0 +1,56 @@ +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database import tables +from autosubmit_api.database.common import get_postgres_engine, table_change_schema +from 
autosubmit_api.persistance.experiment import ExperimentPaths + + +from sqlalchemy import NullPool, create_engine +from sqlalchemy.schema import CreateSchema + + +from typing import Any, Dict, List + + +class ExpGraphDrawDBAdapter: + + def __init__(self, expid: str) -> None: + self.expid = expid + + if APIBasicConfig.DATABASE_BACKEND == "postgres": + self.table = table_change_schema(expid, tables.GraphDataTable) + self.engine = get_postgres_engine() + with self.engine.connect() as conn: + conn.execute(CreateSchema(self.expid, if_not_exists=True)) + self.table.create(conn, checkfirst=True) + conn.commit() + + else: + self.table = tables.GraphDataTable.__table__ + sqlite_graph_db_path = ExperimentPaths(expid).graph_data_db + self.engine = create_engine( + f"sqlite:///{ sqlite_graph_db_path}", poolclass=NullPool + ) + with self.engine.connect() as conn: + self.table.create(conn, checkfirst=True) + conn.commit() + + def get_all(self) -> List[Dict[str, Any]]: + with self.engine.connect() as conn: + result = conn.execute(self.table.select()).all() + + return [x._mapping for x in result] + + def delete_all(self) -> int: + with self.engine.connect() as conn: + result = conn.execute(self.table.delete()) + conn.commit() + return result.rowcount + + def insert_many(self, values) -> int: + with self.engine.connect() as conn: + result = conn.execute(self.table.insert(), values) + conn.commit() + return result.rowcount + + + diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index 2fc7812..b5d3efd 100644 --- a/autosubmit_api/database/common.py +++ b/autosubmit_api/database/common.py @@ -1,8 +1,9 @@ import os -from typing import Any +from typing import Any, Union from sqlalchemy import ( Connection, Engine, + MetaData, NullPool, Select, create_engine, @@ -17,10 +18,10 @@ from autosubmit_api.logger import logger from autosubmit_api.config.basicConfig import APIBasicConfig APIBasicConfig.read() -postgres_engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) +_postgres_engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) def get_postgres_engine(): - db = postgres_engine + db = _postgres_engine if not isinstance(db, Engine): APIBasicConfig.read() db = create_engine(APIBasicConfig.DATABASE_CONN_URL) @@ -124,3 +125,26 @@ def execute_with_limit_offset( total = conn.scalar(count_stmnt) return query_result, total + + +def table_change_schema(schema: str, source: Union[DeclarativeBase, Table]) -> Table: + """ + Copy the source table and change the schema of that SQLAlchemy table into a new table instance + """ + if issubclass(source, DeclarativeBase): + _source_table: Table = source.__table__ + elif isinstance(source, Table): + _source_table = source + else: + raise RuntimeError("Invalid source type on table schema change") + + metadata = MetaData(schema=schema) + dest_table = Table(_source_table.name, metadata) + + for col in _source_table.columns: + dest_table.append_column(col.copy()) + + logger.debug(_source_table.columns) + logger.debug(dest_table.columns) + + return dest_table diff --git a/autosubmit_api/database/db_jobdata.py b/autosubmit_api/database/db_jobdata.py index b19d6c2..e2198bd 100644 --- a/autosubmit_api/database/db_jobdata.py +++ b/autosubmit_api/database/db_jobdata.py @@ -23,7 +23,6 @@ import textwrap import traceback import sqlite3 import collections -import portalocker from datetime import datetime, timedelta from json import loads from time import mktime @@ -523,219 +522,6 @@ class MainDataBase(): return None -class ExperimentGraphDrawing(MainDataBase): 
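+# NOTE (editorial annotation, not part of the original commit): the legacy
+# sqlite3-backed ExperimentGraphDrawing removed here was reimplemented in
+# autosubmit_api/components/experiment/graph_drawer.py on top of the new
+# ExpGraphDrawDBAdapter.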
- def __init__(self, expid): - """ - Sets and validates graph drawing. - :param expid: Name of experiment - :type expid: str - :param allJobs: list of all jobs objects (usually from job_list) - :type allJobs: list() - """ - MainDataBase.__init__(self, expid) - APIBasicConfig.read() - self.expid = expid - exp_paths = ExperimentPaths(expid) - self.folder_path = APIBasicConfig.LOCAL_ROOT_DIR - self.database_path = exp_paths.graph_data_db - self.create_table_query = textwrap.dedent( - '''CREATE TABLE - IF NOT EXISTS experiment_graph_draw ( - id INTEGER PRIMARY KEY, - job_name text NOT NULL, - x INTEGER NOT NULL, - y INTEGER NOT NULL - );''') - - if not os.path.exists(self.database_path): - os.umask(0) - if not os.path.exists(os.path.dirname(self.database_path)): - os.makedirs(os.path.dirname(self.database_path)) - os.open(self.database_path, os.O_WRONLY | os.O_CREAT, 0o777) - self.conn = self.create_connection(self.database_path) - self.create_table() - else: - self.conn = self.create_connection(self.database_path) - self.lock_name = "calculation_in_progress.lock" - self.current_position_dictionary = None - self.current_jobs_set = set() - self.coordinates = list() - self.set_current_position() - self.should_update = False - self.locked = False - self.test_locked() - - def test_locked(self): - self.locked = True - try: - with portalocker.Lock(os.path.join(self.folder_path, self.lock_name), timeout=1) as fh: - self.locked = False - fh.flush() - os.fsync(fh.fileno()) - except portalocker.AlreadyLocked: - print("It is locked") - self.locked = True - except Exception as exp: - self.locked = True - - def get_validated_data(self, allJobs): - """ - Validates if should update current graph drawing. - :return: None if graph drawing should be updated, otherwise, it returns the position data. - :rype: None or dict() - """ - job_names = {job.name for job in allJobs} - # Validating content - difference = job_names - self.current_jobs_set - if difference and len(difference) > 0: - # Intersection found. Graph Drawing database needs to be updated - self.should_update = True - # Clear database - return None - return self.current_position_dictionary - # return None if self.should_update == True else self.current_position_dictionary - - def calculate_drawing(self, allJobs, independent=False, num_chunks=48, job_dictionary=None): - """ - Called in a thread. 
- :param allJobs: list of jobs (usually from job_list object) - :type allJobs: list() - :return: Last row Id - :rtype: int - """ - lock_name = "calculation_{}_in_progress.lock".format(self.expid) if independent == True else self.lock_name - lock_path_file = os.path.join(self.folder_path, lock_name) - try: - with portalocker.Lock(lock_path_file, timeout=1) as fh: - self.conn = self.create_connection(self.database_path) - monitor = Monitor() - graph = monitor.create_tree_list( - self.expid, allJobs, None, dict(), False, job_dictionary) - if len(allJobs) > 1000: - # Logic: Start with 48 as acceptable number of chunks for Gmaxiter = 100 - # Minimum Gmaxiter will be 10 - maxiter = max(10, 148 - num_chunks) - # print("Experiment {} num_chunk {} maxiter {}".format( - # self.expid, num_chunks, maxiter)) - result = graph.create( - ['dot', '-Gnslimit=2', '-Gnslimit1=2', '-Gmaxiter={}'.format(maxiter), '-Gsplines=none', '-v'], format="plain") - else: - result = graph.create('dot', format="plain") - for u in result.split(b"\n"): - splitList = u.split(b" ") - if len(splitList) > 1 and splitList[0].decode() == "node": - - self.coordinates.append((splitList[1].decode(), int( - float(splitList[2].decode()) * 90), int(float(splitList[3].decode()) * -90))) - # self.coordinates[splitList[1]] = ( - # int(float(splitList[2]) * 90), int(float(splitList[3]) * -90)) - self.insert_coordinates() - fh.flush() - os.fsync(fh.fileno()) - os.remove(lock_path_file) - return self.get_validated_data(allJobs) - except portalocker.AlreadyLocked: - message = "Already calculating graph drawing." - print(message) - return None - except Exception as exp: - print((traceback.format_exc())) - os.remove(lock_path_file) - print(("Exception while calculating coordinates {}".format(str(exp)))) - return None - - def insert_coordinates(self): - """ - Prepares and inserts new coordinates. - """ - try: - # Start by clearing database - self._clear_graph_database() - result = None - if self.coordinates and len(self.coordinates) > 0: - result = self._insert_many_graph_coordinates(self.coordinates) - return result - return None - except Exception as exp: - print((str(exp))) - return None - - def set_current_position(self): - """ - Sets all registers in the proper variables. - current_position_dictionary: JobName -> (x, y) - current_jobs_set: JobName - """ - current_table = self._get_current_position() - if current_table and len(current_table) > 0: - self.current_position_dictionary = {row[1]: (row[2], row[3]) for row in current_table} - self.current_jobs_set = set(self.current_position_dictionary.keys()) - - def _get_current_position(self): - """ - Get all registers from experiment_graph_draw.\n - :return: row content: id, job_name, x, y - :rtype: 4-tuple (int, str, int, int) - """ - try: - if self.conn: - # conn = create_connection(DB_FILE_AS_TIMES) - self.conn.text_factory = str - cur = self.conn.cursor() - cur.execute( - "SELECT id, job_name, x, y FROM experiment_graph_draw") - rows = cur.fetchall() - return rows - return None - except Exception as exp: - print((traceback.format_exc())) - print((str(exp))) - return None - - def _insert_many_graph_coordinates(self, values): - """ - Create many graph coordinates - :param conn: - :param details: - :return: - """ - try: - if self.conn: - # exp_id = self._get_id_db() - # conn = create_connection(DB_FILE_AS_TIMES) - # creation_date = datetime.today().strftime('%Y-%m-%d-%H:%M:%S') - sql = ''' INSERT INTO experiment_graph_draw(job_name, x, y) VALUES(?,?,?) 
''' - # print(row_content) - cur = self.conn.cursor() - cur.executemany(sql, values) - # print(cur) - self.conn.commit() - return cur.lastrowid - except Exception as exp: - print((traceback.format_exc())) - Log.warning( - "Error on Insert many graph drawing : {}".format(str(exp))) - return None - - def _clear_graph_database(self): - """ - Clear all content from graph drawing database - """ - try: - if self.conn: - # conn = create_connection(DB_FILE_AS_TIMES) - # modified_date = datetime.today().strftime('%Y-%m-%d-%H:%M:%S') - sql = ''' DELETE FROM experiment_graph_draw ''' - cur = self.conn.cursor() - cur.execute(sql, ) - self.conn.commit() - return True - return False - except Exception as exp: - print((traceback.format_exc())) - print(("Error on Database clear: {}".format(str(exp)))) - return False - class JobDataStructure(MainDataBase): def __init__(self, expid: str, basic_config: APIBasicConfig): diff --git a/autosubmit_api/workers/business/process_graph_drawings.py b/autosubmit_api/workers/business/process_graph_drawings.py index fc3517e..4a2e6ff 100644 --- a/autosubmit_api/workers/business/process_graph_drawings.py +++ b/autosubmit_api/workers/business/process_graph_drawings.py @@ -3,7 +3,7 @@ import traceback from autosubmit_api.database import tables from autosubmit_api.database.common import create_as_times_db_engine from autosubmit_api.common import utils as common_utils -from autosubmit_api.database.db_jobdata import ExperimentGraphDrawing +from autosubmit_api.components.experiment.graph_drawer import ExperimentGraphDrawing from autosubmit_api.builders.configuration_facade_builder import ( ConfigurationFacadeDirector, AutosubmitConfigurationFacadeBuilder, diff --git a/tests/experiments/.autosubmitrc b/tests/experiments/.autosubmitrc index da7bbe7..4b894ee 100644 --- a/tests/experiments/.autosubmitrc +++ b/tests/experiments/.autosubmitrc @@ -1,6 +1,7 @@ [database] path = ./tests/experiments/ filename = autosubmit.db +backend = sqlite [local] path = ./tests/experiments/ diff --git a/tests/test_graph.py b/tests/test_graph.py index 0dc5bed..ba14361 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -10,7 +10,7 @@ from autosubmit_api.builders.joblist_loader_builder import ( JobListLoaderDirector, ) from autosubmit_api.database import tables -from autosubmit_api.database.db_jobdata import ExperimentGraphDrawing +from autosubmit_api.components.experiment.graph_drawer import ExperimentGraphDrawing from autosubmit_api.monitor.monitor import Monitor from autosubmit_api.persistance.experiment import ExperimentPaths -- GitLab From 3756fb771fc51a9d5f8afe2824fc0bbabb06085f Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 7 May 2024 13:06:53 +0200 Subject: [PATCH 04/26] fix engine --- autosubmit_api/database/common.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index b5d3efd..ab0659e 100644 --- a/autosubmit_api/database/common.py +++ b/autosubmit_api/database/common.py @@ -18,7 +18,10 @@ from autosubmit_api.logger import logger from autosubmit_api.config.basicConfig import APIBasicConfig APIBasicConfig.read() -_postgres_engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) +try: + _postgres_engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) +except Exception: + pass def get_postgres_engine(): db = _postgres_engine -- GitLab From 9c4775cc6ab2515b87f3c2542f9b42dbd9573404 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Wed, 15 May 2024 14:38:29 +0200 Subject: [PATCH 
05/26] use autosubmit db_structure --- .../autosubmit_legacy/job/job_list.py | 2 +- .../components/jobs/joblist_loader.py | 4 +- autosubmit_api/database/db_structure.py | 104 ------------------ autosubmit_api/database/tables.py | 75 ++----------- setup.py | 2 +- 5 files changed, 14 insertions(+), 173 deletions(-) delete mode 100644 autosubmit_api/database/db_structure.py diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py index cc6e085..8cec17e 100644 --- a/autosubmit_api/autosubmit_legacy/job/job_list.py +++ b/autosubmit_api/autosubmit_legacy/job/job_list.py @@ -37,7 +37,7 @@ from autosubmit_api.monitor.monitor import Monitor from autosubmit_api.common.utils import Status from bscearth.utils.date import date2str, parse_date # from autosubmit_legacy.job.tree import Tree -from autosubmit_api.database import db_structure as DbStructure +from autosubmit.database import db_structure as DbStructure from autosubmit_api.database.db_jobdata import JobDataStructure, JobRow from autosubmit_api.builders.experiment_history_builder import ExperimentHistoryDirector, ExperimentHistoryBuilder from autosubmit_api.history.data_classes.job_data import JobData diff --git a/autosubmit_api/components/jobs/joblist_loader.py b/autosubmit_api/components/jobs/joblist_loader.py index 1edc563..e9923d6 100644 --- a/autosubmit_api/components/jobs/joblist_loader.py +++ b/autosubmit_api/components/jobs/joblist_loader.py @@ -4,7 +4,7 @@ import os from fnmatch import fnmatch from autosubmit_api.components.jobs.joblist_helper import JobListHelper from autosubmit_api.components.jobs.job_factory import StandardJob, Job -from autosubmit_api.database.db_structure import get_structure +from autosubmit.database import db_structure from autosubmit_api.common.utils import Status from bscearth.utils.date import date2str from typing import Dict, List, Set @@ -144,7 +144,7 @@ class JobListLoader(object): self._job_dictionary[job.name] = job def load_existing_structure_adjacency(self): - self._structure_adjacency = get_structure(self.expid, self.configuration_facade.structures_path) + self._structure_adjacency = db_structure.get_structure(self.expid, self.configuration_facade.structures_path) def distribute_adjacency_into_jobs(self): parents_adjacency = {} diff --git a/autosubmit_api/database/db_structure.py b/autosubmit_api/database/db_structure.py deleted file mode 100644 index 06ad129..0000000 --- a/autosubmit_api/database/db_structure.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import textwrap -import traceback -import sqlite3 - -from autosubmit_api.persistance.experiment import ExperimentPaths - -def get_structure(expid, structures_path): - """ - Creates file of database and table of experiment structure if it does not exist. 
- Returns current structure as a Dictionary Job Name -> Children's Names - - :return: Map from job to children - :rtype: Dictionary Key: String, Value: List(of String) - """ - try: - exp_paths = ExperimentPaths(expid) - db_structure_path = exp_paths.structure_db - #pkl_path = os.path.join(exp_path, expid, "pkl") - if os.path.exists(db_structure_path): - # Create file - os.umask(0) - if not os.path.exists(db_structure_path): - os.open(db_structure_path, os.O_WRONLY | os.O_CREAT, 0o777) - # open(db_structure_path, "w") - # print(db_structure_path) - create_table_query = textwrap.dedent( - '''CREATE TABLE - IF NOT EXISTS experiment_structure ( - e_from text NOT NULL, - e_to text NOT NULL, - UNIQUE(e_from,e_to) - );''') - with create_connection(db_structure_path) as conn: - create_table(conn, create_table_query) - current_table = _get_exp_structure(db_structure_path) - # print("Current table: ") - # print(current_table) - current_table_structure = dict() - for item in current_table: - _from, _to = item - current_table_structure.setdefault(_from, []).append(_to) - current_table_structure.setdefault(_to, []) - # if _from not in current_table_structure.keys(): - # current_table_structure[_from] = list() - # if _to not in current_table_structure.keys(): - # current_table_structure[_to] = list() - # current_table_structure[_from].append(_to) - if (len(list(current_table_structure.keys())) > 0): - # print("Return structure") - return current_table_structure - else: - return dict() - else: - # pkl folder not found - raise Exception("structures db not found " + - str(db_structure_path)) - except Exception as exp: - print((traceback.format_exc())) - - -def create_connection(db_file): - """ - Create a database connection to the SQLite database specified by db_file. - :param db_file: database file name - :return: Connection object or None - """ - try: - conn = sqlite3.connect(db_file) - return conn - except: - return None - - -def create_table(conn: sqlite3.Connection, create_table_sql): - """ create a table from the create_table_sql statement - :param conn: Connection object - :param create_table_sql: a CREATE TABLE statement - :return: - """ - try: - c = conn.cursor() - c.execute(create_table_sql) - except Exception as e: - print(e) - - -def _get_exp_structure(path): - """ - Get all registers from experiment_status.\n - :return: row content: exp_id, name, status, seconds_diff - :rtype: 4-tuple (int, str, str, int) - """ - try: - with create_connection(path) as conn: - conn.text_factory = str - cur = conn.cursor() - cur.execute( - "SELECT e_from, e_to FROM experiment_structure") - rows = cur.fetchall() - return rows - except Exception as exp: - print((traceback.format_exc())) - return dict() diff --git a/autosubmit_api/database/tables.py b/autosubmit_api/database/tables.py index a1fd39d..95f428f 100644 --- a/autosubmit_api/database/tables.py +++ b/autosubmit_api/database/tables.py @@ -1,28 +1,14 @@ -from sqlalchemy import MetaData, Integer, String, Text, Table -from sqlalchemy.orm import DeclarativeBase, mapped_column, Mapped - - -metadata_obj = MetaData() - +from sqlalchemy import Integer, Text, Table +from sqlalchemy.orm import mapped_column, Mapped +from autosubmit.database.tables import ( + BaseTable, + ExperimentTable, + ExperimentStatusTable, + JobPackageTable, + WrapperJobPackageTable, +) ## SQLAlchemy ORM tables -class BaseTable(DeclarativeBase): - metadata = metadata_obj - - -class ExperimentTable(BaseTable): - """ - Is the main table, populated by Autosubmit. Should be read-only by the API. 
- """ - - __tablename__ = "experiment" - - id: Mapped[int] = mapped_column(Integer, nullable=False, primary_key=True) - name: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String, nullable=False) - autosubmit_version: Mapped[str] = mapped_column(String) - - class DetailsTable(BaseTable): """ Stores extra information. It is populated by the API. @@ -38,20 +24,6 @@ class DetailsTable(BaseTable): hpc: Mapped[str] = mapped_column(Text, nullable=False) -class ExperimentStatusTable(BaseTable): - """ - Stores the status of the experiments - """ - - __tablename__ = "experiment_status" - - exp_id: Mapped[int] = mapped_column(Integer, primary_key=True) - name: Mapped[str] = mapped_column(Text, nullable=False) - status: Mapped[str] = mapped_column(Text, nullable=False) - seconds_diff: Mapped[int] = mapped_column(Integer, nullable=False) - modified: Mapped[str] = mapped_column(Text, nullable=False) - - class GraphDataTable(BaseTable): """ Stores the coordinates and it is used exclusively to speed up the process @@ -66,33 +38,6 @@ class GraphDataTable(BaseTable): y: Mapped[int] = mapped_column(Integer, nullable=False) -class JobPackageTable(BaseTable): - """ - Stores a mapping between the wrapper name and the actual job in slurm - """ - - __tablename__ = "job_package" - - exp_id: Mapped[str] = mapped_column(Text) - package_name: Mapped[str] = mapped_column(Text, primary_key=True) - job_name: Mapped[str] = mapped_column(Text, primary_key=True) - - -class WrapperJobPackageTable(BaseTable): - """ - It is a replication. It is only created/used when using inspectand create or monitor - with flag -cw in Autosubmit.\n - This replication is used to not interfere with the current autosubmit run of that experiment - since wrapper_job_package will contain a preview, not the real wrapper packages - """ - - __tablename__ = "wrapper_job_package" - - exp_id: Mapped[str] = mapped_column(Text) - package_name: Mapped[str] = mapped_column(Text, primary_key=True) - job_name: Mapped[str] = mapped_column(Text, primary_key=True) - - ## SQLAlchemy Core tables # MAIN_DB TABLES @@ -107,4 +52,4 @@ graph_data_table: Table = GraphDataTable.__table__ # Job package TABLES job_package_table: Table = JobPackageTable.__table__ -wrapper_job_package_table: Table = WrapperJobPackageTable.__table__ \ No newline at end of file +wrapper_job_package_table: Table = WrapperJobPackageTable.__table__ diff --git a/setup.py b/setup.py index 4bd18f4..c1a1722 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ install_requires = [ "scipy~=1.11.4", "python-dotenv~=1.0.1", "autosubmitconfigparser>=1.0.65", - "autosubmit>=3.13", + "autosubmit>=4.2.0", "Flask-APScheduler~=1.13.1", "gunicorn~=22.0.0", "pydantic~=2.5.2", -- GitLab From 3b151b4f52ee4a84f9e85231c4eab7f1098b3eb5 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Wed, 15 May 2024 15:17:45 +0200 Subject: [PATCH 06/26] use autosubmit db_manager in graph draw --- .../database/adapters/graph_draw.py | 59 ++-- autosubmit_api/database/db_manager.py | 272 ------------------ 2 files changed, 16 insertions(+), 315 deletions(-) delete mode 100644 autosubmit_api/database/db_manager.py diff --git a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/adapters/graph_draw.py index 8aed139..2b89efe 100644 --- a/autosubmit_api/database/adapters/graph_draw.py +++ b/autosubmit_api/database/adapters/graph_draw.py @@ -1,56 +1,29 @@ -from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit.database.db_manager import 
create_db_table_manager from autosubmit_api.database import tables -from autosubmit_api.database.common import get_postgres_engine, table_change_schema from autosubmit_api.persistance.experiment import ExperimentPaths - - -from sqlalchemy import NullPool, create_engine -from sqlalchemy.schema import CreateSchema - - from typing import Any, Dict, List class ExpGraphDrawDBAdapter: - def __init__(self, expid: str) -> None: self.expid = expid - - if APIBasicConfig.DATABASE_BACKEND == "postgres": - self.table = table_change_schema(expid, tables.GraphDataTable) - self.engine = get_postgres_engine() - with self.engine.connect() as conn: - conn.execute(CreateSchema(self.expid, if_not_exists=True)) - self.table.create(conn, checkfirst=True) - conn.commit() - - else: - self.table = tables.GraphDataTable.__table__ - sqlite_graph_db_path = ExperimentPaths(expid).graph_data_db - self.engine = create_engine( - f"sqlite:///{ sqlite_graph_db_path}", poolclass=NullPool - ) - with self.engine.connect() as conn: - self.table.create(conn, checkfirst=True) - conn.commit() + self.graph_db_manager = create_db_table_manager( + table=tables.GraphDataTable, + db_filepath=ExperimentPaths(expid).graph_data_db, + schema=expid, + ) def get_all(self) -> List[Dict[str, Any]]: - with self.engine.connect() as conn: - result = conn.execute(self.table.select()).all() - + with self.graph_db_manager.get_connection() as conn: + result = self.graph_db_manager.select_all(conn) return [x._mapping for x in result] def delete_all(self) -> int: - with self.engine.connect() as conn: - result = conn.execute(self.table.delete()) - conn.commit() - return result.rowcount - - def insert_many(self, values) -> int: - with self.engine.connect() as conn: - result = conn.execute(self.table.insert(), values) - conn.commit() - return result.rowcount - - - + with self.graph_db_manager.get_connection() as conn: + rowcount = self.graph_db_manager.delete_all(conn) + return rowcount + + def insert_many(self, values: List[Dict[str, Any]]) -> int: + with self.graph_db_manager.get_connection() as conn: + rowcount = self.graph_db_manager.insert_many(conn, values) + return rowcount diff --git a/autosubmit_api/database/db_manager.py b/autosubmit_api/database/db_manager.py deleted file mode 100644 index ad46394..0000000 --- a/autosubmit_api/database/db_manager.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 Earth Sciences Department, BSC-CNS - -# This file is part of Autosubmit. - -# Autosubmit is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# Autosubmit is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with Autosubmit. If not, see . - -import sqlite3 -import os -from typing import List - -class DbManager(object): - """ - Class to manage an SQLite database. 
- """ - - def __init__(self, root_path: str, db_name: str, db_version: int = 1): - self.root_path = root_path - self.db_name = db_name - self.db_version = db_version - # is_new = not - if os.path.exists(self._get_db_filepath()): - self.connection = sqlite3.connect(self._get_db_filepath()) - elif os.path.exists(self._get_db_filepath() + ".db"): - self.connection = sqlite3.connect(self._get_db_filepath() + ".db") - else: - self.connection = None - # if is_new: - # self._initialize_database() - - def disconnect(self): - """ - Closes the manager connection - """ - if self.connection: - self.connection.close() - - def create_table(self, table_name: str, fields: List[str]): - """ - Creates a new table with the given fields - :param table_name: str - :param fields: List[str] - """ - if self.connection: - cursor = self.connection.cursor() - create_command = self.generate_create_table_command( - table_name, fields) - # print(create_command) - cursor.execute(create_command) - self.connection.commit() - - def create_view(self, view_name: str, statement: str): - """ - Creates a new view with the given statement - - Parameters - ---------- - view_name : str - Name of the view to create - statement : str - SQL statement - """ - if self.connection: - cursor = self.connection.cursor() - create_command = self.generate_create_view_command(view_name, statement) - # print(create_command) - cursor.execute(create_command) - self.connection.commit() - - def drop_table(self, table_name): - """ - Drops the given table - :param table_name: str - - """ - if self.connection: - cursor = self.connection.cursor() - drop_command = self.generate_drop_table_command(table_name) - cursor.execute(drop_command) - self.connection.commit() - - def insert(self, table_name, columns, values): - """ - Inserts a new row on the given table - :param table_name: str - :param columns: [str] - :param values: [str] - - """ - if self.connection: - cursor = self.connection.cursor() - insert_command = self.generate_insert_command( - table_name, columns[:], values[:]) - cursor.execute(insert_command) - self.connection.commit() - - def insertMany(self, table_name, data): - """ - Inserts multiple new rows on the given table - :param table_name: str - :param data: [()] - - """ - if self.connection: - cursor = self.connection.cursor() - insert_many_command = self.generate_insert_many_command( - table_name, len(data[0])) - cursor.executemany(insert_many_command, data) - self.connection.commit() - - def select_first(self, table_name): - """ - Returns the first row of the given table - :param table_name: str - :return row: [] - """ - if self.connection: - cursor = self._select_with_all_fields(table_name) - return cursor.fetchone() - - def select_first_where(self, table_name, where): - """ - Returns the first row of the given table that matches the given where conditions - :param table_name: str - :param where: [str] - :return row: [] - """ - if self.connection: - cursor = self._select_with_all_fields(table_name, where) - return cursor.fetchone() - - def select_all(self, table_name): - """ - Returns all the rows of the given table - :param table_name: str - :return rows: [[]] - """ - if self.connection: - cursor = self._select_with_all_fields(table_name) - return cursor.fetchall() - - def select_all_where(self, table_name, where): - """ - Returns all the rows of the given table that matches the given where conditions - :param table_name: str - :param where: [str] - :return rows: [[]] - """ - if self.connection: - cursor = 
self._select_with_all_fields(table_name, where) - return cursor.fetchall() - - def count(self, table_name): - """ - Returns the number of rows of the given table - :param table_name: str - :return int - """ - if self.connection: - cursor = self.connection.cursor() - count_command = self.generate_count_command(table_name) - cursor.execute(count_command) - return cursor.fetchone()[0] - - def drop(self): - """ - Drops the database (deletes the .db file) - - """ - if self.connection: - self.connection.close() - if os.path.exists(self._get_db_filepath()): - os.remove(self._get_db_filepath()) - - def _get_db_filepath(self) -> str: - """ - Returns the path of the .db file - """ - return os.path.join(self.root_path, self.db_name) - - def _initialize_database(self): - """ - Initialize the database with an options table - with the name and the version of the DB - - """ - if self.connection: - options_table_name = 'db_options' - columns = ['option_name', 'option_value'] - self.create_table(options_table_name, columns) - self.insert(options_table_name, columns, ['name', self.db_name]) - self.insert(options_table_name, columns, - ['version', self.db_version]) - - def _select_with_all_fields(self, table_name, where=[]): - """ - Returns the cursor of the select command with the given parameters - :param table_name: str - :param where: [str] - :return cursor: Cursor - """ - if self.connection: - cursor = self.connection.cursor() - count_command = self.generate_select_command(table_name, where[:]) - cursor.execute(count_command) - return cursor - - """ - Static methods that generates the SQLite commands to make the queries - """ - - @staticmethod - def generate_create_table_command(table_name: str, fields: List[str]) -> str: - create_command = f'CREATE TABLE IF NOT EXISTS {table_name} ( {", ".join(fields)} )' - return create_command - - @staticmethod - def generate_create_view_command(view_name: str, statement: str) -> str: - create_command = f'CREATE VIEW IF NOT EXISTS {view_name} as {statement}' - return create_command - - @staticmethod - def generate_drop_table_command(table_name: str): - drop_command = f'DROP TABLE IF EXISTS {table_name}' - return drop_command - - @staticmethod - def generate_insert_command(table_name, columns, values): - insert_command = 'INSERT INTO ' + table_name + '(' + columns.pop(0) - for column in columns: - insert_command += (', ' + column) - insert_command += (') VALUES ("' + str(values.pop(0)) + '"') - for value in values: - insert_command += (', "' + str(value) + '"') - insert_command += ')' - return insert_command - - @staticmethod - def generate_insert_many_command(table_name, num_of_values): - insert_command = 'INSERT INTO ' + table_name + ' VALUES (?' - num_of_values -= 1 - while num_of_values > 0: - insert_command += ',?' 
- num_of_values -= 1 - insert_command += ')' - return insert_command - - @staticmethod - def generate_count_command(table_name): - count_command = 'SELECT count(*) FROM ' + table_name - return count_command - - @staticmethod - def generate_select_command(table_name, where=[]): - basic_select = 'SELECT * FROM ' + table_name - select_command = basic_select if len( - where) == 0 else basic_select + ' WHERE ' + where.pop(0) - for condition in where: - select_command += ' AND ' + condition - return select_command -- GitLab From 7a7493f84bb5015a0d865ff8288ce4c787bfa843 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Thu, 16 May 2024 14:24:33 +0200 Subject: [PATCH 07/26] add status adapter --- .../bgtasks/tasks/status_updater.py | 37 ++++-------- .../database/adapters/experiment_status.py | 60 +++++++++++++++++++ autosubmit_api/database/db_common.py | 51 ++++------------ autosubmit_api/experiment/common_requests.py | 5 +- 4 files changed, 87 insertions(+), 66 deletions(-) create mode 100644 autosubmit_api/database/adapters/experiment_status.py diff --git a/autosubmit_api/bgtasks/tasks/status_updater.py b/autosubmit_api/bgtasks/tasks/status_updater.py index 971cd1f..5887b27 100644 --- a/autosubmit_api/bgtasks/tasks/status_updater.py +++ b/autosubmit_api/bgtasks/tasks/status_updater.py @@ -1,4 +1,3 @@ -from datetime import datetime import os import time from typing import Dict, List @@ -6,9 +5,9 @@ from typing import Dict, List from sqlalchemy import select from autosubmit_api.bgtasks.bgtask import BackgroundTaskTemplate from autosubmit_api.database import tables +from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter from autosubmit_api.database.common import ( create_autosubmit_db_engine, - create_as_times_db_engine, create_main_db_conn, ) from autosubmit_api.database.models import ExperimentModel @@ -87,30 +86,16 @@ class StatusUpdater(BackgroundTaskTemplate): @classmethod def _update_experiment_status(cls, experiment: ExperimentModel, is_running: bool): - with create_as_times_db_engine().connect() as conn: - try: - del_stmnt = tables.experiment_status_table.delete().where( - tables.experiment_status_table.c.exp_id == experiment.id - ) - ins_stmnt = tables.experiment_status_table.insert().values( - exp_id=experiment.id, - name=experiment.name, - status=( - RunningStatus.RUNNING - if is_running - else RunningStatus.NOT_RUNNING - ), - seconds_diff=0, - modified=datetime.now().isoformat(sep="-", timespec="seconds"), - ) - conn.execute(del_stmnt) - conn.execute(ins_stmnt) - conn.commit() - except Exception as exc: - conn.rollback() - cls.logger.error( - f"[{cls.id}] Error while doing database operations on experiment {experiment.name}: {exc}" - ) + try: + ExperimentStatusDbAdapter().upsert_status( + experiment.id, + experiment.name, + RunningStatus.RUNNING if is_running else RunningStatus.NOT_RUNNING, + ) + except Exception as exc: + cls.logger.error( + f"[{cls.id}] Error while doing database operations on experiment {experiment.name}: {exc}" + ) @classmethod def procedure(cls): diff --git a/autosubmit_api/database/adapters/experiment_status.py b/autosubmit_api/database/adapters/experiment_status.py new file mode 100644 index 0000000..afec31f --- /dev/null +++ b/autosubmit_api/database/adapters/experiment_status.py @@ -0,0 +1,60 @@ +from datetime import datetime +import os +from typing import Dict +from autosubmit.database.db_manager import create_db_table_manager +from sqlalchemy import delete, insert, select +from autosubmit_api.config.basicConfig import 
APIBasicConfig +from autosubmit_api.database import tables + + +class ExperimentStatusDbAdapter: + def __init__(self) -> None: + APIBasicConfig.read() + self.table_manager = create_db_table_manager( + table=tables.ExperimentStatusTable, + db_filepath=os.path.join(APIBasicConfig.DB_DIR, APIBasicConfig.AS_TIMES_DB), + ) + + def get_all_dict(self) -> Dict[str, str]: + """ + Gets table experiment_status as dictionary {expid: status} + """ + result = dict() + with self.table_manager.get_connection() as conn: + cursor = conn.execute(select(self.table_manager.table)) + for row in cursor: + result[row.name] = row.status + return result + + def get_status(self, expid: str) -> str: + """ + Gets the current status of one experiment + """ + with self.table_manager.get_connection() as conn: + row = conn.execute( + select(self.table_manager.table).where( + self.table_manager.table.c.name == expid + ) + ).one_or_none() + return row.status if row else "NOT RUNNING" + + def upsert_status(self, exp_id: int, expid: str, status: str): + """ + Upsert (Delete/Insert) the status of one experiment + """ + with self.table_manager.get_connection() as conn: + del_stmnt = delete(tables.ExperimentStatusTable).where( + tables.ExperimentStatusTable.name == expid + ) + ins_stmnt = insert(tables.ExperimentStatusTable).values( + exp_id=exp_id, + name=expid, + status=status, + seconds_diff=0, + modified=datetime.now().isoformat(sep="-", timespec="seconds"), + ) + conn.execute(del_stmnt) + result = conn.execute(ins_stmnt) + conn.commit() + + return result.rowcount diff --git a/autosubmit_api/database/db_common.py b/autosubmit_api/database/db_common.py index 6eec21a..5875313 100644 --- a/autosubmit_api/database/db_common.py +++ b/autosubmit_api/database/db_common.py @@ -28,6 +28,7 @@ from bscearth.utils.log import Log from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.builders.experiment_history_builder import ExperimentHistoryDirector, ExperimentHistoryBuilder from autosubmit_api.builders.configuration_facade_builder import ConfigurationFacadeDirector, AutosubmitConfigurationFacadeBuilder +from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter from autosubmit_api.database.utils import get_headers_sqlite, map_row_result_to_dict_sqlite from autosubmit_api.experiment import common_db_requests as DbRequests from typing import Dict, Any, Tuple @@ -111,40 +112,6 @@ def close_conn(conn: Connection, cursor): return -def check_experiment_exists(name, error_on_inexistence=True): - """ - Checks if exist an experiment with the given name. 
- - :param error_on_inexistence: if True, adds an error log if experiment does not exists - :type error_on_inexistence: bool - :param name: Experiment name - :type name: str - :return: If experiment exists returns true, if not returns false - :rtype: bool - """ - if not check_db(): - return False - try: - (conn, cursor) = open_conn() - except DbException as e: - Log.error( - 'Connection to database could not be established: {0}', e.message) - return False - conn.isolation_level = None - - # SQLite always return a unicode object, but we can change this - # behaviour with the next sentence - conn.text_factory = str - cursor.execute( - 'select name from experiment where name=:name', {'name': name}) - row = cursor.fetchone() - close_conn(conn, cursor) - if row is None: - if error_on_inexistence: - Log.error('The experiment name "{0}" does not exist yet!!!', name) - return False - return True - def get_autosubmit_version(expid, log=None): """ @@ -230,8 +197,12 @@ def search_experiment_by_id(query, exp_type=None, only_active=None, owner=None): experiment_status = dict() experiment_times = dict() if len(table) > 0: - experiment_status = DbRequests.get_experiment_status() - # REMOVED: experiment_times = DbRequests.get_experiment_times() + # Get experiment status table + try: + experiment_status = ExperimentStatusDbAdapter().get_all_dict() + except Exception: + experiment_status = {} + for row in table: expid = str(row[1]) @@ -312,8 +283,12 @@ def get_current_running_exp(): result = list() experiment_status = dict() experiment_times = dict() - experiment_status = DbRequests.get_experiment_status() - # REMOVED: experiment_times = DbRequests.get_experiment_times() + # Get experiment status table + try: + experiment_status = ExperimentStatusDbAdapter().get_all_dict() + except Exception: + experiment_status = {} + for row in table: expid = str(row[1]) status = "NOT RUNNING" diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py index 82be53f..c1a87ee 100644 --- a/autosubmit_api/experiment/common_requests.py +++ b/autosubmit_api/experiment/common_requests.py @@ -33,6 +33,7 @@ from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer from autosubmit_api.components.jobs.job_factory import SimpleJob from autosubmit_api.config.confConfigStrategy import confConfigStrategy from autosubmit_api.database import db_common as db_common +from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter from autosubmit_api.experiment import common_db_requests as DbRequests from autosubmit_api.database import db_jobdata as JobData from autosubmit_api.common import utils as common_utils @@ -151,7 +152,7 @@ def get_experiment_data(expid): try: autosubmit_config_facade = ConfigurationFacadeDirector(AutosubmitConfigurationFacadeBuilder(expid)).build_autosubmit_configuration_facade() try: - _, experiment_status = DbRequests.get_specific_experiment_status(expid) + experiment_status = ExperimentStatusDbAdapter().get_status(expid) result["running"] = (experiment_status == "RUNNING") except Exception as exc: logger.warning((traceback.format_exc())) @@ -463,7 +464,7 @@ def quick_test_run(expid): error_message = "" try: - name, status = DbRequests.get_specific_experiment_status(expid) + status = ExperimentStatusDbAdapter().get_status(expid) if status != "RUNNING": running = False except Exception as exp: -- GitLab From 959ff286aa5fde5f4fe7bd03913239fdc80f578a Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Thu, 16 May 2024 14:56:38 
+0200 Subject: [PATCH 08/26] fix bg tasks --- .../bgtasks/tasks/status_updater.py | 18 +++-------------- .../workers/populate_details/populate.py | 20 ------------------- 2 files changed, 3 insertions(+), 35 deletions(-) diff --git a/autosubmit_api/bgtasks/tasks/status_updater.py b/autosubmit_api/bgtasks/tasks/status_updater.py index 5887b27..e9d289b 100644 --- a/autosubmit_api/bgtasks/tasks/status_updater.py +++ b/autosubmit_api/bgtasks/tasks/status_updater.py @@ -1,6 +1,6 @@ import os import time -from typing import Dict, List +from typing import List from sqlalchemy import select from autosubmit_api.bgtasks.bgtask import BackgroundTaskTemplate @@ -49,15 +49,6 @@ class StatusUpdater(BackgroundTaskTemplate): query_result = conn.execute(tables.experiment_table.select()).all() return [ExperimentModel.model_validate(row._mapping) for row in query_result] - @classmethod - def _get_current_status(cls) -> Dict[str, str]: - """ - Get the current status of the experiments - """ - with create_as_times_db_engine().connect() as conn: - query_result = conn.execute(tables.experiment_status_table.select()).all() - return {row.name: row.status for row in query_result} - @classmethod def _check_exp_running(cls, expid: str) -> bool: """ @@ -108,7 +99,7 @@ class StatusUpdater(BackgroundTaskTemplate): exp_list = cls._get_experiments() # Read current status of all experiments - current_status = cls._get_current_status() + current_status = ExperimentStatusDbAdapter().get_all_dict() # Check every experiment status & update for experiment in exp_list: @@ -116,10 +107,7 @@ class StatusUpdater(BackgroundTaskTemplate): new_status = ( RunningStatus.RUNNING if is_running else RunningStatus.NOT_RUNNING ) - if ( - current_status.get(experiment.name, RunningStatus.NOT_RUNNING) - != new_status - ): + if current_status.get(experiment.name) != new_status: cls.logger.info( f"[{cls.id}] Updating status of {experiment.name} to {new_status}" ) diff --git a/autosubmit_api/workers/populate_details/populate.py b/autosubmit_api/workers/populate_details/populate.py index 175b21b..fba8cba 100644 --- a/autosubmit_api/workers/populate_details/populate.py +++ b/autosubmit_api/workers/populate_details/populate.py @@ -27,7 +27,6 @@ class DetailsProcessor: def process(self): new_details = self._get_all_details() - self.create_details_table_if_not_exists() self._clean_table() return self._insert_many_into_details_table(new_details) @@ -88,25 +87,6 @@ class DetailsProcessor: conn.commit() return result.rowcount - def create_details_table_if_not_exists(self): - create_table_query = textwrap.dedent( - """ - CREATE TABLE - IF NOT EXISTS details ( - exp_id integer PRIMARY KEY, - user text NOT NULL, - created text NOT NULL, - model text NOT NULL, - branch text NOT NULL, - hpc text NOT NULL, - FOREIGN KEY (exp_id) REFERENCES experiment (id) - ); - """ - ) - with self.main_db_engine.connect() as conn: - conn.execute(text(create_table_query)) - conn.commit() - def _clean_table(self): # type: () -> None with self.main_db_engine.connect() as conn: -- GitLab From c8274c4a1609604b057127bc40e21da8996acfd7 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Thu, 16 May 2024 15:00:42 +0200 Subject: [PATCH 09/26] remove legacy code --- .../database/adapters/graph_draw.py | 14 +++--- autosubmit_api/database/db_jobdata.py | 35 -------------- autosubmit_api/database/queries.py | 8 ++-- autosubmit_api/database/tables.py | 6 +++ .../experiment/common_db_requests.py | 47 ------------------- 5 files changed, 17 insertions(+), 93 deletions(-) diff --git 
a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/adapters/graph_draw.py index 2b89efe..70d649e 100644 --- a/autosubmit_api/database/adapters/graph_draw.py +++ b/autosubmit_api/database/adapters/graph_draw.py @@ -7,23 +7,23 @@ from typing import Any, Dict, List class ExpGraphDrawDBAdapter: def __init__(self, expid: str) -> None: self.expid = expid - self.graph_db_manager = create_db_table_manager( + self.table_manager = create_db_table_manager( table=tables.GraphDataTable, db_filepath=ExperimentPaths(expid).graph_data_db, schema=expid, ) def get_all(self) -> List[Dict[str, Any]]: - with self.graph_db_manager.get_connection() as conn: - result = self.graph_db_manager.select_all(conn) + with self.table_manager.get_connection() as conn: + result = self.table_manager.select_all(conn) return [x._mapping for x in result] def delete_all(self) -> int: - with self.graph_db_manager.get_connection() as conn: - rowcount = self.graph_db_manager.delete_all(conn) + with self.table_manager.get_connection() as conn: + rowcount = self.table_manager.delete_all(conn) return rowcount def insert_many(self, values: List[Dict[str, Any]]) -> int: - with self.graph_db_manager.get_connection() as conn: - rowcount = self.graph_db_manager.insert_many(conn, values) + with self.table_manager.get_connection() as conn: + rowcount = self.table_manager.insert_many(conn, values) return rowcount diff --git a/autosubmit_api/database/db_jobdata.py b/autosubmit_api/database/db_jobdata.py index e2198bd..e236070 100644 --- a/autosubmit_api/database/db_jobdata.py +++ b/autosubmit_api/database/db_jobdata.py @@ -424,41 +424,6 @@ class JobData(object): self._energy = energy if energy else 0 -class JobStepExtraData(): - def __init__(self, key, dict_data): - self.key = key - if isinstance(dict_data, dict): - # dict_data["ncpus"] if dict_data and "ncpus" in dict_data.keys( - self.ncpus = dict_data.get("ncpus", 0) if dict_data else 0 - # ) else 0 - self.nnodes = dict_data.get( - "nnodes", 0) if dict_data else 0 # and "nnodes" in dict_data.keys( - # ) else 0 - self.submit = int(mktime(datetime.strptime(dict_data["submit"], "%Y-%m-%dT%H:%M:%S").timetuple())) if dict_data and "submit" in list(dict_data.keys( - )) else 0 - self.start = int(mktime(datetime.strptime(dict_data["start"], "%Y-%m-%dT%H:%M:%S").timetuple())) if dict_data and "start" in list(dict_data.keys( - )) else 0 - self.finish = int(mktime(datetime.strptime(dict_data["finish"], "%Y-%m-%dT%H:%M:%S").timetuple())) if dict_data and "finish" in list(dict_data.keys( - )) and dict_data["finish"] != "Unknown" else 0 - self.energy = parse_output_number(dict_data["energy"]) if dict_data and "energy" in list(dict_data.keys( - )) else 0 - # if dict_data and "MaxRSS" in dict_data.keys( - self.maxRSS = dict_data.get("MaxRSS", 0) - # ) else 0 - # if dict_data and "AveRSS" in dict_data.keys( - self.aveRSS = dict_data.get("AveRSS", 0) - # ) else 0 - else: - self.ncpus = 0 - self.nnodes = 0 - self.submit = 0 - self.start = 0 - self.finish = 0 - self.energy = 0 - self.maxRSS = 0 - self.aveRSS = 0 - - class MainDataBase(): def __init__(self, expid): self.expid = expid diff --git a/autosubmit_api/database/queries.py b/autosubmit_api/database/queries.py index 88c8acd..55cca9e 100644 --- a/autosubmit_api/database/queries.py +++ b/autosubmit_api/database/queries.py @@ -55,12 +55,12 @@ def generate_query_listexp_extended( filter_stmts.append(tables.details_table.c.user == owner) if exp_type == "test": - filter_stmts.append(tables.experiment_table.c.name.like(f"t%")) + 
filter_stmts.append(tables.experiment_table.c.name.like("t%")) elif exp_type == "operational": - filter_stmts.append(tables.experiment_table.c.name.like(f"o%")) + filter_stmts.append(tables.experiment_table.c.name.like("o%")) elif exp_type == "experiment": - filter_stmts.append(tables.experiment_table.c.name.not_like(f"t%")) - filter_stmts.append(tables.experiment_table.c.name.not_like(f"o%")) + filter_stmts.append(tables.experiment_table.c.name.not_like("t%")) + filter_stmts.append(tables.experiment_table.c.name.not_like("o%")) if autosubmit_version: filter_stmts.append( diff --git a/autosubmit_api/database/tables.py b/autosubmit_api/database/tables.py index 95f428f..38dd5f4 100644 --- a/autosubmit_api/database/tables.py +++ b/autosubmit_api/database/tables.py @@ -3,6 +3,8 @@ from sqlalchemy.orm import mapped_column, Mapped from autosubmit.database.tables import ( BaseTable, ExperimentTable, + experiment_run_table, + JobDataTable, ExperimentStatusTable, JobPackageTable, WrapperJobPackageTable, @@ -53,3 +55,7 @@ graph_data_table: Table = GraphDataTable.__table__ # Job package TABLES job_package_table: Table = JobPackageTable.__table__ wrapper_job_package_table: Table = WrapperJobPackageTable.__table__ + +# Job Data TABLES +job_data_table: Table = JobDataTable.__table__ +experiment_run_table: Table = experiment_run_table diff --git a/autosubmit_api/experiment/common_db_requests.py b/autosubmit_api/experiment/common_db_requests.py index e8aa22f..3b256c2 100644 --- a/autosubmit_api/experiment/common_db_requests.py +++ b/autosubmit_api/experiment/common_db_requests.py @@ -4,8 +4,6 @@ import sqlite3 from datetime import datetime from autosubmit_api.logger import logger from autosubmit_api.config.basicConfig import APIBasicConfig -from autosubmit_api.database import tables -from autosubmit_api.database.common import create_as_times_db_engine APIBasicConfig.read() DB_FILES_STATUS = os.path.join( @@ -31,7 +29,6 @@ def create_connection(db_file): def insert_archive_status(status, alatency, abandwidth, clatency, cbandwidth, rtime): - try: with create_connection(DB_FILES_STATUS) as conn: sql = """ INSERT INTO archive_status(status, avg_latency, avg_bandwidth, current_latency, current_bandwidth, response_time, modified ) VALUES(?,?,?,?,?,?,?)""" @@ -73,47 +70,3 @@ def get_last_read_archive_status(): print((traceback.format_exc())) print(("Error on Get Last : " + str(exp))) return (False, None, None, None, None, None, None) - - -# SELECTS - - -def get_experiment_status(): - """ - Gets table experiment_status as dictionary - conn is expected to reference as_times.db - """ - experiment_status = dict() - try: - with create_as_times_db_engine().connect() as conn: - cursor = conn.execute(tables.experiment_status_table.select()) - for row in cursor: - experiment_status[row.name] = row.status - except Exception as exc: - logger.error(f"Exception while reading experiment_status: {exc}") - logger.error(traceback.format_exc()) - return experiment_status - - -def get_specific_experiment_status(expid): - """ - Gets the current status from database.\n - :param expid: Experiment name - :type expid: str - :return: name of experiment and status - :rtype: 2-tuple (name, status) - """ - try: - with create_as_times_db_engine().connect() as conn: - row = conn.execute( - tables.experiment_status_table.select().where( - tables.experiment_status_table.c.name == expid - ) - ).one_or_none() - if row: - return (row.name, row.status) - except Exception as exc: - logger.error(f"Exception while reading experiment_status for 
{expid}: {exc}") - logger.error(traceback.format_exc()) - - return (expid, "NOT RUNNING") -- GitLab From 0db8ce850519225945f982ec17dbaca2387bae04 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Thu, 16 May 2024 17:08:57 +0200 Subject: [PATCH 10/26] add db adapter layer for main DDBBs --- .../bgtasks/tasks/status_updater.py | 34 ++-- autosubmit_api/database/adapters/__init__.py | 26 +++ .../database/adapters/experiment.py | 27 ++++ .../database/adapters/experiment_details.py | 23 +++ .../database/adapters/experiment_run.py | 29 ++++ .../database/adapters/join/__init__.py | 0 .../database/adapters/join/experiment_join.py | 24 +++ autosubmit_api/database/common.py | 54 +------ autosubmit_api/database/db_common.py | 152 ------------------ autosubmit_api/views/v3.py | 22 +-- .../workers/populate_details/populate.py | 33 ++-- tests/experiments/autosubmit.db | Bin 20480 -> 20480 bytes .../metadata/graph/graph_data_a003.db | Bin 8192 -> 8192 bytes 13 files changed, 170 insertions(+), 254 deletions(-) create mode 100644 autosubmit_api/database/adapters/experiment.py create mode 100644 autosubmit_api/database/adapters/experiment_details.py create mode 100644 autosubmit_api/database/adapters/experiment_run.py create mode 100644 autosubmit_api/database/adapters/join/__init__.py create mode 100644 autosubmit_api/database/adapters/join/experiment_join.py diff --git a/autosubmit_api/bgtasks/tasks/status_updater.py b/autosubmit_api/bgtasks/tasks/status_updater.py index e9d289b..0177dd1 100644 --- a/autosubmit_api/bgtasks/tasks/status_updater.py +++ b/autosubmit_api/bgtasks/tasks/status_updater.py @@ -2,13 +2,11 @@ import os import time from typing import List -from sqlalchemy import select from autosubmit_api.bgtasks.bgtask import BackgroundTaskTemplate -from autosubmit_api.database import tables -from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter -from autosubmit_api.database.common import ( - create_autosubmit_db_engine, - create_main_db_conn, +from autosubmit_api.database.adapters import ( + ExperimentStatusDbAdapter, + ExperimentDbAdapter, + ExperimentJoinDbAdapter, ) from autosubmit_api.database.models import ExperimentModel from autosubmit_api.experiment.common_requests import _is_exp_running @@ -25,28 +23,20 @@ class StatusUpdater(BackgroundTaskTemplate): """ Clears the experiments that are not in the experiments table """ - with create_main_db_conn() as conn: - try: - del_stmnt = tables.experiment_status_table.delete().where( - tables.experiment_status_table.c.exp_id.not_in( - select(tables.experiment_table.c.id) - ) - ) - conn.execute(del_stmnt) - conn.commit() - except Exception as exc: - conn.rollback() - cls.logger.error( - f"[{cls.id}] Error while clearing missing experiments status: {exc}" - ) + + try: + ExperimentJoinDbAdapter().drop_status_from_deleted_experiments() + except Exception as exc: + cls.logger.error( + f"[{cls.id}] Error while clearing missing experiments status: {exc}" + ) @classmethod def _get_experiments(cls) -> List[ExperimentModel]: """ Get the experiments list """ - with create_autosubmit_db_engine().connect() as conn: - query_result = conn.execute(tables.experiment_table.select()).all() + query_result = ExperimentDbAdapter().get_all() return [ExperimentModel.model_validate(row._mapping) for row in query_result] @classmethod diff --git a/autosubmit_api/database/adapters/__init__.py b/autosubmit_api/database/adapters/__init__.py index e69de29..55dadbb 100644 --- a/autosubmit_api/database/adapters/__init__.py +++ 
b/autosubmit_api/database/adapters/__init__.py @@ -0,0 +1,26 @@ +""" +This module contains the adapters for the database tables. + +The adapters are used to interact with the database tables, delegating SQL statement generation and execution. + +Other modules can use the adapters to interact with the database tables without needing to know the SQL syntax. +""" + +from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter +from autosubmit_api.database.adapters.experiment_details import ( + ExperimentDetailsDbAdapter, +) +from autosubmit_api.database.adapters.experiment_status import ( + ExperimentStatusDbAdapter, +) +from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter +from autosubmit_api.database.adapters.join.experiment_join import ExperimentJoinDbAdapter + + +__all__ = [ + "ExperimentDbAdapter", + "ExperimentDetailsDbAdapter", + "ExperimentStatusDbAdapter", + "ExpGraphDrawDBAdapter", + "ExperimentJoinDbAdapter" +] diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py new file mode 100644 index 0000000..718fb73 --- /dev/null +++ b/autosubmit_api/database/adapters/experiment.py @@ -0,0 +1,27 @@ +from typing import Any, Dict, Optional +from autosubmit.database.db_manager import create_db_table_manager + +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database import tables + + +class ExperimentDbAdapter: + def __init__(self): + self.table_manager = create_db_table_manager( + table=tables.ExperimentTable, + db_filepath=APIBasicConfig.DB_PATH, + ) + + def get_all(self): + with self.table_manager.get_connection() as conn: + rows = self.table_manager.select_all(conn) + return rows + + def get_by_expid(self, expid) -> Optional[Dict[str, Any]]: + with self.table_manager.get_connection() as conn: + row = conn.execute( + self.table_manager.table.select().where( + tables.ExperimentTable.name == expid + ) + ).one_or_none() + return row._mapping if row else None diff --git a/autosubmit_api/database/adapters/experiment_details.py b/autosubmit_api/database/adapters/experiment_details.py new file mode 100644 index 0000000..f590ae3 --- /dev/null +++ b/autosubmit_api/database/adapters/experiment_details.py @@ -0,0 +1,23 @@ +from typing import Any, Dict, List +from autosubmit.database.db_manager import create_db_table_manager +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database import tables + + +class ExperimentDetailsDbAdapter: + def __init__(self) -> None: + APIBasicConfig.read() + self.table_manager = create_db_table_manager( + table=tables.DetailsTable, + db_filepath=APIBasicConfig.DB_PATH, + ) + + def delete_all(self) -> int: + with self.table_manager.get_connection() as conn: + rowcount = self.table_manager.delete_all(conn) + return rowcount + + def insert_many(self, values: List[Dict[str, Any]]) -> int: + with self.table_manager.get_connection() as conn: + rowcount = self.table_manager.insert_many(conn, values) + return rowcount diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/adapters/experiment_run.py new file mode 100644 index 0000000..541fc7d --- /dev/null +++ b/autosubmit_api/database/adapters/experiment_run.py @@ -0,0 +1,29 @@ +from typing import Dict, Optional +from autosubmit.database.db_manager import create_db_table_manager +from sqlalchemy import select + +from autosubmit_api.database import tables +from autosubmit_api.persistance.experiment import ExperimentPaths + +
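Taken together, this adapter layer is meant to be the only code path that talks SQL. A minimal usage sketch of the adapters introduced above, assuming the SQLite databases referenced by APIBasicConfig exist and using a hypothetical expid "a000":

# Illustrative only, not part of the patch: exercising the new adapter layer.
from autosubmit_api.config.basicConfig import APIBasicConfig
from autosubmit_api.database.adapters import (
    ExperimentDbAdapter,
    ExperimentStatusDbAdapter,
)

APIBasicConfig.read()  # load the DB paths before instantiating any adapter
exp = ExperimentDbAdapter().get_by_expid("a000")  # mapping, or None at this point of the series
if exp is not None:
    status = ExperimentStatusDbAdapter().get_all_dict().get(exp["name"], "NOT RUNNING")
    print(exp["description"], status)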
+class ExperimentRunDbAdapter: + def __init__(self, expid: str) -> None: + self.expid = expid + self.table_manager = create_db_table_manager( + table=tables.experiment_run_table, + db_filepath=ExperimentPaths(expid).graph_data_db, + schema=expid, + ) + + def get_last_run(self) -> Optional[Dict[str,str]]: + """ + Gets last run of the experiment + """ + with self.table_manager.get_connection() as conn: + row = conn.execute( + select(self.table_manager.table) + .order_by(tables.ExperimentRunTable.run_id.desc()) + .limit(1) + ).one_or_none() + + return row._mapping if row else None diff --git a/autosubmit_api/database/adapters/join/__init__.py b/autosubmit_api/database/adapters/join/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/autosubmit_api/database/adapters/join/experiment_join.py b/autosubmit_api/database/adapters/join/experiment_join.py new file mode 100644 index 0000000..e66bbe1 --- /dev/null +++ b/autosubmit_api/database/adapters/join/experiment_join.py @@ -0,0 +1,24 @@ +from sqlalchemy import select +from autosubmit_api.database import tables +from autosubmit_api.database.common import create_main_db_conn + + +class ExperimentJoinDbAdapter: + """ + Adapter for experiments using Experiment, ExperimentStatus and ExperimentDetails tables. + """ + + def _get_connection(self): + return create_main_db_conn() + + def drop_status_from_deleted_experiments(self) -> int: + with self._get_connection() as conn: + del_stmnt = tables.experiment_status_table.delete().where( + tables.experiment_status_table.c.exp_id.not_in( + select(tables.experiment_table.c.id) + ) + ) + result = conn.execute(del_stmnt) + conn.commit() + + return result.rowcount diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index ab0659e..1592a25 100644 --- a/autosubmit_api/database/common.py +++ b/autosubmit_api/database/common.py @@ -1,44 +1,23 @@ import os -from typing import Any, Union +from typing import Any from sqlalchemy import ( Connection, Engine, - MetaData, NullPool, Select, create_engine, select, text, func, - Table, ) -from sqlalchemy.orm import DeclarativeBase from autosubmit_api.builders import BaseBuilder from autosubmit_api.logger import logger from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit.database import session -APIBasicConfig.read() -try: - _postgres_engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) -except Exception: - pass def get_postgres_engine(): - db = _postgres_engine - if not isinstance(db, Engine): - APIBasicConfig.read() - db = create_engine(APIBasicConfig.DATABASE_CONN_URL) - return db - - -def copy_rename_table(source_table: DeclarativeBase, new_name: str): - dest_table = Table(new_name) - - core_source_table: Table = source_table.__table__ - for col in core_source_table.columns: - dest_table.append_column(col) - - return dest_table + return session.Session().bind class AttachedDatabaseConnBuilder(BaseBuilder): @@ -91,7 +70,9 @@ def create_autosubmit_db_engine() -> Engine: APIBasicConfig.read() if APIBasicConfig.DATABASE_BACKEND == "postgres": return get_postgres_engine() - return create_engine(f"sqlite:///{ os.path.abspath(APIBasicConfig.DB_PATH)}", poolclass=NullPool) + return create_engine( + f"sqlite:///{ os.path.abspath(APIBasicConfig.DB_PATH)}", poolclass=NullPool + ) def create_as_times_db_engine() -> Engine: @@ -128,26 +109,3 @@ def execute_with_limit_offset( total = conn.scalar(count_stmnt) return query_result, total - - -def table_change_schema(schema: str, source: Union[DeclarativeBase, Table]) 
-> Table: - """ - Copy the source table and change the schema of that SQLAlchemy table into a new table instance - """ - if issubclass(source, DeclarativeBase): - _source_table: Table = source.__table__ - elif isinstance(source, Table): - _source_table = source - else: - raise RuntimeError("Invalid source type on table schema change") - - metadata = MetaData(schema=schema) - dest_table = Table(_source_table.name, metadata) - - for col in _source_table.columns: - dest_table.append_column(col.copy()) - - logger.debug(_source_table.columns) - logger.debug(dest_table.columns) - - return dest_table diff --git a/autosubmit_api/database/db_common.py b/autosubmit_api/database/db_common.py index 5875313..de5d3de 100644 --- a/autosubmit_api/database/db_common.py +++ b/autosubmit_api/database/db_common.py @@ -64,36 +64,6 @@ def open_conn(check_version=True) -> Tuple[Connection, Cursor]: conn = sqlite3.connect(APIBasicConfig.DB_PATH) cursor = conn.cursor() - # Getting database version - if check_version: - try: - cursor.execute('SELECT version ' - 'FROM db_version;') - row = cursor.fetchone() - version = row[0] - except sqlite3.OperationalError: - # If this exception is thrown it's because db_version does not exist. - # Database is from 2.x or 3.0 beta releases - try: - cursor.execute('SELECT type ' - 'FROM experiment;') - # If type field exists, it's from 2.x - version = -1 - except sqlite3.Error: - # If raises and error , it's from 3.0 beta releases - version = 0 - - # If database version is not the expected, update database.... - if version < CURRENT_DATABASE_VERSION: - if not _update_database(version, cursor): - raise DbException('Database version could not be updated') - - # ... or ask for autosubmit upgrade - elif version > CURRENT_DATABASE_VERSION: - Log.critical('Database version is not compatible with this autosubmit version. Please execute pip install ' - 'autosubmit --upgrade') - raise DbException('Database version not compatible') - return conn, cursor @@ -366,128 +336,6 @@ def get_experiment_by_id(expid): result['version'] = obj["autosubmit_version"] return result - -def _update_database(version, cursor): - Log.info("Autosubmit's database version is {0}. Current version is {1}. Updating...", - version, CURRENT_DATABASE_VERSION) - try: - # For databases from Autosubmit 2 - if version <= -1: - cursor.executescript('CREATE TABLE experiment_backup(id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, ' - 'name VARCHAR NOT NULL, type VARCHAR, autosubmit_version VARCHAR, ' - 'description VARCHAR NOT NULL, model_branch VARCHAR, template_name VARCHAR, ' - 'template_branch VARCHAR, ocean_diagnostics_branch VARCHAR);' - 'INSERT INTO experiment_backup (name,type,description,model_branch,template_name,' - 'template_branch,ocean_diagnostics_branch) SELECT name,type,description,model_branch,' - 'template_name,template_branch,ocean_diagnostics_branch FROM experiment;' - 'UPDATE experiment_backup SET autosubmit_version = "2";' - 'DROP TABLE experiment;' - 'ALTER TABLE experiment_backup RENAME TO experiment;') - if version <= 0: - # Autosubmit beta version. 
Create db_version table - cursor.executescript('CREATE TABLE db_version(version INTEGER NOT NULL);' - 'INSERT INTO db_version (version) VALUES (1);' - 'ALTER TABLE experiment ADD COLUMN autosubmit_version VARCHAR;' - 'UPDATE experiment SET autosubmit_version = "3.0.0b" ' - 'WHERE autosubmit_version NOT NULL;') - cursor.execute('UPDATE db_version SET version={0};'.format( - CURRENT_DATABASE_VERSION)) - except sqlite3.Error as e: - Log.critical('Can not update database: {0}', e) - return False - Log.info("Update completed") - return True - - -def update_experiment_description_owner(name, new_description=None, owner=None): - """ - We are suppossing that the front-end is making the owner validation. - :param expid: - :type expid: - :param new_description: - :type new_description: - :param owner: - :type owner: - """ - error = False - auth = False - description = None - message = None - try: - if new_description and owner: - result = _update_experiment_descrip_version(name, new_description) - if result: - auth = True - description = new_description - message = "Description Updated." - else: - error = True - if not new_description and not owner: - auth = False - message = "Not a valid user and no description provided" - elif new_description and not owner: - # Description provided by no valid user - auth = False - message = "It seems that your session has expired, please log in again." - else: - message = "No description provided." - except Exception as exp: - error = True - message = str(exp) - return { - 'error': error, - 'auth': auth, - 'description': description, - 'message': message - } - - -def _update_experiment_descrip_version(name, description=None, version=None): - """ - Updates the experiment's description and/or version - - :param name: experiment name (expid) - :rtype name: str - :param description: experiment new description - :rtype description: str - :param version: experiment autosubmit version - :rtype version: str - :return: If description has been update, True; otherwise, False. 
- :rtype: bool - """ - if not check_db(): - return False - try: - (conn, cursor) = open_conn() - except DbException as e: - raise Exception( - "Could not establish a connection to the database.") - conn.isolation_level = None - - # Changing default unicode - conn.text_factory = str - # Conditional update - if description is not None and version is not None: - cursor.execute('update experiment set description=:description, autosubmit_version=:version where name=:name', { - 'description': description, 'version': version, 'name': name}) - elif description is not None and version is None: - cursor.execute('update experiment set description=:description where name=:name', { - 'description': description, 'name': name}) - elif version is not None and description is None: - cursor.execute('update experiment set autosubmit_version=:version where name=:name', { - 'version': version, 'name': name}) - else: - raise Exception( - "Not enough data to update {}.".format(name)) - row = cursor.rowcount - close_conn(conn, cursor) - if row == 0: - raise Exception( - "Update on experiment {} failed.".format(name)) - return False - return True - - class DbException(Exception): """ Exception class for database errors diff --git a/autosubmit_api/views/v3.py b/autosubmit_api/views/v3.py index 23c9d62..a8347c6 100644 --- a/autosubmit_api/views/v3.py +++ b/autosubmit_api/views/v3.py @@ -8,7 +8,6 @@ from flask import request, session, redirect from autosubmit_api.auth import ProtectionLevels, with_auth_token from autosubmit_api.database.db_common import ( get_current_running_exp, - update_experiment_description_owner, ) from autosubmit_api.experiment import common_requests as CommonRequests from autosubmit_api.experiment import utils as Utiles @@ -113,16 +112,17 @@ def update_description(user_id: Optional[str] = None): """ Updates the description of an experiment. Requires authenticated user. 
""" - expid = None - new_description = None - if request.is_json: - body_data = request.json - expid = body_data.get("expid", None) - new_description = body_data.get("description", None) - return ( - update_experiment_description_owner(expid, new_description, user_id), - HTTPStatus.OK if user_id else HTTPStatus.UNAUTHORIZED, - ) + raise NotImplementedError + # expid = None + # new_description = None + # if request.is_json: + # body_data = request.json + # expid = body_data.get("expid", None) + # new_description = body_data.get("description", None) + # return ( + # update_experiment_description_owner(expid, new_description, user_id), + # HTTPStatus.OK if user_id else HTTPStatus.UNAUTHORIZED, + # ) @cross_origin(expose_headers="Authorization") diff --git a/autosubmit_api/workers/populate_details/populate.py b/autosubmit_api/workers/populate_details/populate.py index fba8cba..ade623d 100644 --- a/autosubmit_api/workers/populate_details/populate.py +++ b/autosubmit_api/workers/populate_details/populate.py @@ -1,10 +1,8 @@ -import textwrap - -from sqlalchemy import text +from autosubmit_api.database.adapters import ( + ExperimentDetailsDbAdapter, + ExperimentDbAdapter, +) from autosubmit_api.logger import logger -from autosubmit_api.database import tables - -from autosubmit_api.database.common import create_autosubmit_db_engine from autosubmit_api.builders.configuration_facade_builder import ( ConfigurationFacadeDirector, AutosubmitConfigurationFacadeBuilder, @@ -23,7 +21,8 @@ Experiment = namedtuple("Experiment", ["id", "name"]) class DetailsProcessor: def __init__(self, basic_config: APIBasicConfig): self.basic_config = basic_config - self.main_db_engine = create_autosubmit_db_engine() + self.experiment_db = ExperimentDbAdapter() + self.details_db = ExperimentDetailsDbAdapter() def process(self): new_details = self._get_all_details() @@ -32,8 +31,7 @@ class DetailsProcessor: def _get_experiments(self) -> List[Experiment]: experiments = [] - with self.main_db_engine.connect() as conn: - query_result = conn.execute(tables.experiment_table.select()).all() + query_result = self.experiment_db.get_all() for exp in query_result: experiments.append( @@ -80,16 +78,9 @@ class DetailsProcessor: return result def _insert_many_into_details_table(self, values: List[dict]) -> int: - with self.main_db_engine.connect() as conn: - result = conn.execute( - tables.details_table.insert(), values - ) # Executemany style https://docs.sqlalchemy.org/en/20/tutorial/data_insert.html#insert-usually-generates-the-values-clause-automatically - conn.commit() - return result.rowcount + rowcount = self.details_db.insert_many(values) + return rowcount - def _clean_table(self): - # type: () -> None - with self.main_db_engine.connect() as conn: - with conn.execution_options(isolation_level="AUTOCOMMIT"): - conn.execute(tables.details_table.delete()) - conn.execute(text("VACUUM;")) + def _clean_table(self) -> int: + rowcount = self.details_db.delete_all() + return rowcount diff --git a/tests/experiments/autosubmit.db b/tests/experiments/autosubmit.db index 472787336d33e91b529d921869dcc82dea543426..9e09467e75b138708f6697d30642b0281b119884 100644 GIT binary patch delta 19 acmZozz}T>Wae_2s;zSu|#>9;Y3;Y2==LVtx delta 19 acmZozz}T>Wae_2s^h6nF#^{X+3;Y2=jRuwg diff --git a/tests/experiments/metadata/graph/graph_data_a003.db b/tests/experiments/metadata/graph/graph_data_a003.db index 1862073cccbab88a43d010644c5c6316b4202aa1..736e381f49520ad0d6caa607f0a964faeffa80ce 100755 GIT binary patch delta 17 YcmZp0XmFSy&FDK(#+lJ~W5NP?05Qr1hyVZp 
delta 17 YcmZp0XmFSy&FDT+#+lK5W5NP?05Na{d;kCd -- GitLab From 258aa3ff7505796f7700690fa22364baa141666b Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 17 May 2024 14:44:55 +0200 Subject: [PATCH 11/26] refactor db_common --- autosubmit_api/builders/experiment_builder.py | 1 - .../database/adapters/experiment.py | 18 +- .../database/adapters/join/experiment_join.py | 40 +- autosubmit_api/database/db_common.py | 345 ------------------ autosubmit_api/database/utils.py | 16 - autosubmit_api/experiment/common_requests.py | 193 +++++++++- autosubmit_api/views/v3.py | 4 +- autosubmit_api/views/v4.py | 24 +- 8 files changed, 251 insertions(+), 390 deletions(-) delete mode 100644 autosubmit_api/database/db_common.py delete mode 100644 autosubmit_api/database/utils.py diff --git a/autosubmit_api/builders/experiment_builder.py b/autosubmit_api/builders/experiment_builder.py index 4aab284..c21e53d 100644 --- a/autosubmit_api/builders/experiment_builder.py +++ b/autosubmit_api/builders/experiment_builder.py @@ -7,7 +7,6 @@ from autosubmit_api.builders.configuration_facade_builder import ( from autosubmit_api.database import tables from autosubmit_api.database.common import ( create_autosubmit_db_engine, - create_main_db_conn, ) from autosubmit_api.database.models import ExperimentModel diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py index 718fb73..b22b409 100644 --- a/autosubmit_api/database/adapters/experiment.py +++ b/autosubmit_api/database/adapters/experiment.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional +from typing import Any, Dict from autosubmit.database.db_manager import create_db_table_manager from autosubmit_api.config.basicConfig import APIBasicConfig @@ -13,15 +13,25 @@ class ExperimentDbAdapter: ) def get_all(self): + """ + Return all experiments. + """ with self.table_manager.get_connection() as conn: rows = self.table_manager.select_all(conn) return rows - def get_by_expid(self, expid) -> Optional[Dict[str, Any]]: + def get_by_expid(self, expid: str) -> Dict[str, Any]: + """ + Get experiment by expid. + + :param expid: Experiment ID. + :raises: sqlalchemy.orm.exc.NoResultFound if no experiment is found. + :raises: sqlalchemy.orm.exc.MultipleResultsFound if more than one experiment is found. 
+ """ with self.table_manager.get_connection() as conn: row = conn.execute( self.table_manager.table.select().where( tables.ExperimentTable.name == expid ) - ).one_or_none() - return row._mapping if row else None + ).one() + return row._mapping diff --git a/autosubmit_api/database/adapters/join/experiment_join.py b/autosubmit_api/database/adapters/join/experiment_join.py index e66bbe1..97c0a50 100644 --- a/autosubmit_api/database/adapters/join/experiment_join.py +++ b/autosubmit_api/database/adapters/join/experiment_join.py @@ -1,6 +1,10 @@ from sqlalchemy import select from autosubmit_api.database import tables -from autosubmit_api.database.common import create_main_db_conn +from autosubmit_api.database.common import ( + create_main_db_conn, + execute_with_limit_offset, +) +from autosubmit_api.database.queries import generate_query_listexp_extended class ExperimentJoinDbAdapter: @@ -22,3 +26,37 @@ class ExperimentJoinDbAdapter: conn.commit() return result.rowcount + + def search( + self, + query: str = None, + only_active: bool = False, + owner: str = None, + exp_type: str = None, + autosubmit_version: str = None, + order_by: str = None, + order_desc: bool = False, + limit: int = None, + offset: int = None, + ): + """ + Search experiments with extended information. + """ + statement = generate_query_listexp_extended( + query=query, + only_active=only_active, + owner=owner, + exp_type=exp_type, + autosubmit_version=autosubmit_version, + order_by=order_by, + order_desc=order_desc, + ) + with self._get_connection() as conn: + query_result, total_rows = execute_with_limit_offset( + statement=statement, + conn=conn, + limit=limit, + offset=offset, + ) + + return query_result, total_rows diff --git a/autosubmit_api/database/db_common.py b/autosubmit_api/database/db_common.py deleted file mode 100644 index de5d3de..0000000 --- a/autosubmit_api/database/db_common.py +++ /dev/null @@ -1,345 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 Earth Sciences Department, BSC-CNS - -# This file is part of Autosubmit. - -# Autosubmit is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# Autosubmit is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with Autosubmit. If not, see . - -""" -Module containing functions to manage autosubmit's database. 
-""" -import os -from sqlite3 import Connection, Cursor -import sqlite3 - -from bscearth.utils.log import Log -from autosubmit_api.config.basicConfig import APIBasicConfig -from autosubmit_api.builders.experiment_history_builder import ExperimentHistoryDirector, ExperimentHistoryBuilder -from autosubmit_api.builders.configuration_facade_builder import ConfigurationFacadeDirector, AutosubmitConfigurationFacadeBuilder -from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter -from autosubmit_api.database.utils import get_headers_sqlite, map_row_result_to_dict_sqlite -from autosubmit_api.experiment import common_db_requests as DbRequests -from typing import Dict, Any, Tuple - -CURRENT_DATABASE_VERSION = 1 - - -def check_db(): - """ - Checks if database file exist - - :return: None if exists, terminates program if not - """ - APIBasicConfig.read() - if not os.path.exists(APIBasicConfig.DB_PATH): - Log.error('Some problem has happened...check the database file.' + - 'DB file:' + APIBasicConfig.DB_PATH) - return False - return True - - -def open_conn(check_version=True) -> Tuple[Connection, Cursor]: - """ - Opens a connection to database - - :param check_version: If true, check if the database is compatible with this autosubmit version - :type check_version: bool - :return: connection object, cursor object - :rtype: sqlite3.Connection, sqlite3.Cursor - """ - APIBasicConfig.read() - print((APIBasicConfig.DB_PATH)) - conn = sqlite3.connect(APIBasicConfig.DB_PATH) - cursor = conn.cursor() - - return conn, cursor - - -def close_conn(conn: Connection, cursor): - """ - Commits changes and close connection to database - - :param conn: connection to close - :type conn: sqlite3.Connection - :param cursor: cursor to close - :type cursor: sqlite3.Cursor - """ - conn.commit() - cursor.close() - conn.close() - return - - - -def get_autosubmit_version(expid, log=None): - """ - Get the minimun autosubmit version needed for the experiment - - :param expid: Experiment name - :type expid: str - :return: If experiment exists returns the autosubmit version for it, if not returns None - :rtype: str - """ - if not check_db(): - return False - - try: - (conn, cursor) = open_conn() - except DbException as e: - if log: - log.error( - 'Connection to database could not be established: {0}', e.message) - return False - conn.isolation_level = None - - # SQLite always return a unicode object, but we can change this - # behaviour with the next sentence - conn.text_factory = str - cursor.execute('SELECT autosubmit_version FROM experiment WHERE name=:expid', { - 'expid': expid}) - row = cursor.fetchone() - close_conn(conn, cursor) - if row is None: - if log: - log.error('The experiment "{0}" does not exist yet!!!', expid) - return None - return row[0] - - -def search_experiment_by_id(query, exp_type=None, only_active=None, owner=None): - """ - Search experiments using provided data. Main query searches in the view listexp of ec_earth.db. - - :param searchString: string used to match columns in the table - :type searchString: str - :param typeExp: Assumes values "test" (only experiments starting with 't') or "experiment" (not experiment starting with 't') or "all" (indistinct). 
- :type typeExp: str - :param onlyActive: Assumes "active" (only active experiments) or "" (indistinct) - :type onlyActive: str - :param owner: return only experiment that match the provided owner of the experiment - :type owner: str - :return: list of experiments that match the search - :rtype: JSON - """ - if not check_db(): - return False - try: - (conn, cursor) = open_conn() - except DbException as e: - Log.error( - 'Connection to database could not be established: {0}', e.message) - return False - if owner: - query = "SELECT id,name,user,created,model,branch,hpc,description FROM experiment e left join details d on e.id = d.exp_id WHERE user='{0}'".format(owner) - # print(query) - else: - query = "SELECT id,name,user,created,model,branch,hpc,description FROM experiment e left join details d on e.id = d.exp_id WHERE (name LIKE '" + query + \ - "%' OR description LIKE '%" + query + \ - "%' OR user LIKE '%" + query + "%')" - if exp_type and len(exp_type) > 0: - if exp_type == "test": - query += " AND name LIKE 't%'" - elif exp_type == "experiment": - query += " AND name NOT LIKE 't%'" - else: - # Indistinct - pass - # Query DESC by name - query += " ORDER BY name DESC" - # print(query) - cursor.execute(query) - table = cursor.fetchall() - cursor.close() - conn.close() - result = list() - experiment_status = dict() - experiment_times = dict() - if len(table) > 0: - # Get experiment status table - try: - experiment_status = ExperimentStatusDbAdapter().get_all_dict() - except Exception: - experiment_status = {} - - for row in table: - expid = str(row[1]) - - status = experiment_status.get(expid, "NOT RUNNING") - if only_active == "active" and status != "RUNNING": - continue - - completed = "NA" - total = "NA" - submitted = 0 - queuing = 0 - running = 0 - failed = 0 - suspended = 0 - version = "Unknown" - wrapper = None - last_modified_timestamp = None - last_modified_pkl_datetime = None - hpc = row[6] - try: - autosubmit_config_facade = ConfigurationFacadeDirector(AutosubmitConfigurationFacadeBuilder(expid)).build_autosubmit_configuration_facade() - version = autosubmit_config_facade.get_autosubmit_version() - wrapper = autosubmit_config_facade.get_wrapper_type() - last_modified_pkl_datetime = autosubmit_config_facade.get_pkl_last_modified_time_as_datetime() - hpc = autosubmit_config_facade.get_main_platform() - except Exception as exp: - last_modified_pkl_datetime = None - pass - - total, completed, last_modified_timestamp = experiment_times.get( - expid, ("NA", "NA", None)) - - # Getting run data from historical database - - try: - current_run = ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)).build_reader_experiment_history().manager.get_experiment_run_dc_with_max_id() - if current_run and current_run.total > 0: - completed = current_run.completed - total = current_run.total - submitted = current_run.submitted - queuing = current_run.queuing - running = current_run.running - failed = current_run.failed - suspended = current_run.suspended - last_modified_timestamp = current_run.modified_timestamp - except Exception as exp: - print(("Exception on search_experiment_by_id : {}".format(exp))) - pass - - result.append({'id': row[0], 'name': row[1], 'user': row[2], 'description': row[7], - 'hpc': hpc, 'status': status, 'completed': completed, 'total': total, - 'version': version, 'wrapper': wrapper, "submitted": submitted, "queuing": queuing, - "running": running, "failed": failed, "suspended": suspended, "modified": last_modified_pkl_datetime}) - return {'experiment': result} - - -def 
get_current_running_exp(): - """ - Simple query that gets the list of experiments currently running - - :rtype: list of users - """ - if not check_db(): - return False - try: - (conn, cursor) = open_conn() - except DbException as e: - Log.error( - 'Connection to database could not be established: {0}', e.message) - return False - query = "SELECT id,name,user,created,model,branch,hpc,description FROM experiment e left join details d on e.id = d.exp_id" - APIBasicConfig.read() - # print(query) - cursor.execute(query) - table = cursor.fetchall() - cursor.close() - conn.close() - result = list() - experiment_status = dict() - experiment_times = dict() - # Get experiment status table - try: - experiment_status = ExperimentStatusDbAdapter().get_all_dict() - except Exception: - experiment_status = {} - - for row in table: - expid = str(row[1]) - status = "NOT RUNNING" - completed = "NA" - total = "NA" - submitted = 0 - queuing = 0 - running = 0 - failed = 0 - suspended = 0 - user = str(row[2]) - version = "Unknown" - wrapper = None - last_modified_timestamp = None - last_modified_pkl_datetime = None - if (expid in experiment_status): - status = experiment_status[expid] - if status == "RUNNING": - try: - autosubmit_config_facade = ConfigurationFacadeDirector(AutosubmitConfigurationFacadeBuilder(expid)).build_autosubmit_configuration_facade() - version = autosubmit_config_facade.get_autosubmit_version() - wrapper = autosubmit_config_facade.get_wrapper_type() - last_modified_pkl_datetime = autosubmit_config_facade.get_pkl_last_modified_time_as_datetime() - hpc = autosubmit_config_facade.get_main_platform() - except Exception as exp: - last_modified_pkl_datetime = None - pass - if (expid in experiment_times): - if len(user) == 0: - # Retrieve user from path - path = APIBasicConfig.LOCAL_ROOT_DIR + '/' + expid - if (os.path.exists(path)): - main_folder = os.stat(path) - user = os.popen( - 'id -nu {0}'.format(str(main_folder.st_uid))).read().strip() - total, completed, last_modified_timestamp = experiment_times[expid] - # Try to retrieve experiment_run data - try: - current_run = ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)).build_reader_experiment_history().manager.get_experiment_run_dc_with_max_id() - if current_run and current_run.total > 0: - completed = current_run.completed - total = current_run.total - submitted = current_run.submitted - queuing = current_run.queuing - running = current_run.running - failed = current_run.failed - suspended = current_run.suspended - last_modified_timestamp = current_run.modified_timestamp - except Exception as exp: - print(("Exception on get_current_running_exp : {}".format(exp))) - pass - result.append({'id': row[0], 'name': row[1], 'user': user, 'description': row[7], - 'hpc': hpc, 'status': status, 'completed': completed, 'total': total, - 'version': version, 'wrapper': wrapper, "submitted": submitted, "queuing": queuing, - "running": running, "failed": failed, "suspended": suspended, "modified": last_modified_pkl_datetime}) - return {'experiment': result} - - -def get_experiment_by_id(expid): - # type: (str) -> Dict[str, Any] - result = {'id': 0, 'name': expid, 'description': "NA", 'version': "NA"} - if not check_db(): - return result - (conn, cursor) = open_conn() - query = "SELECT id, name, description, autosubmit_version FROM experiment WHERE name ='" + expid + "'" - cursor.execute(query) - headers = get_headers_sqlite(cursor) - row = cursor.fetchone() - close_conn(conn, cursor) - if row is not None: - obj = map_row_result_to_dict_sqlite(row, 
headers) - result['id'] = obj["id"] - result['name'] = obj["name"] - result['description'] = obj["description"] - result['version'] = obj["autosubmit_version"] - return result - -class DbException(Exception): - """ - Exception class for database errors - """ - - def __init__(self, message): - self.message = message diff --git a/autosubmit_api/database/utils.py b/autosubmit_api/database/utils.py deleted file mode 100644 index 0c46773..0000000 --- a/autosubmit_api/database/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlite3 import Cursor -from typing import List, Any - - -def get_headers_sqlite(cursor: Cursor): - """ - Get headers in position of a sqlite query cursor - """ - return list(map(lambda attr: attr[0], cursor.description)) - - -def map_row_result_to_dict_sqlite(row: List[Any], headers: List[str]): - """ - Return a dict of the rows as values with keys as their respective header. - """ - return {header: row[i] for i, header in enumerate(headers)} diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py index c1a87ee..1631d84 100644 --- a/autosubmit_api/experiment/common_requests.py +++ b/autosubmit_api/experiment/common_requests.py @@ -29,11 +29,13 @@ import multiprocessing import subprocess from collections import deque + from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer from autosubmit_api.components.jobs.job_factory import SimpleJob from autosubmit_api.config.confConfigStrategy import confConfigStrategy -from autosubmit_api.database import db_common as db_common +from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter +from autosubmit_api.database.adapters import ExperimentJoinDbAdapter from autosubmit_api.experiment import common_db_requests as DbRequests from autosubmit_api.database import db_jobdata as JobData from autosubmit_api.common import utils as common_utils @@ -164,7 +166,10 @@ def get_experiment_data(expid): result["owner"] = autosubmit_config_facade.get_owner_name() result["time_last_access"] = autosubmit_config_facade.get_experiment_last_access_time_as_datetime() result["time_last_mod"] = autosubmit_config_facade.get_experiment_last_modified_time_as_datetime() - result["description"] = db_common.get_experiment_by_id(expid)["description"] + try: + result["description"] = ExperimentDbAdapter().get_by_expid(expid).get("description", "NA") + except Exception: + result["description"] = "NA" result["version"] = autosubmit_config_facade.get_autosubmit_version() result["model"] = autosubmit_config_facade.get_model() result["branch"] = autosubmit_config_facade.get_branch() @@ -760,8 +765,8 @@ def get_experiment_tree_structured(expid, log): try: APIBasicConfig.read() - # TODO: Encapsulate this following 2 lines or move to the parent function in app.py - curr_exp_as_version = db_common.get_autosubmit_version(expid, log) + # TODO: Encapsulate this following 2 lines or move to the parent function in app. 
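Since get_by_expid now raises when no row matches (it uses .one() semantics), reads that must never break a response are wrapped defensively, as in the description hunk above. The same pattern in isolation, with a hypothetical expid:

# Illustrative only, not part of the patch: defensive read through the adapter.
from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter

def safe_description(expid: str) -> str:
    try:
        # .one() raises NoResultFound / MultipleResultsFound on bad expids
        return ExperimentDbAdapter().get_by_expid(expid).get("description", "NA")
    except Exception:
        return "NA"

print(safe_description("a000"))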
+ curr_exp_as_version: str = ExperimentDbAdapter().get_by_expid(expid).get("autosubmit_version") main, secondary = common_utils.parse_version_number(curr_exp_as_version) if main and main >= 4: as_conf = Autosubmit4Config(expid) @@ -1257,3 +1262,183 @@ def enforceLocal(log): except Exception: log.info("Locale C.utf8 is not found, using '{0}' as fallback".format("C")) locale.setlocale(locale.LC_ALL, 'C') + + +def search_experiment_by_id( + query: str, exp_type: str = None, only_active: bool = None, owner: str = None +): + """ + Search experiments using the provided filters. The main query joins the experiment, details and status tables through ExperimentJoinDbAdapter. + + :param query: string matched against the experiment name, description and owner + :type query: str + :param exp_type: assumes values "test" (only experiments starting with 't'), "experiment" (experiments not starting with 't') or "all" (indistinct) + :type exp_type: str + :param only_active: if True, return only experiments whose status is RUNNING + :type only_active: bool + :param owner: return only experiments that match the provided owner + :type owner: str + :return: list of experiments that match the search + :rtype: dict + """ + result = list() + query_result, _ = ExperimentJoinDbAdapter().search( + query=query, exp_type=exp_type, only_active=only_active, owner=owner + ) + + for raw_row in query_result: + row = raw_row._mapping + expid = str(row["name"]) + completed = "NA" + total = "NA" + submitted = 0 + queuing = 0 + running = 0 + failed = 0 + suspended = 0 + version = "Unknown" + wrapper = None + # last_modified_timestamp = None + last_modified_pkl_datetime = None + hpc = row["hpc"] + try: + autosubmit_config_facade = ConfigurationFacadeDirector( + AutosubmitConfigurationFacadeBuilder(expid) + ).build_autosubmit_configuration_facade() + version = autosubmit_config_facade.get_autosubmit_version() + wrapper = autosubmit_config_facade.get_wrapper_type() + last_modified_pkl_datetime = ( + autosubmit_config_facade.get_pkl_last_modified_time_as_datetime() + ) + hpc = autosubmit_config_facade.get_main_platform() + except Exception: + last_modified_pkl_datetime = None + pass + + total, completed = ("NA", "NA") + + # Getting run data from historical database + try: + current_run = ( + ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)) + .build_reader_experiment_history() + .manager.get_experiment_run_dc_with_max_id() + ) + if current_run and current_run.total > 0: + completed = current_run.completed + total = current_run.total + submitted = current_run.submitted + queuing = current_run.queuing + running = current_run.running + failed = current_run.failed + suspended = current_run.suspended + # last_modified_timestamp = current_run.modified_timestamp + except Exception as exp: + print(("Exception on search_experiment_by_id : {}".format(exp))) + pass + + result.append( + { + "id": row["id"], + "name": row["name"], + "user": row["user"], + "description": row["description"], + "hpc": hpc, + "status": row["status"], + "completed": completed, + "total": total, + "version": version, + "wrapper": wrapper, + "submitted": submitted, + "queuing": queuing, + "running": running, + "failed": failed, + "suspended": suspended, + "modified": last_modified_pkl_datetime, + } + ) + return {"experiment": result} + + +def get_current_running_exp(): + """ + Gets the list of experiments that are currently running + + :return: dict with the list of running experiments + :rtype: dict + """ + result = list() + query_result, _ = ExperimentJoinDbAdapter().search(only_active=True) + + for raw_row in query_result:
row = raw_row._mapping + expid = str(row["name"]) + status = "NOT RUNNING" + completed = "NA" + total = "NA" + submitted = 0 + queuing = 0 + running = 0 + failed = 0 + suspended = 0 + user = str(row["user"]) + version = "Unknown" + wrapper = None + # last_modified_timestamp = None + last_modified_pkl_datetime = None + hpc = row["hpc"] # default from the joined query, so the append below never hits an unbound name + status = str(row["status"]) + if status == "RUNNING": + try: + autosubmit_config_facade = ConfigurationFacadeDirector( + AutosubmitConfigurationFacadeBuilder(expid) + ).build_autosubmit_configuration_facade() + version = autosubmit_config_facade.get_autosubmit_version() + wrapper = autosubmit_config_facade.get_wrapper_type() + last_modified_pkl_datetime = ( + autosubmit_config_facade.get_pkl_last_modified_time_as_datetime() + ) + hpc = autosubmit_config_facade.get_main_platform() + except Exception: + # last_modified_pkl_datetime = None + pass + + # Try to retrieve experiment_run data + try: + current_run = ( + ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)) + .build_reader_experiment_history() + .manager.get_experiment_run_dc_with_max_id() + ) + if current_run and current_run.total > 0: + completed = current_run.completed + total = current_run.total + submitted = current_run.submitted + queuing = current_run.queuing + running = current_run.running + failed = current_run.failed + suspended = current_run.suspended + # last_modified_timestamp = current_run.modified_timestamp + except Exception as exp: + print(("Exception on get_current_running_exp : {}".format(exp))) + + # Append to result + result.append( + { + "id": row["id"], + "name": row["name"], + "user": user, + "description": row["description"], + "hpc": hpc, + "status": status, + "completed": completed, + "total": total, + "version": version, + "wrapper": wrapper, + "submitted": submitted, + "queuing": queuing, + "running": running, + "failed": failed, + "suspended": suspended, + "modified": last_modified_pkl_datetime, + } + ) + return {"experiment": result} diff --git a/autosubmit_api/views/v3.py b/autosubmit_api/views/v3.py index a8347c6..a7e091c 100644 --- a/autosubmit_api/views/v3.py +++ b/autosubmit_api/views/v3.py @@ -6,14 +6,14 @@ import requests from flask_cors import cross_origin from flask import request, session, redirect from autosubmit_api.auth import ProtectionLevels, with_auth_token -from autosubmit_api.database.db_common import ( +from autosubmit_api.experiment.common_requests import ( get_current_running_exp, ) from autosubmit_api.experiment import common_requests as CommonRequests from autosubmit_api.experiment import utils as Utiles from autosubmit_api.logger import logger, with_log_run_times from autosubmit_api.performance.performance_metrics import PerformanceMetrics -from autosubmit_api.database.db_common import search_experiment_by_id +from autosubmit_api.experiment.common_requests import search_experiment_by_id from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.builders.joblist_helper_builder import ( JobListHelperBuilder, diff --git a/autosubmit_api/views/v4.py b/autosubmit_api/views/v4.py index d7101ce..8794b74 100644 --- a/autosubmit_api/views/v4.py +++ b/autosubmit_api/views/v4.py @@ -18,11 +18,7 @@ from autosubmit_api.builders.experiment_history_builder import ( ) from autosubmit_api.common.utils import Status from autosubmit_api.database import tables -from autosubmit_api.database.common import ( - create_main_db_conn, - execute_with_limit_offset, -) -from autosubmit_api.database.queries import generate_query_listexp_extended +from
autosubmit_api.database.adapters import ExperimentJoinDbAdapter from autosubmit_api.logger import logger, with_log_run_times from cas import CASClient from autosubmit_api import config @@ -231,11 +227,11 @@ class ExperimentView(MethodView): else: page_size = None offset = None - except: + except Exception: return {"error": {"message": "Invalid params"}}, HTTPStatus.BAD_REQUEST # Query - statement = generate_query_listexp_extended( + query_result, total_rows = ExperimentJoinDbAdapter().search( query=query, only_active=only_active, owner=owner, @@ -243,14 +239,9 @@ class ExperimentView(MethodView): autosubmit_version=autosubmit_version, order_by=order_by, order_desc=order_desc, + limit=page_size, + offset=offset, ) - with create_main_db_conn() as conn: - query_result, total_rows = execute_with_limit_offset( - statement=statement, - conn=conn, - limit=page_size, - offset=offset, - ) # Process experiments experiments = [] @@ -270,7 +261,7 @@ class ExperimentView(MethodView): exp = exp_builder.product # Get current run data from history - last_modified_timestamp = exp.created + # last_modified_timestamp = exp.created completed = 0 total = 0 submitted = 0 @@ -292,7 +283,7 @@ class ExperimentView(MethodView): running = current_run.running failed = current_run.failed suspended = current_run.suspended - last_modified_timestamp = current_run.modified_timestamp + # last_modified_timestamp = current_run.modified_timestamp except Exception as exc: logger.warning((f"Exception getting the current run on search: {exc}")) logger.warning(traceback.format_exc()) @@ -412,7 +403,6 @@ class ExperimentWrappersView(MethodView): decorators = [with_auth_token(), with_log_run_times(logger, "WRAPPERS")] def get(self, expid: str, user_id: Optional[str] = None): - job_package_reader = JobPackageReader(expid) job_package_reader.read() -- GitLab From f925963d5ea55690ec0a5a0e58b98629923ef343 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 17 May 2024 16:40:23 +0200 Subject: [PATCH 12/26] limit SQLAlchemy usage --- autosubmit_api/builders/experiment_builder.py | 33 ++++++--------- autosubmit_api/database/__init__.py | 31 ++++---------- autosubmit_api/database/adapters/__init__.py | 12 +++++- .../database/adapters/experiment.py | 7 ++++ .../database/adapters/experiment_details.py | 25 +++++++++++ .../database/adapters/experiment_status.py | 21 +++++++++- .../database/adapters/job_packages.py | 42 +++++++++++++++++++ autosubmit_api/database/common.py | 8 ++++ autosubmit_api/database/db_jobdata.py | 2 +- .../persistance/job_package_reader.py | 33 +++++---------- .../business/process_graph_drawings.py | 19 +++------ 11 files changed, 149 insertions(+), 84 deletions(-) create mode 100644 autosubmit_api/database/adapters/job_packages.py diff --git a/autosubmit_api/builders/experiment_builder.py b/autosubmit_api/builders/experiment_builder.py index c21e53d..77ba3b5 100644 --- a/autosubmit_api/builders/experiment_builder.py +++ b/autosubmit_api/builders/experiment_builder.py @@ -1,13 +1,11 @@ import datetime +from autosubmit_api.logger import logger from autosubmit_api.builders import BaseBuilder from autosubmit_api.builders.configuration_facade_builder import ( AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector, ) -from autosubmit_api.database import tables -from autosubmit_api.database.common import ( - create_autosubmit_db_engine, -) +from autosubmit_api.database.adapters import ExperimentDbAdapter, ExperimentDetailsDbAdapter from autosubmit_api.database.models import ExperimentModel @@ -23,19 +21,14 @@ 
class ExperimentBuilder(BaseBuilder): """ Produce basic information from the main experiment table """ - with create_autosubmit_db_engine().connect() as conn: - result = conn.execute( - tables.experiment_table.select().where( - tables.experiment_table.c.name == expid - ) - ).one() + result = ExperimentDbAdapter().get_by_expid(expid) # Set new product self._product = ExperimentModel( - id=result.id, - name=result.name, - description=result.description, - autosubmit_version=result.autosubmit_version, + id=result["id"], + name=result["name"], + description=result["description"], + autosubmit_version=result["autosubmit_version"], ) def produce_details(self): @@ -43,12 +36,11 @@ class ExperimentBuilder(BaseBuilder): Produce data from the details table """ exp_id = self._product.id - with create_autosubmit_db_engine().connect() as conn: - result = conn.execute( - tables.details_table.select().where( - tables.details_table.c.exp_id == exp_id - ) - ).one_or_none() + result = None + try: + result = ExperimentDetailsDbAdapter().get_by_exp_id(exp_id) + except Exception: + logger.error(f"Error getting details for exp_id {exp_id}") # Set details props if result: @@ -80,6 +72,7 @@ class ExperimentBuilder(BaseBuilder): ).isoformat() except Exception: self._product.modified = None + logger.error(f"Error getting modified date for expid {expid}") @property def product(self) -> ExperimentModel: diff --git a/autosubmit_api/database/__init__.py b/autosubmit_api/database/__init__.py index 03445e0..13b7af0 100644 --- a/autosubmit_api/database/__init__.py +++ b/autosubmit_api/database/__init__.py @@ -1,28 +1,11 @@ -from sqlalchemy import Connection, Table -from autosubmit_api.database.common import ( - create_as_times_db_engine, - create_autosubmit_db_engine, +from autosubmit_api.database.adapters import ( + ExperimentDbAdapter, + ExperimentDetailsDbAdapter, + ExperimentStatusDbAdapter, ) -from autosubmit_api.database import tables - - -def _create_autosubmit_db_tables(conn: Connection): - experiment_table: Table = tables.ExperimentTable.__table__ - experiment_table.create(conn, checkfirst=True) - details_table: Table = tables.DetailsTable.__table__ - details_table.create(conn, checkfirst=True) - - -def _create_as_times_db_tables(conn: Connection): - experiment_status_table: Table = tables.ExperimentStatusTable.__table__ - experiment_status_table.create(conn, checkfirst=True) def prepare_db(): - with create_as_times_db_engine().connect() as conn: - _create_as_times_db_tables(conn) - conn.commit() - - with create_autosubmit_db_engine().connect() as conn: - _create_autosubmit_db_tables(conn) - conn.commit() + ExperimentDbAdapter().create_table() + ExperimentDetailsDbAdapter().create_table() + ExperimentStatusDbAdapter().create_table() diff --git a/autosubmit_api/database/adapters/__init__.py b/autosubmit_api/database/adapters/__init__.py index 55dadbb..2a8b182 100644 --- a/autosubmit_api/database/adapters/__init__.py +++ b/autosubmit_api/database/adapters/__init__.py @@ -14,7 +14,13 @@ from autosubmit_api.database.adapters.experiment_status import ( ExperimentStatusDbAdapter, ) from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter -from autosubmit_api.database.adapters.join.experiment_join import ExperimentJoinDbAdapter +from autosubmit_api.database.adapters.join.experiment_join import ( + ExperimentJoinDbAdapter, +) +from autosubmit_api.database.adapters.job_packages import ( + JobPackagesDbAdapter, + WrapperJobPackagesDbAdapter, +) __all__ = [ @@ -22,5 +28,7 @@ __all__ = [ 
"ExperimentDetailsDbAdapter", "ExperimentStatusDbAdapter", "ExpGraphDrawDBAdapter", - "ExperimentJoinDbAdapter" + "ExperimentJoinDbAdapter", + "JobPackagesDbAdapter", + "WrapperJobPackagesDbAdapter", ] diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py index b22b409..061d060 100644 --- a/autosubmit_api/database/adapters/experiment.py +++ b/autosubmit_api/database/adapters/experiment.py @@ -12,6 +12,13 @@ class ExperimentDbAdapter: db_filepath=APIBasicConfig.DB_PATH, ) + def create_table(self): + """ + Create the experiment table. + """ + with self.table_manager.get_connection() as conn: + self.table_manager.create_table(conn) + def get_all(self): """ Return all experiments. diff --git a/autosubmit_api/database/adapters/experiment_details.py b/autosubmit_api/database/adapters/experiment_details.py index f590ae3..ae47122 100644 --- a/autosubmit_api/database/adapters/experiment_details.py +++ b/autosubmit_api/database/adapters/experiment_details.py @@ -12,12 +12,37 @@ class ExperimentDetailsDbAdapter: db_filepath=APIBasicConfig.DB_PATH, ) + def create_table(self): + """ + Create the details table. + """ + with self.table_manager.get_connection() as conn: + self.table_manager.create_table(conn) + def delete_all(self) -> int: + """ + Clear the details table. + """ with self.table_manager.get_connection() as conn: rowcount = self.table_manager.delete_all(conn) return rowcount def insert_many(self, values: List[Dict[str, Any]]) -> int: + """ + Insert many rows into the details table. + """ with self.table_manager.get_connection() as conn: rowcount = self.table_manager.insert_many(conn, values) return rowcount + + def get_by_exp_id(self, exp_id: int) -> Dict[str, Any]: + """ + Get experiment details by the numerical exp_id. + """ + with self.table_manager.get_connection() as conn: + row = conn.execute( + self.table_manager.table.select().where( + tables.DetailsTable.exp_id == exp_id + ) + ).one() + return row._mapping diff --git a/autosubmit_api/database/adapters/experiment_status.py b/autosubmit_api/database/adapters/experiment_status.py index afec31f..122ee16 100644 --- a/autosubmit_api/database/adapters/experiment_status.py +++ b/autosubmit_api/database/adapters/experiment_status.py @@ -1,6 +1,6 @@ from datetime import datetime import os -from typing import Dict +from typing import Dict, List from autosubmit.database.db_manager import create_db_table_manager from sqlalchemy import delete, insert, select from autosubmit_api.config.basicConfig import APIBasicConfig @@ -15,6 +15,13 @@ class ExperimentStatusDbAdapter: db_filepath=os.path.join(APIBasicConfig.DB_DIR, APIBasicConfig.AS_TIMES_DB), ) + def create_table(self): + """ + Create the experiment_status table. 
+ """ + with self.table_manager.get_connection() as conn: + self.table_manager.create_table(conn) + def get_all_dict(self) -> Dict[str, str]: """ Gets table experiment_status as dictionary {expid: status} @@ -26,6 +33,18 @@ class ExperimentStatusDbAdapter: result[row.name] = row.status return result + def get_only_running_expids(self) -> List[str]: + """ + Gets list of running experiments + """ + with self.table_manager.get_connection() as conn: + rows = conn.execute( + select(self.table_manager.table).where( + self.table_manager.table.c.status == "RUNNING" + ) + ).all() + return [row.name for row in rows] + def get_status(self, expid: str) -> str: """ Gets the current status of one experiment diff --git a/autosubmit_api/database/adapters/job_packages.py b/autosubmit_api/database/adapters/job_packages.py new file mode 100644 index 0000000..99a7cde --- /dev/null +++ b/autosubmit_api/database/adapters/job_packages.py @@ -0,0 +1,42 @@ +from typing import Dict, List +from autosubmit.database.db_manager import create_db_table_manager +from sqlalchemy import select + +from autosubmit_api.database import tables +from autosubmit_api.persistance.experiment import ExperimentPaths + + +class JobPackagesDbAdapter: + def __init__(self, expid: str) -> None: + self.expid = expid + self.table_manager = create_db_table_manager( + table=tables.JobPackageTable, + db_filepath=ExperimentPaths(expid).job_packages_db, + schema=expid, + ) + + def get_all(self) -> List[Dict[str, str]]: + """ + Get all job packages. + """ + with self.table_manager.get_connection() as conn: + rows = conn.execute(select(self.table_manager.table)).all() + return [row._mapping for row in rows] + + +class WrapperJobPackagesDbAdapter: + def __init__(self, expid: str) -> None: + self.expid = expid + self.table_manager = create_db_table_manager( + table=tables.WrapperJobPackageTable, + db_filepath=ExperimentPaths(expid).job_packages_db, + schema=expid, + ) + + def get_all(self) -> List[Dict[str, str]]: + """ + Get all job packages. + """ + with self.table_manager.get_connection() as conn: + rows = conn.execute(select(self.table_manager.table)).all() + return [row._mapping for row in rows] diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index 1592a25..c055a8f 100644 --- a/autosubmit_api/database/common.py +++ b/autosubmit_api/database/common.py @@ -23,6 +23,8 @@ def get_postgres_engine(): class AttachedDatabaseConnBuilder(BaseBuilder): """ SQLite utility to build attached databases. + + MUST BE USED ONLY FOR ADAPTERS and TESTS. """ def __init__(self) -> None: @@ -52,6 +54,8 @@ class AttachedDatabaseConnBuilder(BaseBuilder): def create_main_db_conn() -> Connection: """ Connection with the autosubmit and as_times DDBB. + + MUST BE USED ONLY FOR ADAPTERS and TESTS. """ APIBasicConfig.read() if APIBasicConfig.DATABASE_BACKEND == "postgres": @@ -66,6 +70,8 @@ def create_main_db_conn() -> Connection: def create_autosubmit_db_engine() -> Engine: """ Create an engine for the autosubmit DDBB. Usually named autosubmit.db + + MUST BE USED ONLY FOR ADAPTERS and TESTS. """ APIBasicConfig.read() if APIBasicConfig.DATABASE_BACKEND == "postgres": @@ -78,6 +84,8 @@ def create_autosubmit_db_engine() -> Engine: def create_as_times_db_engine() -> Engine: """ Create an engine for the AS_TIMES DDBB. Usually named as_times.db + + MUST BE USED ONLY FOR ADAPTERS and TESTS. 
""" APIBasicConfig.read() diff --git a/autosubmit_api/database/db_jobdata.py b/autosubmit_api/database/db_jobdata.py index e236070..88d64cd 100644 --- a/autosubmit_api/database/db_jobdata.py +++ b/autosubmit_api/database/db_jobdata.py @@ -514,7 +514,7 @@ class JobDataStructure(MainDataBase): self.db_version = self._select_pragma_version() # self.query_job_historic = None # Historic only working on DB 12 now - self.query_job_historic = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id FROM job_data WHERE job_name=? ORDER BY counter DESC" + # self.query_job_historic = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id FROM job_data WHERE job_name=? ORDER BY counter DESC" if self.db_version < DB_VERSION_SCHEMA_CHANGES: try: diff --git a/autosubmit_api/persistance/job_package_reader.py b/autosubmit_api/persistance/job_package_reader.py index 2dbe4ba..e295b33 100644 --- a/autosubmit_api/persistance/job_package_reader.py +++ b/autosubmit_api/persistance/job_package_reader.py @@ -1,13 +1,12 @@ from typing import Dict, List -from sqlalchemy import select +from autosubmit_api.database.adapters import ( + JobPackagesDbAdapter, + WrapperJobPackagesDbAdapter, +) from autosubmit_api.logger import logger -from autosubmit_api.database import tables -from autosubmit_api.database.common import AttachedDatabaseConnBuilder -from autosubmit_api.persistance.experiment import ExperimentPaths class JobPackageReader: - def __init__(self, expid: str) -> None: self.expid = expid self._content: List[Dict] = [] @@ -17,23 +16,13 @@ class JobPackageReader: self._package_to_symbol: Dict[str, str] = {} def read(self): - conn_builder = AttachedDatabaseConnBuilder() - conn_builder.attach_db( - ExperimentPaths(self.expid).job_packages_db, "job_packages" - ) - - with conn_builder.product as conn: - try: - statement = select(tables.JobPackageTable) - self._content = [x._mapping for x in conn.execute(statement).all()] - if len(self._content) == 0: - raise Warning( - "job_packages table empty, trying wrapper_job_packages" - ) - except Exception as exc: - logger.warning(exc) - statement = select(tables.WrapperJobPackageTable) - self._content = [x._mapping for x in conn.execute(statement).all()] + try: + self._content = JobPackagesDbAdapter(self.expid).get_all() + if len(self._content) == 0: + raise Warning("job_packages table empty, trying wrapper_job_packages") + except Exception as exc: + logger.warning(exc) + self._content = WrapperJobPackagesDbAdapter(self.expid).get_all() self._build_job_to_package() self._build_package_to_jobs() diff --git a/autosubmit_api/workers/business/process_graph_drawings.py b/autosubmit_api/workers/business/process_graph_drawings.py index 4a2e6ff..1b095d1 100644 --- a/autosubmit_api/workers/business/process_graph_drawings.py +++ b/autosubmit_api/workers/business/process_graph_drawings.py @@ -1,7 +1,6 @@ import time import traceback -from autosubmit_api.database import tables -from autosubmit_api.database.common import create_as_times_db_engine +from autosubmit_api.database.adapters import ExperimentStatusDbAdapter from autosubmit_api.common import utils as common_utils from autosubmit_api.components.experiment.graph_drawer import ExperimentGraphDrawing from 
autosubmit_api.builders.configuration_facade_builder import ( @@ -12,7 +11,7 @@ from autosubmit_api.builders.joblist_loader_builder import ( JobListLoaderBuilder, JobListLoaderDirector, ) -from typing import List, Any +from typing import List, Any, Optional def process_active_graphs(): @@ -20,14 +19,7 @@ def process_active_graphs(): Process the list of active experiments to generate the positioning of their graphs """ try: - with create_as_times_db_engine().connect() as conn: - query_result = conn.execute( - tables.experiment_status_table.select().where( - tables.experiment_status_table.c.status == "RUNNING" - ) - ).all() - - active_experiments: List[str] = [exp.name for exp in query_result] + active_experiments = ExperimentStatusDbAdapter().get_only_running_expids() for expid in active_experiments: try: @@ -38,7 +30,7 @@ def process_active_graphs(): autosubmit_configuration_facade.get_autosubmit_version() ): _process_graph(expid, autosubmit_configuration_facade.chunk_size) - except Exception as exp: + except Exception: print((traceback.format_exc())) print(("Error while processing: {}".format(expid))) @@ -47,8 +39,7 @@ def process_active_graphs(): print(("Error while processing graph drawing: {}".format(exp))) -def _process_graph(expid, chunk_size): - # type: (str, int) -> List[Any] | None +def _process_graph(expid: str, chunk_size: int) -> Optional[List[Any]]: result = None experimentGraphDrawing = ExperimentGraphDrawing(expid) locked = experimentGraphDrawing.locked -- GitLab From c69a19dd569417e00fca0c985af85964b0eda1a3 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 21 May 2024 10:18:15 +0200 Subject: [PATCH 13/26] fix graph_draw table doesn't exist --- autosubmit_api/components/experiment/graph_drawer.py | 6 +++--- autosubmit_api/database/adapters/graph_draw.py | 7 +++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/autosubmit_api/components/experiment/graph_drawer.py b/autosubmit_api/components/experiment/graph_drawer.py index cb23c59..a461fcf 100644 --- a/autosubmit_api/components/experiment/graph_drawer.py +++ b/autosubmit_api/components/experiment/graph_drawer.py @@ -19,6 +19,7 @@ class ExperimentGraphDrawing: self.expid = expid self.folder_path = APIBasicConfig.LOCAL_ROOT_DIR self.graph_data_db = ExpGraphDrawDBAdapter(expid) + self.graph_data_db.create_table() self.lock_name = "calculation_in_progress.lock" self.current_position_dictionary = None self.current_jobs_set = set() @@ -40,7 +41,7 @@ class ExperimentGraphDrawing: except portalocker.AlreadyLocked: logger.error("It is locked") self.locked = True - except Exception as exp: + except Exception: self.locked = True def get_validated_data(self, allJobs): @@ -72,7 +73,7 @@ class ExperimentGraphDrawing: """ lock_name = ( "calculation_{}_in_progress.lock".format(self.expid) - if independent == True + if independent is True else self.lock_name ) lock_path_file = os.path.join(self.folder_path, lock_name) @@ -104,7 +105,6 @@ class ExperimentGraphDrawing: for u in result.split(b"\n"): splitList = u.split(b" ") if len(splitList) > 1 and splitList[0].decode() == "node": - self.coordinates.append( ( splitList[1].decode(), diff --git a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/adapters/graph_draw.py index 70d649e..34c380f 100644 --- a/autosubmit_api/database/adapters/graph_draw.py +++ b/autosubmit_api/database/adapters/graph_draw.py @@ -13,6 +13,13 @@ class ExpGraphDrawDBAdapter: schema=expid, ) + def create_table(self): + """ + Create the graph data table. 
+ """ + with self.table_manager.get_connection() as conn: + self.table_manager.create_table(conn) + def get_all(self) -> List[Dict[str, Any]]: with self.table_manager.get_connection() as conn: result = self.table_manager.select_all(conn) -- GitLab From 437fcf57664d95bf926cfd5936a904b3075971cb Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Ku Date: Fri, 24 May 2024 10:25:26 +0200 Subject: [PATCH 14/26] use tmpdir for testing --- .../database/adapters/experiment_run.py | 4 +- autosubmit_api/database/tables.py | 7 + tests/conftest.py | 204 +++++++++++++++++- tests/custom_utils.py | 12 -- tests/experiments/.autosubmitrc | 22 -- .../metadata/data/job_data_a007.db | Bin 0 -> 356352 bytes tests/test_auth.py | 2 +- tests/test_config.py | 35 +-- tests/test_database.py | 2 +- tests/test_endpoints_v3.py | 2 - tests/test_endpoints_v4.py | 2 +- tests/test_fixtures.py | 114 ++++++++++ tests/utils.py | 193 +++++++++++++++++ 13 files changed, 519 insertions(+), 80 deletions(-) delete mode 100644 tests/custom_utils.py delete mode 100644 tests/experiments/.autosubmitrc create mode 100644 tests/test_fixtures.py create mode 100644 tests/utils.py diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/adapters/experiment_run.py index 541fc7d..aaed948 100644 --- a/autosubmit_api/database/adapters/experiment_run.py +++ b/autosubmit_api/database/adapters/experiment_run.py @@ -15,14 +15,14 @@ class ExperimentRunDbAdapter: schema=expid, ) - def get_last_run(self) -> Optional[Dict[str,str]]: + def get_last_run(self) -> Optional[Dict[str, str]]: """ Gets last run of the experiment """ with self.table_manager.get_connection() as conn: row = conn.execute( select(self.table_manager.table) - .order_by(tables.ExperimentRunTable.run_id.desc()) + .order_by(tables.experiment_run_table.c.run_id.desc()) .limit(1) ).one_or_none() diff --git a/autosubmit_api/database/tables.py b/autosubmit_api/database/tables.py index 38dd5f4..c28b56e 100644 --- a/autosubmit_api/database/tables.py +++ b/autosubmit_api/database/tables.py @@ -5,11 +5,15 @@ from autosubmit.database.tables import ( ExperimentTable, experiment_run_table, JobDataTable, + ExperimentStructureTable, + table_change_schema, ExperimentStatusTable, JobPackageTable, WrapperJobPackageTable, ) +table_change_schema = table_change_schema + ## SQLAlchemy ORM tables class DetailsTable(BaseTable): """ @@ -59,3 +63,6 @@ wrapper_job_package_table: Table = WrapperJobPackageTable.__table__ # Job Data TABLES job_data_table: Table = JobDataTable.__table__ experiment_run_table: Table = experiment_run_table + +# Structure TABLES +experiment_structure_table: Table = ExperimentStructureTable.__table__ diff --git a/tests/conftest.py b/tests/conftest.py index be699e0..1728ee8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,31 +2,55 @@ # Reference: https://docs.pytest.org/en/latest/reference/fixtures.html#conftest-py-sharing-fixtures-across-multiple-files import os +import tempfile +from typing import Tuple from flask import Flask import pytest -from autosubmitconfigparser.config.basicconfig import BasicConfig from autosubmit_api.app import create_app from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api import config -from tests.custom_utils import custom_return_value +from tests import utils +from sqlalchemy import Engine, create_engine FAKE_EXP_DIR = "./tests/experiments/" +DEFAULT_DATABASE_CONN_URL = ( + "postgresql://postgres:mysecretpassword@localhost:5432/autosubmit_test" +) + + +# FIXTURES #### + +# Config fixtures 
-#### FIXTURES ####
 
 
 @pytest.fixture(autouse=True)
 def fixture_disable_protection(monkeypatch: pytest.MonkeyPatch):
+    """
+    This fixture disables the protection level for all the tests.
+
+    Autouse is set, so there is no need to request this fixture explicitly
+    in the test functions.
+    """
     monkeypatch.setattr(config, "PROTECTION_LEVEL", "NONE")
     monkeypatch.setenv("PROTECTION_LEVEL", "NONE")
 
 
-@pytest.fixture
-def fixture_mock_basic_config(monkeypatch: pytest.MonkeyPatch):
-    # Get APIBasicConfig from file
-    monkeypatch.setenv("AUTOSUBMIT_CONFIGURATION", os.path.join(FAKE_EXP_DIR, ".autosubmitrc"))
+@pytest.fixture(
+    params=[
+        pytest.param("fixture_sqlite", marks=pytest.mark.sqlite),
+        pytest.param("fixture_pg", marks=pytest.mark.pg),
+    ]
+)
+def fixture_mock_basic_config(request: pytest.FixtureRequest):
+    """
+    Sets a mock basic config for the tests, parametrized over both backends.
+    """
+    request.getfixturevalue(request.param)
     yield APIBasicConfig
 
 
+# Flask app fixtures
+
+
 @pytest.fixture
 def fixture_app(fixture_mock_basic_config):
     app = create_app()
@@ -46,3 +70,169 @@ def fixture_client(fixture_app: Flask):
 @pytest.fixture
 def fixture_runner(fixture_app: Flask):
     return fixture_app.test_cli_runner()
+
+
+# Fixtures sqlite
+
+
+@pytest.fixture(scope="session")
+def fixture_temp_dir_copy():
+    """
+    Fixture that copies the contents of the FAKE_EXP_DIR to a temporary
+    directory with rsync.
+    """
+    with tempfile.TemporaryDirectory() as tempdir:
+        # Copy all files recursively
+        os.system(f"rsync -r {FAKE_EXP_DIR} {tempdir}")
+        yield tempdir
+
+
+@pytest.fixture(scope="session")
+def fixture_gen_rc_sqlite(fixture_temp_dir_copy: str):
+    """
+    Fixture that generates a .autosubmitrc file in the temporary directory.
+    """
+    rc_file = os.path.join(fixture_temp_dir_copy, ".autosubmitrc")
+    with open(rc_file, "w") as f:
+        f.write(
+            "\n".join(
+                [
+                    "[database]",
+                    f"path = {fixture_temp_dir_copy}",
+                    "filename = autosubmit.db",
+                    "backend = sqlite",
+                    "[local]",
+                    f"path = {fixture_temp_dir_copy}",
+                    "[globallogs]",
+                    f"path = {fixture_temp_dir_copy}/logs",
+                    "[historicdb]",
+                    f"path = {fixture_temp_dir_copy}/metadata/data",
+                    "[structures]",
+                    f"path = {fixture_temp_dir_copy}/metadata/structures",
+                    "[historiclog]",
+                    f"path = {fixture_temp_dir_copy}/metadata/logs",
+                    "[graph]",
+                    f"path = {fixture_temp_dir_copy}/metadata/graph",
+                ]
+            )
+        )
+    yield fixture_temp_dir_copy
+
+
+@pytest.fixture
+def fixture_sqlite(fixture_gen_rc_sqlite: str, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.setenv(
+        "AUTOSUBMIT_CONFIGURATION", os.path.join(fixture_gen_rc_sqlite, ".autosubmitrc")
+    )
+    yield fixture_gen_rc_sqlite
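+
+# A hedged aside (illustration, not part of the original patch): because
+# fixture_mock_basic_config is parametrized with pytest.param marks, each
+# backend can be selected on the command line once the "sqlite" and "pg"
+# markers are registered, for example via the standard pytest hook:
+#
+#     def pytest_configure(config):
+#         config.addinivalue_line("markers", "sqlite: SQLite-backed tests")
+#         config.addinivalue_line("markers", "pg: PostgreSQL-backed tests")
+#
+# after which `pytest -m sqlite` or `pytest -m pg` runs a single backend.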
{fixture_temp_dir_copy_exclude_db}", + "[globallogs]", + f"path = {fixture_temp_dir_copy_exclude_db}/logs", + "[historicdb]", + f"path = {fixture_temp_dir_copy_exclude_db}/metadata/data", + "[structures]", + f"path = {fixture_temp_dir_copy_exclude_db}/metadata/structures", + "[historiclog]", + f"path = {fixture_temp_dir_copy_exclude_db}/metadata/logs", + "[graph]", + f"path = {fixture_temp_dir_copy_exclude_db}/metadata/graph", + ] + ) + ) + yield fixture_temp_dir_copy_exclude_db + + +@pytest.fixture +def fixture_pg_db(fixture_gen_rc_pg: str): + """ + This fixture cleans and setup a PostgreSQL database for testing purposes. + """ + conn_url = os.environ.get("PYTEST_DATABASE_CONN_URL", DEFAULT_DATABASE_CONN_URL) + engine = create_engine(conn_url) + + with engine.connect() as conn: + utils.setup_pg_db(conn) + conn.commit() + + yield (fixture_gen_rc_pg, engine) + + with engine.connect() as conn: + utils.setup_pg_db(conn) + conn.commit() + + +@pytest.fixture +def fixture_pg_db_copy_all(fixture_pg_db: Tuple[str, Engine]): + """ + This fixture recursively search all the .db files in the FAKE_EXP_DIR and copies them to the test database + """ + engine = fixture_pg_db[1] + # Get .db files absolute paths from the FAKE_EXP_DIR recursively + all_files = [] + for root, dirs, files in os.walk(FAKE_EXP_DIR): + for filepath in files: + if filepath.endswith(".db"): + all_files.append(os.path.join(root, filepath)) + + for filepath in all_files: + # Infer which type of DB is this + if "metadata/structures" in filepath: + utils.copy_structure_db(filepath, engine) + elif "metadata/data" in filepath: + utils.copy_job_data_db(filepath, engine) + elif "metadata/graph" in filepath: + utils.copy_graph_data_db(filepath, engine) + elif "autosubmit.db" in filepath: + utils.copy_autosubmit_db(filepath, engine) + elif "as_times.db" in filepath: + utils.copy_as_times_db(filepath, engine) + elif "pkl/job_packages" in filepath: + utils.copy_job_packages_db(filepath, engine) + + yield fixture_pg_db + + +@pytest.fixture +def fixture_pg( + fixture_pg_db_copy_all: Tuple[str, Engine], monkeypatch: pytest.MonkeyPatch +): + """ + This fixture cleans and setup a PostgreSQL database for testing purposes. 
+ """ + monkeypatch.setenv( + "AUTOSUBMIT_CONFIGURATION", + os.path.join(fixture_pg_db_copy_all[0], ".autosubmitrc"), + ) + yield fixture_pg_db_copy_all[0] diff --git a/tests/custom_utils.py b/tests/custom_utils.py deleted file mode 100644 index 9148a98..0000000 --- a/tests/custom_utils.py +++ /dev/null @@ -1,12 +0,0 @@ -from http import HTTPStatus - - -def dummy_response(*args, **kwargs): - return "Hello World!", HTTPStatus.OK - - -def custom_return_value(value=None): - def blank_func(*args, **kwargs): - return value - - return blank_func diff --git a/tests/experiments/.autosubmitrc b/tests/experiments/.autosubmitrc deleted file mode 100644 index 4b894ee..0000000 --- a/tests/experiments/.autosubmitrc +++ /dev/null @@ -1,22 +0,0 @@ -[database] -path = ./tests/experiments/ -filename = autosubmit.db -backend = sqlite - -[local] -path = ./tests/experiments/ - -[globallogs] -path = ./tests/experiments/logs - -[historicdb] -path = ./tests/experiments/metadata/data - -[structures] -path = ./tests/experiments/metadata/structures - -[historiclog] -path = ./tests/experiments/metadata/logs - -[graph] -path = ./tests/experiments/metadata/graph \ No newline at end of file diff --git a/tests/experiments/metadata/data/job_data_a007.db b/tests/experiments/metadata/data/job_data_a007.db index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..7fcd6487e0efd128f66e1889e268c268079f8b1b 100755 GIT binary patch literal 356352 zcmeEv37lL-oqu05({uE6LK1R7$S|A{lJq%O0;Dt3NgC#&rzZyyn#rVdz+@(wnM5Qi z%%FJUiDKe?EH1c$$0~cR|Qnm_4u#u>bf55|5v{{Ue$Z`YIier2y1e(mbfsnQ#N@$|>6Ub0K_D0mY;0)>1nT}L5D09C#Q(zZ zj|l$){|`bsMf$G!Cmdt#(U-?!2Ld(G&jn(iiXE;$HP!+Dw11WX%YbFTGGH073|I#K zw+!^v)Gj@1W$?g(@jaF63O(gZ2R4=abA9=1CU%$ij7*Qzb{F%xQog0o-;>|bB2{P^ z=x^bb&L{sEA30cA{gPGTx}_^u24B5!dSv%lg;(JJkFcYaa+`Yd;^!^rH?-j2fjw}Z zQhrOm*wQ~xYUv;D?QIz>7W#6|I7VN-zjPt{(dfjH@#)H>=a;;k zUt3ElzXOiRj>}6&Co3b^C zr;kkOuWEAQIn&QQRPp`o_~;?{shda+_&FnEW20jequ1&Wet5#?td;S~SKQL(#nx@7Xs$4fc zIf9K}|MT(ji9HoB!7V-c&AH*;QcHp#X!6LobRM4DH*#HZXs87qt6tF>o!_n%zs?=4 zR8?={$h7`CE0aFNLIZ-vN!pjYkvP}EuXc3*fw4W46?*%4Ca1Ry=RU+v&&!7U3zrY) z&u35Nh5YGT&7RiP>ms#FyH^GS*x^ha9s?(;JaS}u0>77eM^R4lf7L(_7bU&iV5oNK z1s4Q&V#mUMI8`}(q%uBQaeWOd-Hfa7e9}=}2)|tY!meQ5($%YjFIk7jtXy}fGI`)& zWqi5}cTW8lQjVyWiSA3i-_FgQy9@hYbVBp!W%w-`B@b5l$T)Of{tsSx>cI8>eaz^@ z!9!ye(s8<*5I@QE#PrCRZ|UBV17n6C!(BacV0@qN&w=T2Q#tLLO_w)yWa>}_Y@Qc@ z_Zc`?nI0jw5ZU7Gn6%4uoVfCVZSmj5e;xlt{HO6B#vhM=JO0n{e~f=Q{`vT)<0s?y z$3Gl@fBe1i+vD$uzd3$e{I&60;xCEc6n{bdx$)`vMEsg~C4O~$SA1Li@_282OMFxO zqIgF<9bX$?9X~t1D!wAVB)%Zt7_X(LBB!%|mI2FvWxz6E8L$jk1}p=X0n318z%pPN z`13L_oQCrarQoBOgpa`leDtq_kKVQLap@ZP*m@y+Y`FkF^3Q^gp4ITN>3sOeod+M6 zoC_Zpp93Ekoeduw&VrBit?{&Kg)n+ zz%pPNunbrRECZGS%YbFTGGH073|I#KvKcrf7+xJzH60naVwXfe77DKnaaBt2Fc}m$ zHGUI8w%s-V|Bd)pVN~EVF#CUB{6q2g#oq&S|8I)FKK`osOX&~DkL;giz%pPNunbrR zECZGS%YbFTGGH073|Iy%1JyE6njM9zm(LdAvu}12d=_Sj9%1t=Q4w^{5+8o!ESa(I zoSB5rwwV$5OwU{npNW|qd|o(1qzmWF5Wzv~43P<}nhC<^>4)KCIjMarseB5l+e`!w zjVc3P53 zz+W=}2U0Fc^%IzYoS!iBx(` zGO;GrwkFx$l}L3#2G43=lj!V9CA(6oO4kj~tE}Ji%Yzf+)B7XQ=&{!Bf&R^fEv;QG z$6Cp{&_lzU`U<7;wtR7@FwhU#t(monwVm143tL+Iayz!?3Z+8-7FgbT2+C3k$Sn<& za=miKP;PU+v~#F8pC2q0`tndON$M5zrD7r1OMaJlJ|3;m3;#aW3TsIhHW#{cB{(4| z-8(WiRlx(pm+>QGW2E*_X`q#mN5P*zhkh_Lw-j3DDu0lgWb7e_g0bx3ptOBwUS>ztvoE14GUxAk9uIy?%u)> z`O?ZeFQg*hw+SdB+0d2YRcsyZzig;*MIKh}#p+10QUbLzaEkunzHY1x-&61%zd&b5 z4uj7;vqYa1*MT$=Bu7smcw=3IX6?6TR6*;$ipby#wzeIi((pm<3hI{kK1gQ_VzI$tV2--+* zzAO&K@5j(KaysV{ZD$6Cq1%KUJo zdGk|X0cojaITtd@L*2!~U};FOiAOHxi^KGC`M-F}#P4K{ciuFxJ~=X8r!@GD-^qE& zHAwkur}iVey}8onfnuL1C7E#6e&CPfO~9heB&#Ps2u&j2-(5hakCD5X>*;}KaiAZ@ 
diff --git a/tests/test_auth.py b/tests/test_auth.py
index 5fef206..9bfc5ee 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -6,7 +6,7 @@ from autosubmit_api import auth
 from autosubmit_api.auth.utils import validate_client
 from autosubmit_api.config.basicConfig import APIBasicConfig
 from autosubmit_api import config
-from tests.custom_utils import custom_return_value, dummy_response
+from tests.utils import custom_return_value, dummy_response
 
 
 class TestCommonAuth:
diff --git a/tests/test_config.py b/tests/test_config.py
index 64b1245..748da2b 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -6,39 +6,10 @@ from autosubmit_api.config.basicConfig import APIBasicConfig
 from autosubmit_api.config.config_common import AutosubmitConfigResolver
 from autosubmit_api.config.ymlConfigStrategy import ymlConfigStrategy
 
-from tests.conftest import FAKE_EXP_DIR
-from tests.custom_utils import custom_return_value
+from tests.utils import custom_return_value
 
 
-class TestBasicConfig:
-    def test_api_basic_config(self, fixture_mock_basic_config):
-        APIBasicConfig.read()
-
-        assert os.getenv("AUTOSUBMIT_CONFIGURATION") == os.path.join(
-            FAKE_EXP_DIR, ".autosubmitrc"
-        )
-        assert APIBasicConfig.LOCAL_ROOT_DIR == FAKE_EXP_DIR
-        assert APIBasicConfig.DB_FILE == "autosubmit.db"
-        assert APIBasicConfig.DB_PATH == os.path.join(
-            FAKE_EXP_DIR, APIBasicConfig.DB_FILE
-        )
-        assert APIBasicConfig.AS_TIMES_DB == "as_times.db"
-        assert APIBasicConfig.JOBDATA_DIR == os.path.join(
-            FAKE_EXP_DIR, "metadata", "data"
-        )
-        assert APIBasicConfig.GLOBAL_LOG_DIR == os.path.join(FAKE_EXP_DIR, "logs")
-        assert APIBasicConfig.STRUCTURES_DIR == os.path.join(
-            FAKE_EXP_DIR, "metadata", "structures"
-        )
-        assert APIBasicConfig.HISTORICAL_LOG_DIR == os.path.join(
-            FAKE_EXP_DIR, "metadata", "logs"
"metadata", "logs" - ) - - assert APIBasicConfig.GRAPHDATA_DIR == os.path.join( - FAKE_EXP_DIR, "metadata", "graph" - ) - class TestConfigResolver: def test_simple_init(self, monkeypatch: pytest.MonkeyPatch): # Conf test decision @@ -61,7 +32,7 @@ class TestConfigResolver: class TestYMLConfigStrategy: def test_exclusive(self, fixture_mock_basic_config): wrapper = ymlConfigStrategy("a007", fixture_mock_basic_config) - assert True == wrapper.get_exclusive(JobSection.SIM) + assert True is wrapper.get_exclusive(JobSection.SIM) wrapper = ymlConfigStrategy("a003", fixture_mock_basic_config) - assert False == wrapper.get_exclusive(JobSection.SIM) + assert False is wrapper.get_exclusive(JobSection.SIM) diff --git a/tests/test_database.py b/tests/test_database.py index 518523b..632c053 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -13,7 +13,7 @@ def count_pid_lsof(pid): class TestDatabase: - def test_open_files(self, fixture_mock_basic_config): + def test_open_files(self, fixture_sqlite): current_pid = os.getpid() counter = count_pid_lsof(current_pid) diff --git a/tests/test_endpoints_v3.py b/tests/test_endpoints_v3.py index b5d0113..85056ec 100644 --- a/tests/test_endpoints_v3.py +++ b/tests/test_endpoints_v3.py @@ -14,7 +14,6 @@ class TestLogin: def test_not_allowed_client( self, fixture_client: FlaskClient, - fixture_mock_basic_config: APIBasicConfig, monkeypatch: pytest.MonkeyPatch, ): monkeypatch.setattr(APIBasicConfig, "ALLOWED_CLIENTS", []) @@ -28,7 +27,6 @@ class TestLogin: def test_redirect( self, fixture_client: FlaskClient, - fixture_mock_basic_config: APIBasicConfig, monkeypatch: pytest.MonkeyPatch, ): random_referer = str(f"https://${str(uuid4())}/") diff --git a/tests/test_endpoints_v4.py b/tests/test_endpoints_v4.py index dc83894..6e7fb38 100644 --- a/tests/test_endpoints_v4.py +++ b/tests/test_endpoints_v4.py @@ -7,7 +7,7 @@ import jwt import pytest from autosubmit_api import config from autosubmit_api.views.v4 import PAGINATION_LIMIT_DEFAULT, ExperimentJobsViewOptEnum -from tests.custom_utils import custom_return_value +from tests.utils import custom_return_value class TestCASV2Login: diff --git a/tests/test_fixtures.py b/tests/test_fixtures.py new file mode 100644 index 0000000..75809d4 --- /dev/null +++ b/tests/test_fixtures.py @@ -0,0 +1,114 @@ +import os +from typing import Tuple + +import pytest +from sqlalchemy import Engine, select + +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database import tables +from tests.utils import get_schema_names + + +class TestSQLiteFixtures: + def test_fixture_temp_dir_copy(self, fixture_temp_dir_copy: str): + """ + Test if all the files are copied from FAKEDIR to the temporary directory + """ + FILES_SHOULD_EXIST = [ + "a003/conf/minimal.yml", + "metadata/data/job_data_a007.db", + ] + for file in FILES_SHOULD_EXIST: + assert os.path.exists(os.path.join(fixture_temp_dir_copy, file)) + + def test_fixture_gen_rc_sqlite(self, fixture_gen_rc_sqlite: str): + """ + Test if the .autosubmitrc file is generated and the environment variable is set + """ + rc_file = os.path.join(fixture_gen_rc_sqlite, ".autosubmitrc") + + # File should exist + assert os.path.exists(rc_file) + + with open(rc_file, "r") as f: + content = f.read() + assert "[database]" in content + assert f"path = {fixture_gen_rc_sqlite}" in content + assert "filename = autosubmit.db" in content + assert "backend = sqlite" in content + + @pytest.mark.skip(reason="TODO: Fix this test") + def test_mock_basic_config( + self, 
fixture_mock_basic_config: APIBasicConfig, fixture_gen_rc_sqlite: str + ): + rc_file = os.path.join(fixture_gen_rc_sqlite, ".autosubmitrc") + # Environment variable should be set and should point to the .autosubmitrc file + assert "AUTOSUBMIT_CONFIGURATION" in os.environ and os.path.exists( + os.environ["AUTOSUBMIT_CONFIGURATION"] + ) + assert os.environ["AUTOSUBMIT_CONFIGURATION"] == rc_file + + # Reading the configuration file + APIBasicConfig.read() + assert APIBasicConfig.GRAPHDATA_DIR == f"{fixture_gen_rc_sqlite}/metadata/graph" + assert APIBasicConfig.LOCAL_ROOT_DIR == fixture_gen_rc_sqlite + assert APIBasicConfig.DATABASE_BACKEND == "sqlite" + assert APIBasicConfig.DB_DIR == fixture_gen_rc_sqlite + assert APIBasicConfig.DB_FILE == "autosubmit.db" + + +class TestPostgresFixtures: + def test_fixture_temp_dir_copy_exclude_db( + self, fixture_temp_dir_copy_exclude_db: str + ): + """ + Test if all the files are copied from FAKEDIR to the temporary directory except .db files + """ + FILES_SHOULD_EXIST = [ + "a003/conf/minimal.yml", + ] + FILES_SHOULD_EXCLUDED = ["metadata/data/job_data_a007.db"] + for file in FILES_SHOULD_EXIST: + assert os.path.exists(os.path.join(fixture_temp_dir_copy_exclude_db, file)) + + for file in FILES_SHOULD_EXCLUDED: + assert not os.path.exists( + os.path.join(fixture_temp_dir_copy_exclude_db, file) + ) + + def test_fixture_gen_rc_postgres(self, fixture_gen_rc_pg: str): + """ + Test if the .autosubmitrc file is generated and the environment variable is set + """ + rc_file = os.path.join(fixture_gen_rc_pg, ".autosubmitrc") + + # File should exist + assert os.path.exists(rc_file) + + with open(rc_file, "r") as f: + content = f.read() + assert "[database]" in content + assert "backend = postgres" in content + assert "postgresql://" in content + assert fixture_gen_rc_pg in content + + def test_fixture_pg_db(self, fixture_pg_db: Tuple[str, Engine]): + engine = fixture_pg_db[1] + + # Check if the public schema exists and is the only one + with engine.connect() as conn: + schema_names = get_schema_names(conn) + assert schema_names == ["public"] + + def test_fixture_pg_db_copy_all(self, fixture_pg_db_copy_all: Tuple[str, Engine]): + engine = fixture_pg_db_copy_all[1] + + # Check if the experiment and details tables are copied + with engine.connect() as conn: + exp_rows = conn.execute(select(tables.ExperimentTable)).all() + details_rows = conn.execute(select(tables.DetailsTable)).all() + + assert len(exp_rows) > 0 + assert len(details_rows) > 0 + + # TODO: Check if the other tables are copied diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..186a17e --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,193 @@ +from http import HTTPStatus +import re +from typing import List +from sqlalchemy import Connection, Engine, create_engine, insert, select, text + +from autosubmit_api.database import tables +from sqlalchemy.schema import CreateSchema, CreateTable + + +def dummy_response(*args, **kwargs): + return "Hello World!", HTTPStatus.OK + + +def custom_return_value(value=None): + def blank_func(*args, **kwargs): + return value + + return blank_func + + +def get_schema_names(conn: Connection) -> List[str]: + """ + Get all schema names that are not from the system + """ + results = conn.execute( + text( + "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT LIKE 'pg_%' AND schema_name != 'information_schema'" + ) + ).all() + return [res[0] for res in results] + + +def setup_pg_db(conn: Connection): + """ + Resets database by dropping 
all schemas except the system ones and restoring the public schema + """ + # Get all schema names that are not from the system + schema_names = get_schema_names(conn) + + # Drop all schemas + for schema_name in schema_names: + conn.execute(text(f'DROP SCHEMA IF EXISTS "{schema_name}" CASCADE')) + + # Restore default public schema + conn.execute(text("CREATE SCHEMA public")) + conn.execute(text("GRANT ALL ON SCHEMA public TO public")) + conn.execute(text("GRANT ALL ON SCHEMA public TO postgres")) + + +def copy_structure_db(filepath: str, engine: Engine): + """ + This function copies the content of the FAKE_EXP_DIR/metadata/structures to the Postgres database + """ + # Get the xxxx from structure_xxxx.db with regex + match = re.search(r"structure_(\w+)\.db", filepath) + expid = match.group(1) + + # Get SQLite source data + source_as_db = create_engine(f"sqlite:///{filepath}") + with source_as_db.connect() as source_conn: + structures_rows = source_conn.execute( + select(tables.ExperimentStructureTable) + ).all() + + # Copy data to the Postgres database + with engine.connect() as conn: + conn.execute(CreateSchema(expid, if_not_exists=True)) + target_table = tables.table_change_schema( + expid, tables.ExperimentStructureTable + ) + conn.execute(CreateTable(target_table, if_not_exists=True)) + if len(structures_rows) > 0: + conn.execute( + insert(target_table), [row._mapping for row in structures_rows] + ) + conn.commit() + + +def copy_job_data_db(filepath: str, engine: Engine): + """ + This function copies the content of the FAKE_EXP_DIR/metadata/data to the Postgres database + """ + # Get the xxxx from job_data_xxxx.db with regex + match = re.search(r"job_data_(\w+)\.db", filepath) + expid = match.group(1) + # Get SQLite source data + source_as_db = create_engine(f"sqlite:///{filepath}") + with source_as_db.connect() as source_conn: + job_data_rows = source_conn.execute(select(tables.JobDataTable)).all() + exprun_rows = source_conn.execute(select(tables.experiment_run_table)).all() + + # Copy data to the Postgres database + with engine.connect() as conn: + conn.execute(CreateSchema(expid, if_not_exists=True)) + # Job data + target_table = tables.table_change_schema(expid, tables.JobDataTable) + conn.execute(CreateTable(target_table, if_not_exists=True)) + if len(job_data_rows) > 0: + conn.execute(insert(target_table),[row._mapping for row in job_data_rows]) + # Experiment run + target_table = tables.table_change_schema(expid, tables.experiment_run_table) + conn.execute(CreateTable(target_table, if_not_exists=True)) + if len(exprun_rows) > 0: + conn.execute(insert(target_table),[row._mapping for row in exprun_rows]) + conn.commit() + + +def copy_graph_data_db(filepath: str, engine: Engine): + """ + This function copies the content of the FAKE_EXP_DIR/metadata/graph to the Postgres database + """ + # Get the xxxx from graph_xxxx.db with regex + match = re.search(r"graph_data_(\w+)\.db", filepath) + expid = match.group(1) + + # Get SQLite source data + source_as_db = create_engine(f"sqlite:///{filepath}") + with source_as_db.connect() as source_conn: + graph_rows = source_conn.execute(select(tables.GraphDataTable)).all() + + # Copy data to the Postgres database + with engine.connect() as conn: + conn.execute(CreateSchema(expid, if_not_exists=True)) + target_table = tables.table_change_schema(expid, tables.GraphDataTable) + conn.execute(CreateTable(target_table, if_not_exists=True)) + if len(graph_rows) > 0: + conn.execute(insert(target_table),[row._mapping for row in graph_rows]) + conn.commit() 
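+
+# A generic sketch of the pattern shared by the copy_* helpers in this module:
+# read every row from the SQLite source file, create the target schema/table
+# in Postgres on demand, then bulk-insert the row mappings. `copy_table_db`
+# and its parameters are hypothetical, shown for illustration only; it assumes
+# `source_table` is a SQLAlchemy Core Table and is not called by the helpers
+# in this module.
+def copy_table_db(filepath: str, engine: Engine, source_table, schema: str = None):
+    # Read all rows from the SQLite source
+    source_db = create_engine(f"sqlite:///{filepath}")
+    with source_db.connect() as source_conn:
+        rows = source_conn.execute(select(source_table)).all()
+
+    # Copy into the target database, creating schema and table if missing
+    with engine.connect() as conn:
+        target_table = source_table
+        if schema:
+            conn.execute(CreateSchema(schema, if_not_exists=True))
+            target_table = tables.table_change_schema(schema, source_table)
+        conn.execute(CreateTable(target_table, if_not_exists=True))
+        if len(rows) > 0:
+            conn.execute(insert(target_table), [row._mapping for row in rows])
+        conn.commit()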
+ + +def copy_autosubmit_db(filepath: str, engine: Engine): + """ + This function copies the content of the FAKE_EXP_DIR/autosubmit.db to the Postgres database + """ + # Get SQLite source data + source_as_db = create_engine(f"sqlite:///{filepath}") + with source_as_db.connect() as source_conn: + exp_rows = source_conn.execute(select(tables.ExperimentTable)).all() + details_rows = source_conn.execute(select(tables.DetailsTable)).all() + + # Copy data to the Postgres database + with engine.connect() as conn: + conn.execute(CreateTable(tables.ExperimentTable.__table__, if_not_exists=True)) + conn.execute(insert(tables.ExperimentTable),[row._mapping for row in exp_rows]) + conn.execute(CreateTable(tables.DetailsTable.__table__, if_not_exists=True)) + conn.execute(insert(tables.DetailsTable),[row._mapping for row in details_rows]) + conn.commit() + + +def copy_as_times_db(filepath: str, engine: Engine): + """ + This function copies the content of the FAKE_EXP_DIR/as_times.db to the Postgres database + """ + # Get SQLite source data + source_as_db = create_engine(f"sqlite:///{filepath}") + with source_as_db.connect() as source_conn: + as_times_rows = source_conn.execute(select(tables.ExperimentStatusTable)).all() + + # Copy data to the Postgres database + with engine.connect() as conn: + conn.execute(CreateTable(tables.ExperimentStatusTable.__table__, if_not_exists=True)) + conn.execute(insert(tables.ExperimentStatusTable),[row._mapping for row in as_times_rows]) + conn.commit() + + +def copy_job_packages_db(filepath: str, engine: Engine): + """ + This function copies the content of the FAKE_EXP_DIR/pkl/job_packages to the Postgres database + """ + # Get the xxxx from job_packages_xxxx.db with regex + match = re.search(r"job_packages_(\w+)\.db", filepath) + expid = match.group(1) + + # Get SQLite source data + source_as_db = create_engine(f"sqlite:///{filepath}") + with source_as_db.connect() as source_conn: + job_packages_rows = source_conn.execute(select(tables.JobPackageTable)).all() + wrapper_job_packages_rows = source_conn.execute(select(tables.WrapperJobPackageTable)).all() + + # Copy data to the Postgres database + with engine.connect() as conn: + conn.execute(CreateSchema(expid, if_not_exists=True)) + # Job packages + target_table = tables.table_change_schema(expid, tables.JobPackageTable) + conn.execute(CreateTable(target_table, if_not_exists=True)) + if len(job_packages_rows) > 0: + conn.execute(insert(target_table),[row._mapping for row in job_packages_rows]) + # Wrapper job packages + target_table = tables.table_change_schema(expid, tables.WrapperJobPackageTable) + conn.execute(CreateTable(target_table, if_not_exists=True)) + if len(wrapper_job_packages_rows) > 0: + conn.execute(insert(target_table),[row._mapping for row in wrapper_job_packages_rows]) + conn.commit() -- GitLab From c38ee16cf424554a9a15b7fd00d8e1d2faa02469 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 24 May 2024 12:39:13 +0200 Subject: [PATCH 15/26] refactor jobdata module --- .../autosubmit_legacy/job/job_list.py | 4 +- .../db_jobdata.py => components/jobdata.py} | 390 +++++------------- .../components/jobs/joblist_helper.py | 2 +- autosubmit_api/database/adapters/__init__.py | 2 + .../database/adapters/experiment_run.py | 21 +- autosubmit_api/database/adapters/job_data.py | 32 ++ autosubmit_api/experiment/common_requests.py | 2 +- 7 files changed, 157 insertions(+), 296 deletions(-) rename autosubmit_api/{database/db_jobdata.py => components/jobdata.py} (57%) create mode 100644 
autosubmit_api/database/adapters/job_data.py
diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py
index 8cec17e..40b3707 100644
--- a/autosubmit_api/autosubmit_legacy/job/job_list.py
+++ b/autosubmit_api/autosubmit_legacy/job/job_list.py
@@ -38,7 +38,7 @@ from autosubmit_api.common.utils import Status
 from bscearth.utils.date import date2str, parse_date
 # from autosubmit_legacy.job.tree import Tree
 from autosubmit.database import db_structure as DbStructure
-from autosubmit_api.database.db_jobdata import JobDataStructure, JobRow
+from autosubmit_api.components.jobdata import JobDataStructure, JobRow
 from autosubmit_api.builders.experiment_history_builder import ExperimentHistoryDirector, ExperimentHistoryBuilder
 from autosubmit_api.history.data_classes.job_data import JobData
@@ -114,7 +114,7 @@ class JobList:
         else:
             raise Exception("Autosubmit couldn't find the experiment header information necessary to complete this request.")
         job_list = job_data_structure.get_current_job_data(
-            run_id, all_states=True)
+            run_id)
         if not job_list:
             return [], [], {}
         else:
diff --git a/autosubmit_api/database/db_jobdata.py b/autosubmit_api/components/jobdata.py
similarity index 57%
rename from autosubmit_api/database/db_jobdata.py
rename to autosubmit_api/components/jobdata.py
index 88d64cd..ea14e2e 100644
--- a/autosubmit_api/database/db_jobdata.py
+++ b/autosubmit_api/components/jobdata.py
@@ -17,18 +17,16 @@
 # You should have received a copy of the GNU General Public License
 # along with Autosubmit. If not, see <http://www.gnu.org/licenses/>.
 
-import os
 import time
-import textwrap
 import traceback
-import sqlite3
 import collections
 from datetime import datetime, timedelta
 from json import loads
-from time import mktime
 from autosubmit_api.components.jobs.utils import generate_job_html_title
 # from networkx import DiGraph
 from autosubmit_api.config.basicConfig import APIBasicConfig
+from autosubmit_api.database.adapters import ExperimentRunDbAdapter
+from autosubmit_api.database.adapters.job_data import JobDataDbAdapter
 from autosubmit_api.monitor.monitor import Monitor
 from autosubmit_api.performance.utils import calculate_ASYPD_perjob
 from autosubmit_api.components.jobs.job_factory import SimJob
@@ -37,7 +35,6 @@ from autosubmit_api.common.utils import get_jobs_with_no_outliers, Status, datec
 # import autosubmitAPIwu.experiment.common_db_requests as DbRequests
 from bscearth.utils.date import Log
-from autosubmit_api.persistance.experiment import ExperimentPaths
 
 # Version 15 includes out err MaxRSS AveRSS and rowstatus
@@ -200,9 +197,9 @@ class JobData(object):
         self.job_id = job_id if job_id else 0
         try:
             self.extra_data = loads(extra_data)
-        except Exception as exp:
+        except Exception:
             self.extra_data = ""
-            pass
+
         self.nnodes = nnodes
         self.run_id = run_id
         self.MaxRSS = MaxRSS
@@ -424,70 +421,7 @@ class JobData(object):
         self._energy = energy if energy else 0
 
-class MainDataBase():
-    def __init__(self, expid):
-        self.expid = expid
-        self.conn = None
-        self.conn_ec = None
-        self.create_table_query = None
-        self.db_version = None
-
-    def create_connection(self, db_file):
-        """
-        Create a database connection to the SQLite database specified by db_file.
- :param db_file: database file name - :return: Connection object or None - """ - try: - conn = sqlite3.connect(db_file) - return conn - except: - return None - - def create_table(self): - """ create a table from the create_table_sql statement - :param conn: Connection object - :param create_table_sql: a CREATE TABLE statement - :return: - """ - try: - if self.conn: - c = self.conn.cursor() - c.execute(self.create_table_query) - self.conn.commit() - else: - raise IOError("Not a valid connection") - except IOError as exp: - Log.warning(exp) - return None - except sqlite3.Error as e: - if _debug == True: - Log.info(traceback.format_exc()) - Log.warning("Error on create table : " + str(type(e).__name__)) - return None - - def create_index(self): - """ Creates index from statement defined in child class - """ - try: - if self.conn: - c = self.conn.cursor() - c.execute(self.create_index_query) - self.conn.commit() - else: - raise IOError("Not a valid connection") - except IOError as exp: - Log.warning(exp) - return None - except sqlite3.Error as e: - if _debug == True: - Log.info(traceback.format_exc()) - Log.debug(str(type(e).__name__)) - Log.warning("Error on create index . create_index") - return None - - -class JobDataStructure(MainDataBase): +class JobDataStructure: def __init__(self, expid: str, basic_config: APIBasicConfig): """Initializes the object based on the unique identifier of the experiment. @@ -495,36 +429,13 @@ class JobDataStructure(MainDataBase): Args: expid (str): Experiment identifier """ - MainDataBase.__init__(self, expid) - # BasicConfig.read() - # self.expid = expid - self.folder_path = basic_config.JOBDATA_DIR - exp_paths = ExperimentPaths(expid) - self.database_path = exp_paths.job_data_db - # self.conn = None - self.db_version = None - # self.jobdata_list = JobDataList(self.expid) - self.create_index_query = textwrap.dedent(''' - CREATE INDEX IF NOT EXISTS ID_JOB_NAME ON job_data(job_name); - ''') - if not os.path.exists(self.database_path): - self.conn = None - else: - self.conn = self.create_connection(self.database_path) - self.db_version = self._select_pragma_version() - # self.query_job_historic = None - # Historic only working on DB 12 now - # self.query_job_historic = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id FROM job_data WHERE job_name=? ORDER BY counter DESC" - - if self.db_version < DB_VERSION_SCHEMA_CHANGES: - try: - self.create_index() - except Exception as exp: - print(exp) - pass + self.db_version = 99 # Previous versions are unsupported + + self.exp_run_db = ExperimentRunDbAdapter(expid) + self.job_data_db = JobDataDbAdapter(expid) def __str__(self): - return '{} {}'.format("Data structure. 
Version:", self.db_version) + return "Data structure" def get_max_id_experiment_run(self): """ @@ -533,218 +444,121 @@ class JobDataStructure(MainDataBase): :rtype: ExperimentRun object """ try: - # expe = list() - if not os.path.exists(self.database_path): - raise Exception("Job data folder not found {0} or the database version is outdated.".format(str(self.database_path))) - if self.db_version < DB_VERSION_SCHEMA_CHANGES: - print(("Job database version {0} outdated.".format(str(self.db_version)))) - if os.path.exists(self.database_path) and self.db_version >= DB_VERSION_SCHEMA_CHANGES: - modified_time = int(os.stat(self.database_path).st_mtime) - current_experiment_run = self._get_max_id_experiment_run() - if current_experiment_run: - exprun_item = ExperimentRunItem_14( - *current_experiment_run) if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES else ExperimentRunItem(*current_experiment_run) - return ExperimentRun(exprun_item.run_id, exprun_item.created, exprun_item.start, exprun_item.finish, exprun_item.chunk_unit, exprun_item.chunk_size, exprun_item.completed, exprun_item.total, exprun_item.failed, exprun_item.queuing, exprun_item.running, exprun_item.submitted, exprun_item.suspended if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES else 0, exprun_item.metadata if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES else "", modified_time) - else: - return None + current_experiment_run = self.exp_run_db.get_last_run() + + if current_experiment_run: + return ExperimentRun( + run_id=current_experiment_run.get("run_id"), + created=current_experiment_run.get("created"), + start=current_experiment_run.get("start"), + finish=current_experiment_run.get("finish"), + chunk_unit=current_experiment_run.get("chunk_unit"), + chunk_size=current_experiment_run.get("chunk_size"), + completed=current_experiment_run.get("completed"), + total=current_experiment_run.get("total"), + failed=current_experiment_run.get("failed"), + queuing=current_experiment_run.get("queuing"), + running=current_experiment_run.get("running"), + submitted=current_experiment_run.get("submitted"), + suspended=current_experiment_run.get("suspended", 0), + metadata=current_experiment_run.get("metadata", ""), + modified=current_experiment_run.get("modified"), + ) else: - raise Exception("Job data folder not found {0} or the database version is outdated.".format( - str(self.database_path))) + return None except Exception as exp: print((str(exp))) print((traceback.format_exc())) return None - def get_experiment_run_by_id(self, run_id): + def get_experiment_run_by_id(self, run_id: int): """ Get experiment run stored in database by run_id """ try: - # expe = list() - if os.path.exists(self.folder_path) and self.db_version >= DB_VERSION_SCHEMA_CHANGES: - result = None - current_experiment_run = self._get_experiment_run_by_id(run_id) - if current_experiment_run: - # for run in current_experiment_run: - exprun_item = ExperimentRunItem_14( - *current_experiment_run) if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES else ExperimentRunItem(*current_experiment_run) - result = ExperimentRun(exprun_item.run_id, exprun_item.created, exprun_item.start, exprun_item.finish, exprun_item.chunk_unit, exprun_item.chunk_size, exprun_item.completed, exprun_item.total, exprun_item.failed, exprun_item.queuing, - exprun_item.running, exprun_item.submitted, exprun_item.suspended if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES else 0, exprun_item.metadata if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES else "") - 
return result - else: - return None + current_experiment_run = self.exp_run_db.get_run_by_id(run_id) + + if current_experiment_run: + return ExperimentRun( + run_id=current_experiment_run.get("run_id"), + created=current_experiment_run.get("created"), + start=current_experiment_run.get("start"), + finish=current_experiment_run.get("finish"), + chunk_unit=current_experiment_run.get("chunk_unit"), + chunk_size=current_experiment_run.get("chunk_size"), + completed=current_experiment_run.get("completed"), + total=current_experiment_run.get("total"), + failed=current_experiment_run.get("failed"), + queuing=current_experiment_run.get("queuing"), + running=current_experiment_run.get("running"), + submitted=current_experiment_run.get("submitted"), + suspended=current_experiment_run.get("suspended", 0), + metadata=current_experiment_run.get("metadata", ""), + modified=current_experiment_run.get("modified"), + ) else: - raise Exception("Job data folder not found {0} or the database version is outdated.".format( - str(self.database_path))) - except Exception as exp: - if _debug == True: + return None + except Exception as exc: + if _debug is True: Log.info(traceback.format_exc()) Log.debug(traceback.format_exc()) Log.warning( - "Autosubmit couldn't retrieve experiment run. get_experiment_run_by_id. Exception {0}".format(str(exp))) + "Autosubmit couldn't retrieve experiment run. get_experiment_run_by_id. Exception {0}".format(str(exc))) return None - def get_current_job_data(self, run_id, all_states=False): + def get_current_job_data(self, run_id: int): """ Gets the job historical data for a run_id. :param run_id: Run identifier :type run_id: int - :param all_states: False if only last=1 should be included, otherwise all rows :return: List of jobdata rows :rtype: list() of JobData objects """ try: - current_collection = [] - if self.db_version < DB_VERSION_SCHEMA_CHANGES: - raise Exception("This function requieres a newer DB version.") - if os.path.exists(self.folder_path): - current_job_data = self._get_current_job_data( - run_id, all_states) - if current_job_data: - for job_data in current_job_data: - if self.db_version >= CURRENT_DB_VERSION: - jobitem = JobItem_15(*job_data) - current_collection.append(JobData(jobitem.id, jobitem.counter, jobitem.job_name, jobitem.created, jobitem.modified, jobitem.submit, jobitem.start, jobitem.finish, jobitem.status, jobitem.rowtype, jobitem.ncpus, - jobitem.wallclock, jobitem.qos, jobitem.energy, jobitem.date, jobitem.section, jobitem.member, jobitem.chunk, jobitem.last, jobitem.platform, jobitem.job_id, jobitem.extra_data, jobitem.nnodes, jobitem.run_id, jobitem.MaxRSS, jobitem.AveRSS, jobitem.out, jobitem.err, jobitem.rowstatus)) - else: - jobitem = JobItem_12(*job_data) - current_collection.append(JobData(jobitem.id, jobitem.counter, jobitem.job_name, jobitem.created, jobitem.modified, jobitem.submit, jobitem.start, jobitem.finish, jobitem.status, jobitem.rowtype, jobitem.ncpus, - jobitem.wallclock, jobitem.qos, jobitem.energy, jobitem.date, jobitem.section, jobitem.member, jobitem.chunk, jobitem.last, jobitem.platform, jobitem.job_id, jobitem.extra_data, jobitem.nnodes, jobitem.run_id)) - return current_collection - return None - except Exception as exp: - print((traceback.format_exc())) - print(( - "Error on returning current job data. 
run_id {0}".format(run_id))) - return None - - def _get_experiment_run_by_id(self, run_id): - """ - :param run_id: Run Identifier - :type run_id: int - :return: First row that matches the run_id - :rtype: Row as Tuple - """ - try: - if self.conn: - self.conn.text_factory = str - cur = self.conn.cursor() - if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES: - cur.execute( - "SELECT run_id,created,start,finish,chunk_unit,chunk_size,completed,total,failed,queuing,running,submitted,suspended, metadata FROM experiment_run WHERE run_id=? and total > 0 ORDER BY run_id DESC", (run_id,)) - else: - cur.execute( - "SELECT run_id,created,start,finish,chunk_unit,chunk_size,completed,total,failed,queuing,running,submitted FROM experiment_run WHERE run_id=? and total > 0 ORDER BY run_id DESC", (run_id,)) - rows = cur.fetchall() - if len(rows) > 0: - return rows[0] - else: - return None + current_job_data = self.job_data_db.get_last_job_data_by_run_id(run_id) + + if current_job_data: + current_collection = [] + for job_data in current_job_data: + current_collection.append( + JobData( + _id=job_data.get("id"), + counter=job_data.get("counter"), + job_name=job_data.get("job_name"), + created=job_data.get("created"), + modified=job_data.get("modified"), + submit=job_data.get("submit"), + start=job_data.get("start"), + finish=job_data.get("finish"), + status=job_data.get("status"), + rowtype=job_data.get("rowtype"), + ncpus=job_data.get("ncpus"), + wallclock=job_data.get("wallclock"), + qos=job_data.get("qos"), + energy=job_data.get("energy"), + date=job_data.get("date"), + section=job_data.get("section"), + member=job_data.get("member"), + chunk=job_data.get("chunk"), + last=job_data.get("last"), + platform=job_data.get("platform"), + job_id=job_data.get("job_id"), + extra_data=job_data.get("extra_data"), + nnodes=job_data.get("nnodes"), + run_id=job_data.get("run_id"), + MaxRSS=job_data.get("MaxRSS", 0), + AveRSS=job_data.get("AveRSS", 0), + out=job_data.get("out", ""), + err=job_data.get("err", ""), + rowstatus=job_data.get("rowstatus", 0), + ) + ) + return current_collection else: - raise Exception("Not a valid connection.") - except sqlite3.Error as e: - if _debug == True: - print((traceback.format_exc())) - print(("Error while retrieving run {0} information. 
{1}".format( - run_id, "_get_experiment_run_by_id"))) - return None - - def _select_pragma_version(self): - """ Retrieves user_version from database - """ - try: - if self.conn: - self.conn.text_factory = str - cur = self.conn.cursor() - cur.execute("pragma user_version;") - rows = cur.fetchall() - # print("Result {0}".format(str(rows))) - if len(rows) > 0: - # print(rows) - # print("Row " + str(rows[0])) - result, = rows[0] - # print(result) - return int(result) if result >= 0 else None - else: - # Starting value - return None - except sqlite3.Error as e: - if _debug == True: - Log.info(traceback.format_exc()) - Log.debug(traceback.format_exc()) - Log.warning("Error while retrieving version: " + - str(type(e).__name__)) - return None - - def _get_max_id_experiment_run(self): - """Return the max id from experiment_run - - :return: max run_id, None - :rtype: int, None - """ - try: - if self.conn: - self.conn.text_factory = str - cur = self.conn.cursor() - if self.db_version >= DB_EXPERIMENT_HEADER_SCHEMA_CHANGES: - cur.execute( - "SELECT run_id,created,start,finish,chunk_unit,chunk_size,completed,total,failed,queuing,running,submitted,suspended, metadata from experiment_run ORDER BY run_id DESC LIMIT 0, 1") - else: - cur.execute( - "SELECT run_id,created,start,finish,chunk_unit,chunk_size,completed,total,failed,queuing,running,submitted from experiment_run ORDER BY run_id DESC LIMIT 0, 1") - rows = cur.fetchall() - if len(rows) > 0: - return rows[0] - else: - return None - return None - except sqlite3.Error as e: - if _debug == True: - Log.info(traceback.format_exc()) - Log.debug(traceback.format_exc()) - Log.warning("Error on select max run_id : " + - str(type(e).__name__)) - return None - - def _get_current_job_data(self, run_id, all_states=False): - """ - Get JobData by run_id. - :param run_id: Run Identifier - :type run_id: int - :param all_states: False if only last=1, True all - :type all_states: bool - """ - try: - if self.conn: - # print("Run {0} states {1} db {2}".format( - # run_id, all_states, self.db_version)) - self.conn.text_factory = str - cur = self.conn.cursor() - request_string = "" - if all_states == False: - if self.db_version >= CURRENT_DB_VERSION: - request_string = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id, MaxRSS, AveRSS, out, err, rowstatus from job_data WHERE run_id=? and last=1 and finish > 0 and rowtype >= 2 ORDER BY id" - else: - request_string = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id from job_data WHERE run_id=? and last=1 and finish > 0 and rowtype >= 2 ORDER BY id" - - else: - if self.db_version >= CURRENT_DB_VERSION: - request_string = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id, MaxRSS, AveRSS, out, err, rowstatus from job_data WHERE run_id=? and rowtype >= 2 ORDER BY id" - else: - request_string = "SELECT id, counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, wallclock, qos, energy, date, section, member, chunk, last, platform, job_id, extra_data, nnodes, run_id from job_data WHERE run_id=? 
and rowtype >= 2 ORDER BY id" - - cur.execute(request_string, (run_id,)) - rows = cur.fetchall() - # print(rows) - if len(rows) > 0: - return rows - else: - return None - except sqlite3.Error as e: - if _debug == True: - print((traceback.format_exc())) - print(("Error on select job data: {0}".format( - str(type(e).__name__)))) + return None + except Exception: + print((traceback.format_exc())) + print(("Error on returning current job data. run_id {0}".format(run_id))) return None @@ -776,7 +590,7 @@ def parse_output_number(string_number): number = string_number try: number = float(number) * multiplier - except Exception as exp: + except Exception: number = 0.0 - pass + return number diff --git a/autosubmit_api/components/jobs/joblist_helper.py b/autosubmit_api/components/jobs/joblist_helper.py index 14a87f7..2dc29c9 100644 --- a/autosubmit_api/components/jobs/joblist_helper.py +++ b/autosubmit_api/components/jobs/joblist_helper.py @@ -1,7 +1,7 @@ #!/usr/bin/env python from autosubmit_api.autosubmit_legacy.job.job_list import JobList from autosubmit_api.common.utils import datechunk_to_year -from autosubmit_api.database.db_jobdata import JobDataStructure, JobRow +from autosubmit_api.components.jobdata import JobDataStructure, JobRow from autosubmit_api.components.experiment.configuration_facade import AutosubmitConfigurationFacade from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer from autosubmit_api.config.basicConfig import APIBasicConfig diff --git a/autosubmit_api/database/adapters/__init__.py b/autosubmit_api/database/adapters/__init__.py index 2a8b182..a0454d9 100644 --- a/autosubmit_api/database/adapters/__init__.py +++ b/autosubmit_api/database/adapters/__init__.py @@ -21,12 +21,14 @@ from autosubmit_api.database.adapters.job_packages import ( JobPackagesDbAdapter, WrapperJobPackagesDbAdapter, ) +from autosubmit_api.database.adapters.experiment_run import ExperimentRunDbAdapter __all__ = [ "ExperimentDbAdapter", "ExperimentDetailsDbAdapter", "ExperimentStatusDbAdapter", + "ExperimentRunDbAdapter", "ExpGraphDrawDBAdapter", "ExperimentJoinDbAdapter", "JobPackagesDbAdapter", diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/adapters/experiment_run.py index aaed948..a2dc323 100644 --- a/autosubmit_api/database/adapters/experiment_run.py +++ b/autosubmit_api/database/adapters/experiment_run.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Any, Dict, Optional from autosubmit.database.db_manager import create_db_table_manager from sqlalchemy import select @@ -11,19 +11,32 @@ class ExperimentRunDbAdapter: self.expid = expid self.table_manager = create_db_table_manager( table=tables.experiment_run_table, - db_filepath=ExperimentPaths(expid).graph_data_db, + db_filepath=ExperimentPaths(expid).job_data_db, schema=expid, ) - def get_last_run(self) -> Optional[Dict[str, str]]: + def get_last_run(self) -> Optional[Dict[str, Any]]: """ Gets last run of the experiment """ with self.table_manager.get_connection() as conn: row = conn.execute( - select(self.table_manager.table) + select(tables.experiment_run_table) .order_by(tables.experiment_run_table.c.run_id.desc()) .limit(1) ).one_or_none() return row._mapping if row else None + + def get_run_by_id(self, run_id: int) -> Optional[Dict[str, Any]]: + """ + Gets run by id + """ + with self.table_manager.get_connection() as conn: + row = conn.execute( + select(tables.experiment_run_table) + .where(tables.experiment_run_table.c.run_id == run_id) + ).one_or_none() + 
+        return row._mapping if row else None
+
diff --git a/autosubmit_api/database/adapters/job_data.py b/autosubmit_api/database/adapters/job_data.py
new file mode 100644
index 0000000..4d6a328
--- /dev/null
+++ b/autosubmit_api/database/adapters/job_data.py
@@ -0,0 +1,32 @@
+from typing import Any, Dict, List
+from autosubmit.database.db_manager import create_db_table_manager
+from sqlalchemy import select
+
+from autosubmit_api.database import tables
+from autosubmit_api.persistance.experiment import ExperimentPaths
+
+
+class JobDataDbAdapter:
+    def __init__(self, expid: str) -> None:
+        self.expid = expid
+        self.table_manager = create_db_table_manager(
+            table=tables.job_data_table,
+            db_filepath=ExperimentPaths(expid).job_data_db,
+            schema=expid,
+        )
+
+    def get_last_job_data_by_run_id(self, run_id: int) -> List[Dict[str, Any]]:
+        """
+        Gets last job data of a specific run id
+        """
+        with self.table_manager.get_connection() as conn:
+            rows = conn.execute(
+                select(tables.job_data_table)
+                .where(
+                    (tables.job_data_table.c.run_id == run_id),
+                    (tables.job_data_table.c.rowtype == 2),
+                )
+                .order_by(tables.job_data_table.c.id.desc())
+            ).all()
+
+        return [row._mapping for row in rows]
diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py
index 1631d84..af81e9e 100644
--- a/autosubmit_api/experiment/common_requests.py
+++ b/autosubmit_api/experiment/common_requests.py
@@ -37,7 +37,7 @@ from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter
 from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter
 from autosubmit_api.database.adapters import ExperimentJoinDbAdapter
 from autosubmit_api.experiment import common_db_requests as DbRequests
-from autosubmit_api.database import db_jobdata as JobData
+from autosubmit_api.components import jobdata as JobData
 from autosubmit_api.common import utils as common_utils
 from autosubmit_api.components.jobs import utils as JUtils
-- GitLab


From 68fe7a9c9ed0792cdce10d4da2be520c959cc845 Mon Sep 17 00:00:00 2001
From: Luiggi Tenorio
Date: Mon, 27 May 2024 14:32:34 +0200
Subject: [PATCH 16/26] move table_manager from autosubmit

---
 autosubmit_api/database/README.md             |  13 +-
 .../database/adapters/experiment.py           |   2 +-
 .../database/adapters/experiment_details.py   |   2 +-
 .../database/adapters/experiment_run.py       |   2 +-
 .../database/adapters/experiment_status.py    |   2 +-
 .../database/adapters/graph_draw.py           |   2 +-
 autosubmit_api/database/adapters/job_data.py  |   2 +-
 .../database/adapters/job_packages.py         |   2 +-
 .../database/adapters/join/experiment_join.py |  88 ++++++++++++-
 autosubmit_api/database/queries.py            |  87 -------------
 autosubmit_api/database/table_manager.py      | 122 ++++++++++++++++++
 11 files changed, 224 insertions(+), 100 deletions(-)
 delete mode 100644 autosubmit_api/database/queries.py
 create mode 100644 autosubmit_api/database/table_manager.py

diff --git a/autosubmit_api/database/README.md b/autosubmit_api/database/README.md
index 3fee039..304149f 100644
--- a/autosubmit_api/database/README.md
+++ b/autosubmit_api/database/README.md
@@ -1,7 +1,12 @@
-**db_common** contains some function to access Autosubmit database. It is mostly legacy code that needs to be restructured.
+This package aims to encapsulate everything related to structured DDBB (SQLite/Postgres) operations.
 
-**db_jobdata** contains most of the classes of the old implementation of the `historical database`. It needs to be deleted, but some functions still use it.
Replace the references to this old implementation for the new implementation `history` module and proceed to delete this file. Also, take out the `Graph Drawing` class.
+* **common.py**: This module has all the common functions to allow DDBB interaction.
 
-**db_manager** is mostly legacy code that is still referenced.
+* **tables.py**: Holds all the table schemas. This module extends `autosubmit.tables`.
+
+* **models.py**: Holds data validators. Might be refactored in the future.
+
+* **table_manager.py**: Provides a generalized interface to interact with one table at a time.
+
+* **adapters**: This subpackage holds all the entities and their corresponding operations. It should provide an interface for other parts of the API that prevents them from worrying about DDBB logic.
 
-**db_structure** handles the consumption of the structure database of the experiment.
diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py
index 061d060..26833b1 100644
--- a/autosubmit_api/database/adapters/experiment.py
+++ b/autosubmit_api/database/adapters/experiment.py
@@ -1,5 +1,5 @@
 from typing import Any, Dict
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import create_db_table_manager
 from autosubmit_api.config.basicConfig import APIBasicConfig
 from autosubmit_api.database import tables
diff --git a/autosubmit_api/database/adapters/experiment_details.py b/autosubmit_api/database/adapters/experiment_details.py
index ae47122..b402a2a 100644
--- a/autosubmit_api/database/adapters/experiment_details.py
+++ b/autosubmit_api/database/adapters/experiment_details.py
@@ -1,5 +1,5 @@
 from typing import Any, Dict, List
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import create_db_table_manager
 from autosubmit_api.config.basicConfig import APIBasicConfig
 from autosubmit_api.database import tables
diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/adapters/experiment_run.py
index a2dc323..285fec8 100644
--- a/autosubmit_api/database/adapters/experiment_run.py
+++ b/autosubmit_api/database/adapters/experiment_run.py
@@ -1,5 +1,5 @@
 from typing import Any, Dict, Optional
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import create_db_table_manager
 from sqlalchemy import select
 
 from autosubmit_api.database import tables
diff --git a/autosubmit_api/database/adapters/experiment_status.py b/autosubmit_api/database/adapters/experiment_status.py
index 122ee16..1cfb5d5 100644
--- a/autosubmit_api/database/adapters/experiment_status.py
+++ b/autosubmit_api/database/adapters/experiment_status.py
@@ -1,7 +1,7 @@
 from datetime import datetime
 import os
 from typing import Dict, List
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import create_db_table_manager
 from sqlalchemy import delete, insert, select
 from autosubmit_api.config.basicConfig import APIBasicConfig
 from autosubmit_api.database import tables
diff --git a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/adapters/graph_draw.py
index 34c380f..6482a5b 100644
--- a/autosubmit_api/database/adapters/graph_draw.py
+++ b/autosubmit_api/database/adapters/graph_draw.py
@@ -1,4 +1,4 @@
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import
create_db_table_manager
 from autosubmit_api.database import tables
 from autosubmit_api.persistance.experiment import ExperimentPaths
 from typing import Any, Dict, List
diff --git a/autosubmit_api/database/adapters/job_data.py b/autosubmit_api/database/adapters/job_data.py
index 4d6a328..f7183cb 100644
--- a/autosubmit_api/database/adapters/job_data.py
+++ b/autosubmit_api/database/adapters/job_data.py
@@ -1,5 +1,5 @@
 from typing import Any, Dict, List
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import create_db_table_manager
 from sqlalchemy import select
 
 from autosubmit_api.database import tables
diff --git a/autosubmit_api/database/adapters/job_packages.py b/autosubmit_api/database/adapters/job_packages.py
index 99a7cde..8962902 100644
--- a/autosubmit_api/database/adapters/job_packages.py
+++ b/autosubmit_api/database/adapters/job_packages.py
@@ -1,5 +1,5 @@
 from typing import Dict, List
-from autosubmit.database.db_manager import create_db_table_manager
+from autosubmit_api.database.table_manager import create_db_table_manager
 from sqlalchemy import select
 
 from autosubmit_api.database import tables
diff --git a/autosubmit_api/database/adapters/join/experiment_join.py b/autosubmit_api/database/adapters/join/experiment_join.py
index 97c0a50..6aefe76 100644
--- a/autosubmit_api/database/adapters/join/experiment_join.py
+++ b/autosubmit_api/database/adapters/join/experiment_join.py
@@ -1,10 +1,94 @@
-from sqlalchemy import select
+from typing import Any, Optional
+from sqlalchemy import Column, or_, select
 from autosubmit_api.database import tables
 from autosubmit_api.database.common import (
     create_main_db_conn,
     execute_with_limit_offset,
 )
-from autosubmit_api.database.queries import generate_query_listexp_extended
+
+
+def generate_query_listexp_extended(
+    query: str = None,
+    only_active: bool = False,
+    owner: str = None,
+    exp_type: str = None,
+    autosubmit_version: str = None,
+    order_by: str = None,
+    order_desc: bool = False,
+):
+    """
+    Query listexp without accessing the view with status and total/completed jobs.
+ """ + + statement = ( + select( + tables.experiment_table, + tables.details_table, + tables.experiment_status_table.c.exp_id, + tables.experiment_status_table.c.status, + ) + .join( + tables.details_table, + tables.experiment_table.c.id == tables.details_table.c.exp_id, + isouter=True, + ) + .join( + tables.experiment_status_table, + tables.experiment_table.c.id == tables.experiment_status_table.c.exp_id, + isouter=True, + ) + ) + + # Build filters + filter_stmts = [] + + if query: + filter_stmts.append( + or_( + tables.experiment_table.c.name.like(f"{query}%"), + tables.experiment_table.c.description.like(f"%{query}%"), + tables.details_table.c.user.like(f"%{query}%"), + ) + ) + + if only_active: + filter_stmts.append(tables.experiment_status_table.c.status == "RUNNING") + + if owner: + filter_stmts.append(tables.details_table.c.user == owner) + + if exp_type == "test": + filter_stmts.append(tables.experiment_table.c.name.like("t%")) + elif exp_type == "operational": + filter_stmts.append(tables.experiment_table.c.name.like("o%")) + elif exp_type == "experiment": + filter_stmts.append(tables.experiment_table.c.name.not_like("t%")) + filter_stmts.append(tables.experiment_table.c.name.not_like("o%")) + + if autosubmit_version: + filter_stmts.append( + tables.experiment_table.c.autosubmit_version == autosubmit_version + ) + + statement = statement.where(*filter_stmts) + + # Order by + ORDER_OPTIONS = { + "expid": tables.experiment_table.c.name, + "created": tables.details_table.c.created, + "description": tables.experiment_table.c.description, + } + order_col: Optional[Column[Any]] = None + if order_by: + order_col = ORDER_OPTIONS.get(order_by, None) + + if isinstance(order_col, Column): + if order_desc: + order_col = order_col.desc() + statement = statement.order_by(order_col) + + return statement class ExperimentJoinDbAdapter: diff --git a/autosubmit_api/database/queries.py b/autosubmit_api/database/queries.py deleted file mode 100644 index 55cca9e..0000000 --- a/autosubmit_api/database/queries.py +++ /dev/null @@ -1,87 +0,0 @@ -from typing import Optional -from pyparsing import Any -from sqlalchemy import Column, select, or_ -from autosubmit_api.database import tables - - -def generate_query_listexp_extended( - query: str = None, - only_active: bool = False, - owner: str = None, - exp_type: str = None, - autosubmit_version: str = None, - order_by: str = None, - order_desc: bool = False, -): - """ - Query listexp without accessing the view with status and total/completed jobs. 
- """ - - statement = ( - select( - tables.experiment_table, - tables.details_table, - tables.experiment_status_table.c.exp_id, - tables.experiment_status_table.c.status, - ) - .join( - tables.details_table, - tables.experiment_table.c.id == tables.details_table.c.exp_id, - isouter=True, - ) - .join( - tables.experiment_status_table, - tables.experiment_table.c.id == tables.experiment_status_table.c.exp_id, - isouter=True, - ) - ) - - # Build filters - filter_stmts = [] - - if query: - filter_stmts.append( - or_( - tables.experiment_table.c.name.like(f"{query}%"), - tables.experiment_table.c.description.like(f"%{query}%"), - tables.details_table.c.user.like(f"%{query}%"), - ) - ) - - if only_active: - filter_stmts.append(tables.experiment_status_table.c.status == "RUNNING") - - if owner: - filter_stmts.append(tables.details_table.c.user == owner) - - if exp_type == "test": - filter_stmts.append(tables.experiment_table.c.name.like("t%")) - elif exp_type == "operational": - filter_stmts.append(tables.experiment_table.c.name.like("o%")) - elif exp_type == "experiment": - filter_stmts.append(tables.experiment_table.c.name.not_like("t%")) - filter_stmts.append(tables.experiment_table.c.name.not_like("o%")) - - if autosubmit_version: - filter_stmts.append( - tables.experiment_table.c.autosubmit_version == autosubmit_version - ) - - statement = statement.where(*filter_stmts) - - # Order by - ORDER_OPTIONS = { - "expid": tables.experiment_table.c.name, - "created": tables.details_table.c.created, - "description": tables.experiment_table.c.description, - } - order_col: Optional[Column[Any]] = None - if order_by: - order_col = ORDER_OPTIONS.get(order_by, None) - - if isinstance(order_col, Column): - if order_desc: - order_col = order_col.desc() - statement = statement.order_by(order_col) - - return statement diff --git a/autosubmit_api/database/table_manager.py b/autosubmit_api/database/table_manager.py new file mode 100644 index 0000000..eb0709d --- /dev/null +++ b/autosubmit_api/database/table_manager.py @@ -0,0 +1,122 @@ +from abc import ABC, abstractmethod +from typing import Any, Dict, List, Optional, Type, Union +from sqlalchemy import Connection, Engine, Table, delete, insert, select +from autosubmitconfigparser.config.basicconfig import BasicConfig +from sqlalchemy.schema import CreateTable, CreateSchema, DropTable +from sqlalchemy.orm import DeclarativeBase + +from autosubmit.database import tables, session + + +class DbTableManager(ABC): + engine: Engine + table: Table + + @abstractmethod + def __init__( + self, + table: Union[Type[DeclarativeBase], Table], + db_filepath: str = None, + schema: Optional[str] = None, + ) -> None: + """ + Class to manage a database table with common methods + :param table: SQLAlchemy Table + :param db_filepath: File path location in case of SQLite is used as database backend + :param schema: Almost always same as expid. Postgres database schema in case this is a distributed table. 
+ """ + self.schema = schema + self.db_filepath = db_filepath + if isinstance(table, type) and issubclass(table, DeclarativeBase): + self.table = table.__table__ + else: + self.table = table + + def get_connection(self) -> Connection: + return self.engine.connect() + + def create_table(self, conn: Connection): + """ + Create table + """ + conn.execute(CreateTable(self.table, if_not_exists=True)) + conn.commit() + + def drop_table(self, conn: Connection): + """ + Drops the table + """ + conn.execute(DropTable(self.table, if_exists=True)) + conn.commit() + + def insert_many(self, conn: Connection, values: List[Dict[str, Any]]) -> int: + """ + Insert many values + """ + result = conn.execute(insert(self.table), values) + conn.commit() + return result.rowcount + + def select_all(self, conn: Connection): + rows = conn.execute(select(self.table)).all() + return rows + + def delete_all(self, conn: Connection) -> int: + """ + Deletes all the rows of the table + """ + result = conn.execute(delete(self.table)) + conn.commit() + return result.rowcount + + +class SQLiteDbTableManager(DbTableManager): + def __init__( + self, + table: Union[Type[DeclarativeBase], Table], + db_filepath: str = None, + schema: Optional[str] = None, + ) -> None: + super().__init__(table, db_filepath, schema) + self.engine = session.create_sqlite_engine(self.db_filepath) + + +class PostgresDbTableManager(DbTableManager): + def __init__( + self, + table: Union[Type[DeclarativeBase], Table], + db_filepath: str = None, + schema: Optional[str] = None, + ) -> None: + super().__init__(table, db_filepath, schema) + self.engine = session.Session().bind + if schema: + self.table = tables.table_change_schema(schema, table) + + def create_table(self, conn: Connection): + """ + Create table and the schema (if applicable) + """ + if self.schema: + conn.execute(CreateSchema(self.schema, if_not_exists=True)) + super().create_table(conn) + + +def create_db_table_manager( + table: Union[Type[DeclarativeBase], Table], + db_filepath: str = None, + schema: Optional[str] = None, +) -> DbTableManager: + """ + Creates a Postgres or SQLite DbTableManager depending on the Autosubmit configuration + :param table: SQLAlchemy Table + :param db_filepath: File path location in case of SQLite is used as database backend + :param schema: Almost always same as expid. Postgres database schema in case this is a distributed table. 
+ """ + BasicConfig.read() + if BasicConfig.DATABASE_BACKEND == "postgres": + return PostgresDbTableManager(table, db_filepath, schema) + elif BasicConfig.DATABASE_BACKEND == "sqlite": + return SQLiteDbTableManager(table, db_filepath, schema) + else: + raise Exception("Invalid DATABASE_BACKEND") -- GitLab From 7ab1b908204a210164b02e2c248503a0e6bcf653 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Mon, 27 May 2024 17:17:05 +0200 Subject: [PATCH 17/26] history refactor --- .../autosubmit_legacy/job/job_list.py | 2 +- .../builders/experiment_history_builder.py | 30 +- autosubmit_api/database/adapters/__init__.py | 2 + .../database/adapters/experiment_run.py | 24 +- autosubmit_api/database/adapters/job_data.py | 89 ++- .../history/data_classes/experiment_run.py | 3 +- .../history/data_classes/job_data.py | 3 +- .../database_managers/database_manager.py | 150 ----- .../experiment_history_db_manager.py | 544 ++++-------------- autosubmit_api/history/experiment_history.py | 8 +- 10 files changed, 218 insertions(+), 637 deletions(-) delete mode 100644 autosubmit_api/history/database_managers/database_manager.py diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py index 40b3707..2ea1cbe 100644 --- a/autosubmit_api/autosubmit_legacy/job/job_list.py +++ b/autosubmit_api/autosubmit_legacy/job/job_list.py @@ -592,7 +592,7 @@ class JobList: job_data = None try: experiment_history = ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)).build_reader_experiment_history() - job_data = experiment_history.manager.get_all_last_job_data_dcs() if experiment_history.is_header_ready() else None + job_data = experiment_history.manager.get_all_last_job_data_dcs() except Exception: print(traceback.print_exc()) # Result variables diff --git a/autosubmit_api/builders/experiment_history_builder.py b/autosubmit_api/builders/experiment_history_builder.py index dd13a67..c2ac367 100644 --- a/autosubmit_api/builders/experiment_history_builder.py +++ b/autosubmit_api/builders/experiment_history_builder.py @@ -1,9 +1,9 @@ #!/usr/bin/python3.7 -from ..history.experiment_history import ExperimentHistory -from ..history.internal_logging import Logging -from ..config.basicConfig import APIBasicConfig -from ..history.database_managers.experiment_history_db_manager import ExperimentHistoryDbManager -from .basic_builder import BasicBuilder +from autosubmit_api.history.experiment_history import ExperimentHistory +from autosubmit_api.history.internal_logging import Logging +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.history.database_managers.experiment_history_db_manager import ExperimentHistoryDbManager +from autosubmit_api.builders.basic_builder import BasicBuilder from abc import ABCMeta, abstractmethod class Builder(BasicBuilder, metaclass=ABCMeta): @@ -39,13 +39,11 @@ class ExperimentHistoryBuilder(Builder): def generate_experiment_history_db_manager(self): # type: () -> None self._validate_basic_config() - self.experiment_history_db_manager = ExperimentHistoryDbManager(self.expid, self.basic_config) + self.experiment_history_db_manager = ExperimentHistoryDbManager(self.expid) def initialize_experiment_history_db_manager(self): # type: () -> None - if not self.experiment_history_db_manager: - raise Exception("Experiment Database Manager is missing") - self.experiment_history_db_manager.initialize() + return NotImplementedError def generate_logger(self): # type: () -> None @@ -58,7 +56,7 @@ class 
--
GitLab


From 7ab1b908204a210164b02e2c248503a0e6bcf653 Mon Sep 17 00:00:00 2001
From: Luiggi Tenorio
Date: Mon, 27 May 2024 17:17:05 +0200
Subject: [PATCH 17/26] history refactor

---
 .../autosubmit_legacy/job/job_list.py         |   2 +-
 .../builders/experiment_history_builder.py    |  30 +-
 autosubmit_api/database/adapters/__init__.py  |   2 +
 .../database/adapters/experiment_run.py       |  24 +-
 autosubmit_api/database/adapters/job_data.py  |  89 ++-
 .../history/data_classes/experiment_run.py    |   3 +-
 .../history/data_classes/job_data.py          |   3 +-
 .../database_managers/database_manager.py     | 150 -----
 .../experiment_history_db_manager.py          | 544 ++++--------------
 autosubmit_api/history/experiment_history.py  |   8 +-
 10 files changed, 218 insertions(+), 637 deletions(-)
 delete mode 100644 autosubmit_api/history/database_managers/database_manager.py

diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py
index 40b3707..2ea1cbe 100644
--- a/autosubmit_api/autosubmit_legacy/job/job_list.py
+++ b/autosubmit_api/autosubmit_legacy/job/job_list.py
@@ -592,7 +592,7 @@ class JobList:
         job_data = None
         try:
             experiment_history = ExperimentHistoryDirector(ExperimentHistoryBuilder(expid)).build_reader_experiment_history()
-            job_data = experiment_history.manager.get_all_last_job_data_dcs() if experiment_history.is_header_ready() else None
+            job_data = experiment_history.manager.get_all_last_job_data_dcs()
         except Exception:
             print(traceback.print_exc())
         # Result variables
diff --git a/autosubmit_api/builders/experiment_history_builder.py b/autosubmit_api/builders/experiment_history_builder.py
index dd13a67..c2ac367 100644
--- a/autosubmit_api/builders/experiment_history_builder.py
+++ b/autosubmit_api/builders/experiment_history_builder.py
@@ -1,9 +1,9 @@
 #!/usr/bin/python3.7
-from ..history.experiment_history import ExperimentHistory
-from ..history.internal_logging import Logging
-from ..config.basicConfig import APIBasicConfig
-from ..history.database_managers.experiment_history_db_manager import ExperimentHistoryDbManager
-from .basic_builder import BasicBuilder
+from autosubmit_api.history.experiment_history import ExperimentHistory
+from autosubmit_api.history.internal_logging import Logging
+from autosubmit_api.config.basicConfig import APIBasicConfig
+from autosubmit_api.history.database_managers.experiment_history_db_manager import ExperimentHistoryDbManager
+from autosubmit_api.builders.basic_builder import BasicBuilder
 from abc import ABCMeta, abstractmethod

 class Builder(BasicBuilder, metaclass=ABCMeta):
@@ -39,13 +39,11 @@ class ExperimentHistoryBuilder(Builder):
     def generate_experiment_history_db_manager(self):
         # type: () -> None
         self._validate_basic_config()
-        self.experiment_history_db_manager = ExperimentHistoryDbManager(self.expid, self.basic_config)
+        self.experiment_history_db_manager = ExperimentHistoryDbManager(self.expid)

     def initialize_experiment_history_db_manager(self):
         # type: () -> None
-        if not self.experiment_history_db_manager:
-            raise Exception("Experiment Database Manager is missing")
-        self.experiment_history_db_manager.initialize()
+        raise NotImplementedError

     def generate_logger(self):
         # type: () -> None
@@ -58,7 +56,7 @@ class ExperimentHistoryBuilder(Builder):
         if not self.experiment_history_db_manager:
             raise Exception("Experiment Database Manager is missing")
         else:
-            if not self.experiment_history_db_manager.my_database_exists():
+            if APIBasicConfig.DATABASE_BACKEND == "sqlite" and not self.experiment_history_db_manager.my_database_exists():
                 raise Exception("Job/Runs database does not exist")
         if not self.logger:
             raise Exception("Logging is missing.")
@@ -69,18 +67,6 @@ class ExperimentHistoryDirector(object):
         # type: (Builder) -> None
         self.builder = builder

-    def build_current_experiment_history(self, basic_config=None):
-        # type: (APIBasicConfig) -> ExperimentHistory
-        """ Builds ExperimentHistory updated to current version. """
-        if basic_config:
-            self.builder.set_basic_config(basic_config)
-        else:
-            self.builder.generate_basic_config()
-        self.builder.generate_experiment_history_db_manager()
-        self.builder.initialize_experiment_history_db_manager()
-        self.builder.generate_logger()
-        return self.builder.make_experiment_history()
-
     def build_reader_experiment_history(self, basic_config=None):
         # type: (APIBasicConfig) -> ExperimentHistory
         """ Builds ExperimentHistory that doesn't update to current version automatically. """
diff --git a/autosubmit_api/database/adapters/__init__.py b/autosubmit_api/database/adapters/__init__.py
index a0454d9..dffe6ce 100644
--- a/autosubmit_api/database/adapters/__init__.py
+++ b/autosubmit_api/database/adapters/__init__.py
@@ -22,6 +22,7 @@ from autosubmit_api.database.adapters.job_packages import (
     WrapperJobPackagesDbAdapter,
 )
 from autosubmit_api.database.adapters.experiment_run import ExperimentRunDbAdapter
+from autosubmit_api.database.adapters.job_data import JobDataDbAdapter


 __all__ = [
@@ -29,6 +30,7 @@ __all__ = [
     "ExperimentDetailsDbAdapter",
     "ExperimentStatusDbAdapter",
     "ExperimentRunDbAdapter",
+    "JobDataDbAdapter",
    "ExpGraphDrawDBAdapter",
     "ExperimentJoinDbAdapter",
     "JobPackagesDbAdapter",
diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/adapters/experiment_run.py
index 285fec8..01f7d3f 100644
--- a/autosubmit_api/database/adapters/experiment_run.py
+++ b/autosubmit_api/database/adapters/experiment_run.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional
 from autosubmit_api.database.table_manager import create_db_table_manager

 from sqlalchemy import select
@@ -14,6 +14,16 @@ class ExperimentRunDbAdapter:
             db_filepath=ExperimentPaths(expid).job_data_db,
             schema=expid,
         )
+        self.table = self.table_manager.table
+
+    def get_all(self) -> List[Dict[str, Any]]:
+        """
+        Gets all runs of the experiment
+        """
+        with self.table_manager.get_connection() as conn:
+            rows = conn.execute(select(self.table)).all()
+
+        return [row._asdict() for row in rows]

     def get_last_run(self) -> Optional[Dict[str, Any]]:
         """
@@ -21,12 +31,12 @@ class ExperimentRunDbAdapter:
         """
         with self.table_manager.get_connection() as conn:
             row = conn.execute(
-                select(tables.experiment_run_table)
-                .order_by(tables.experiment_run_table.c.run_id.desc())
+                select(self.table)
+                .order_by(self.table.c.run_id.desc())
                 .limit(1)
             ).one_or_none()

-        return row._mapping if row else None
+        return row._asdict() if row else None
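A rough sketch of how this adapter might be consumed (the expid is made up; `get_last_run` returns a plain dict, and the `run_id`/`total` keys follow the experiment_run schema that appears in the deleted manager further below):

from autosubmit_api.database.adapters import ExperimentRunDbAdapter

run_db = ExperimentRunDbAdapter("a000")  # hypothetical expid
last_run = run_db.get_last_run()         # plain dict or None
if last_run:
    print(last_run["run_id"], last_run["total"])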

     def get_run_by_id(self, run_id: int) -> Optional[Dict[str, Any]]:
         """
@@ -34,9 +44,9 @@ class ExperimentRunDbAdapter:
         """
         with self.table_manager.get_connection() as conn:
             row = conn.execute(
-                select(tables.experiment_run_table)
-                .where(tables.experiment_run_table.c.run_id == run_id)
+                select(self.table)
+                .where(self.table.c.run_id == run_id)
             ).one_or_none()

-        return row._mapping if row else None
+        return row._asdict() if row else None
diff --git a/autosubmit_api/database/adapters/job_data.py b/autosubmit_api/database/adapters/job_data.py
index f7183cb..f3f762d 100644
--- a/autosubmit_api/database/adapters/job_data.py
+++ b/autosubmit_api/database/adapters/job_data.py
@@ -1,6 +1,6 @@
 from typing import Any, Dict, List
 from autosubmit_api.database.table_manager import create_db_table_manager
-from sqlalchemy import select
+from sqlalchemy import or_, select

 from autosubmit_api.database import tables
 from autosubmit_api.persistance.experiment import ExperimentPaths
@@ -14,6 +14,7 @@ class JobDataDbAdapter:
             db_filepath=ExperimentPaths(expid).job_data_db,
             schema=expid,
         )
+        self.table = self.table_manager.table

     def get_last_job_data_by_run_id(self, run_id: int) -> List[Dict[str, Any]]:
         """
@@ -21,12 +22,88 @@ class JobDataDbAdapter:
         """
         with self.table_manager.get_connection() as conn:
             row = conn.execute(
-                select(tables.job_data_table)
+                select(self.table)
                 .where(
-                    (tables.job_data_table.c.run_id == run_id),
-                    (tables.job_data_table.c.rowtype == 2),
+                    (self.table.c.run_id == run_id),
+                    (self.table.c.rowtype == 2),
                 )
-                .order_by(tables.job_data_table.c.id.desc())
+                .order_by(self.table.c.id.desc())
             ).all()

-        return [row._mapping for row in row]
+        return [row._asdict() for row in row]
+
+    def get_last_job_data(self) -> List[Dict[str, Any]]:
+        """
+        Gets the last (last=1) job data rows
+        """
+        with self.table_manager.get_connection() as conn:
+            rows = conn.execute(
+                select(self.table).where(
+                    (self.table.c.last == 1),
+                    (self.table.c.rowtype >= 2),
+                )
+            ).all()
+            return [row._asdict() for row in rows]
+
+    def get_jobs_by_name(self, job_name: str) -> List[Dict[str, Any]]:
+        """
+        Gets job data by name
+        """
+        with self.table_manager.get_connection() as conn:
+            rows = conn.execute(
+                select(self.table)
+                .where(self.table.c.job_name == job_name)
+                .order_by(self.table.c.counter.desc())
+            ).all()

+        return [row._asdict() for row in rows]
+
+    def get_all(self) -> List[Dict[str, Any]]:
+        """
+        Gets all job data
+        """
+        with self.table_manager.get_connection() as conn:
+            statement = (
+                select(self.table)
+                .where(self.table.c.id > 0)
+                .order_by(self.table.c.id)
+            )
+            rows = conn.execute(statement).all()
+
+        return [row._asdict() for row in rows]
+
+    def get_job_data_COMPLETED_by_rowtype_run_id(self, rowtype: int, run_id: int) -> List[Dict[str, Any]]:
+        """
+        Gets job data by rowtype and run id
+        """
+        with self.table_manager.get_connection() as conn:
+            rows = conn.execute(
+                select(self.table)
+                .where(
+                    (self.table.c.run_id == run_id),
+                    (self.table.c.rowtype == rowtype),
+                    (self.table.c.status == "COMPLETED"),
+                )
+                .order_by(self.table.c.id)
+            ).all()
+
+        return [row._asdict() for row in rows]
+
+    def get_job_data_COMPLETED_by_section(self, section: str) -> List[Dict[str, Any]]:
+        """
+        Gets COMPLETED job data by section (or member)
+        """
+        with self.table_manager.get_connection() as conn:
+            rows = conn.execute(
+                select(self.table)
+                .where(
+                    (self.table.c.status == "COMPLETED"),
+                    or_(
+                        (self.table.c.section == section),
+                        (self.table.c.member == section)
+                    )
+                )
+                .order_by(self.table.c.id)
+            ).all()
+
+        return [row._asdict() for row in rows]
\ No newline at end of file
diff --git a/autosubmit_api/history/data_classes/experiment_run.py b/autosubmit_api/history/data_classes/experiment_run.py
index ef3459f..d886056 100644
--- a/autosubmit_api/history/data_classes/experiment_run.py
+++ 
b/autosubmit_api/history/data_classes/experiment_run.py @@ -137,10 +137,9 @@ class ExperimentRun(object): raise dbexception @classmethod - def from_model(cls, row): + def from_model(cls, row_dict: dict): """ Build ExperimentRun from ExperimentRunRow """ try: - row_dict = row._asdict() experiment_run = cls(0) experiment_run.run_id = row_dict.get('run_id', 0) experiment_run.created = get_current_datetime_if_none(row_dict.get('created', None)) diff --git a/autosubmit_api/history/data_classes/job_data.py b/autosubmit_api/history/data_classes/job_data.py index 739e0a5..6cb5a8e 100644 --- a/autosubmit_api/history/data_classes/job_data.py +++ b/autosubmit_api/history/data_classes/job_data.py @@ -78,9 +78,8 @@ class JobData(object): self.platform_output = platform_output # DB 17 @classmethod - def from_model(cls, row): + def from_model(cls, row_dict: dict): """ Build JobData from JobDataRow. """ - row_dict = row._asdict() job_data = cls(row_dict['id'], row_dict['counter'], row_dict['job_name'], diff --git a/autosubmit_api/history/database_managers/database_manager.py b/autosubmit_api/history/database_managers/database_manager.py deleted file mode 100644 index c7cb2d9..0000000 --- a/autosubmit_api/history/database_managers/database_manager.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015-2020 Earth Sciences Department, BSC-CNS -# This file is part of Autosubmit. - -# Autosubmit is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# Autosubmit is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with Autosubmit. If not, see . - -import sqlite3 -import os -from autosubmit_api.history import utils as HUtils -from autosubmit_api.history.database_managers import database_models as Models -from autosubmit_api.config.basicConfig import APIBasicConfig -from abc import ABCMeta - -DEFAULT_JOBDATA_DIR = os.path.join('/esarchive', 'autosubmit', 'as_metadata', 'data') -DEFAULT_HISTORICAL_LOGS_DIR = os.path.join('/esarchive', 'autosubmit', 'as_metadata', 'logs') -DEFAULT_LOCAL_ROOT_DIR = os.path.join('/esarchive', 'autosubmit') - -class DatabaseManager(metaclass=ABCMeta): - """ Simple database manager. Needs expid. """ - AS_TIMES_DB_NAME = "as_times.db" # default AS_TIMES location - ECEARTH_DB_NAME = "ecearth.db" # default EC_EARTH_DB_NAME location - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None - self.expid = expid - self.JOBDATA_DIR = basic_config.JOBDATA_DIR - self.LOCAL_ROOT_DIR = basic_config.LOCAL_ROOT_DIR - self.db_version = Models.DatabaseVersion.NO_DATABASE.value - - def get_connection(self, path): - # type : (str) -> Sqlite3Connection - """ - Create a database connection to the SQLite database specified by path. 
- :param path: database file name - :return: Connection object or None - """ - if not os.path.exists(path): - self._create_database_file(path) - return sqlite3.connect(path) - - def _create_database_file(self, path): - # type : (str) -> None - """ creates a database files with full permissions """ - os.umask(0) - os.open(path, os.O_WRONLY | os.O_CREAT, 0o776) - - def execute_statement_on_dbfile(self, path, statement): - # type : (str, str) -> None - """ Executes a statement on a database file specified by path. """ - conn = self.get_connection(path) - cursor = conn.cursor() - cursor.execute(statement) - conn.commit() - conn.close() - - def execute_statement_with_arguments_on_dbfile(self, path, statement, arguments): - # type : (str, str, Tuple) -> None - """ Executes an statement with arguments on a database file specified by path. """ - conn = self.get_connection(path) - cursor = conn.cursor() - cursor.execute(statement, arguments) - conn.commit() - conn.close() - - def execute_many_statement_with_arguments_on_dbfile(self, path, statement, arguments_list): - # type : (str, str, List[Tuple]) -> None - """ Executes many statements from a list of arguments specified by a path. """ - conn = self.get_connection(path) - cursor = conn.cursor() - cursor.executemany(statement, arguments_list) - conn.commit() - conn.close() - - def execute_many_statements_on_dbfile(self, path, statements): - # type : (str, List[str]) -> None - """ - Updates the table schema using a **small** list of statements. No Exception raised. - Should be used to execute a list of schema updates that might have been already applied. - """ - for statement in statements: - try: - self.execute_statement_on_dbfile(path, statement) - except Exception as exp: - pass - - def get_from_statement(self, path, statement): - # type : (str, str) -> List[Tuple] - """ Get the rows from a statement with no arguments """ - conn = self.get_connection(path) - conn.text_factory = str - cursor = conn.cursor() - cursor.execute(statement) - statement_rows = cursor.fetchall() - conn.close() - return statement_rows - - def get_from_statement_with_arguments(self, path, statement, arguments): - # type : (str, str, Tuple) -> List[Tuple] - """ Get the rows from a statement with arguments """ - conn = self.get_connection(path) - conn.text_factory = str - cursor = conn.cursor() - cursor.execute(statement, arguments) - statement_rows = cursor.fetchall() - conn.close() - return statement_rows - - def insert_statement(self, path, statement): - # type : (str, str) -> int - """ Insert statement into path """ - conn = self.get_connection(path) - conn.text_factory = str - cursor = conn.cursor() - cursor.execute(statement) - lastrow_id = cursor.lastrowid - conn.commit() - conn.close() - return lastrow_id - - def insert_statement_with_arguments(self, path, statement, arguments): - # type : (str, str, Tuple) -> int - """ Insert statement with arguments into path """ - conn = self.get_connection(path) - conn.text_factory = str - cursor = conn.cursor() - cursor.execute(statement, arguments) - lastrow_id = cursor.lastrowid - conn.commit() - conn.close() - return lastrow_id - - def get_built_select_statement(self, table_name, conditions=None): - # type : (str, namedtuple, str) -> str - """ Build and return a SELECT statement with the same fields as the model. Requires that the table is associated with a model (namedtuple). 
""" - model = Models.get_correct_model_for_table_and_version(table_name, self.db_version) # Models.table_name_to_model[table_name] - if conditions: - return "SELECT {0} FROM {1} WHERE {2}".format(HUtils.get_fields_as_comma_str(model), table_name, conditions) - else: - return "SELECT {0} FROM {1}".format(HUtils.get_fields_as_comma_str(model), table_name) diff --git a/autosubmit_api/history/database_managers/experiment_history_db_manager.py b/autosubmit_api/history/database_managers/experiment_history_db_manager.py index 1f35a92..db3616a 100644 --- a/autosubmit_api/history/database_managers/experiment_history_db_manager.py +++ b/autosubmit_api/history/database_managers/experiment_history_db_manager.py @@ -16,450 +16,114 @@ # You should have received a copy of the GNU General Public License # along with Autosubmit. If not, see . import os -import textwrap +from typing import Any, Dict, List, Optional +from autosubmit_api.database.adapters import ExperimentRunDbAdapter, JobDataDbAdapter from autosubmit_api.persistance.experiment import ExperimentPaths -from autosubmit_api.history import utils as HUtils from autosubmit_api.history.database_managers import database_models as Models from autosubmit_api.history.data_classes.job_data import JobData from autosubmit_api.history.data_classes.experiment_run import ExperimentRun -from autosubmit_api.config.basicConfig import APIBasicConfig -from autosubmit_api.history.database_managers.database_manager import DatabaseManager -from typing import List -from collections import namedtuple -DEFAULT_MAX_COUNTER = 0 -class ExperimentHistoryDbManager(DatabaseManager): - """ Manages actions directly on the database. - """ - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None - """ Requires expid and jobdata_dir_path. """ - super(ExperimentHistoryDbManager, self).__init__(expid, basic_config) - self._set_schema_changes() - self._set_table_queries() - exp_paths = ExperimentPaths(expid) - self.historicaldb_file_path = exp_paths.job_data_db - if self.my_database_exists(): - self.set_db_version_models() - - def initialize(self): - """ Check if database exists. Updates to current version if necessary. """ - if self.my_database_exists(): - if not self.is_current_version(): - self.update_historical_database() - else: - self.create_historical_database() - self.set_db_version_models() - - def set_db_version_models(self): - self.db_version = self._get_pragma_version() - self.experiment_run_row_model = Models.get_experiment_row_model(self.db_version) - self.job_data_row_model = Models.get_job_data_row_model(self.db_version) - - def my_database_exists(self): - return os.path.exists(self.historicaldb_file_path) - - def is_header_ready_db_version(self): - if self.my_database_exists(): - return self._get_pragma_version() >= Models.DatabaseVersion.EXPERIMENT_HEADER_SCHEMA_CHANGES.value - return False - - def is_current_version(self): - if self.my_database_exists(): - return self._get_pragma_version() == Models.DatabaseVersion.CURRENT_DB_VERSION.value - return False - - def _set_table_queries(self): - """ Sets basic table queries. 
""" - self.create_table_header_query = textwrap.dedent( - '''CREATE TABLE - IF NOT EXISTS experiment_run ( - run_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - created TEXT NOT NULL, - modified TEXT NOT NULL, - start INTEGER NOT NULL, - finish INTEGER, - chunk_unit TEXT NOT NULL, - chunk_size INTEGER NOT NULL, - completed INTEGER NOT NULL, - total INTEGER NOT NULL, - failed INTEGER NOT NULL, - queuing INTEGER NOT NULL, - running INTEGER NOT NULL, - submitted INTEGER NOT NULL, - suspended INTEGER NOT NULL DEFAULT 0, - metadata TEXT - ); - ''') - self.create_table_query = textwrap.dedent( - '''CREATE TABLE - IF NOT EXISTS job_data ( - id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - counter INTEGER NOT NULL, - job_name TEXT NOT NULL, - created TEXT NOT NULL, - modified TEXT NOT NULL, - submit INTEGER NOT NULL, - start INTEGER NOT NULL, - finish INTEGER NOT NULL, - status TEXT NOT NULL, - rowtype INTEGER NOT NULL, - ncpus INTEGER NOT NULL, - wallclock TEXT NOT NULL, - qos TEXT NOT NULL, - energy INTEGER NOT NULL, - date TEXT NOT NULL, - section TEXT NOT NULL, - member TEXT NOT NULL, - chunk INTEGER NOT NULL, - last INTEGER NOT NULL, - platform TEXT NOT NULL, - job_id INTEGER NOT NULL, - extra_data TEXT NOT NULL, - nnodes INTEGER NOT NULL DEFAULT 0, - run_id INTEGER, - MaxRSS REAL NOT NULL DEFAULT 0.0, - AveRSS REAL NOT NULL DEFAULT 0.0, - out TEXT NOT NULL, - err TEXT NOT NULL, - rowstatus INTEGER NOT NULL DEFAULT 0, - children TEXT, - platform_output TEXT, - UNIQUE(counter,job_name) - ); - ''') - self.create_index_query = textwrap.dedent(''' - CREATE INDEX IF NOT EXISTS ID_JOB_NAME ON job_data(job_name); - ''') - - def _set_schema_changes(self): - # type : () -> None - """ Creates the list of schema changes""" - self.version_schema_changes = [ - "ALTER TABLE job_data ADD COLUMN nnodes INTEGER NOT NULL DEFAULT 0", - "ALTER TABLE job_data ADD COLUMN run_id INTEGER" - ] - # Version 15 - self.version_schema_changes.extend([ - "ALTER TABLE job_data ADD COLUMN MaxRSS REAL NOT NULL DEFAULT 0.0", - "ALTER TABLE job_data ADD COLUMN AveRSS REAL NOT NULL DEFAULT 0.0", - "ALTER TABLE job_data ADD COLUMN out TEXT NOT NULL DEFAULT ''", - "ALTER TABLE job_data ADD COLUMN err TEXT NOT NULL DEFAULT ''", - "ALTER TABLE job_data ADD COLUMN rowstatus INTEGER NOT NULL DEFAULT 0", - "ALTER TABLE experiment_run ADD COLUMN suspended INTEGER NOT NULL DEFAULT 0", - "ALTER TABLE experiment_run ADD COLUMN metadata TEXT" - ]) - # Version 16 - self.version_schema_changes.extend([ - "ALTER TABLE experiment_run ADD COLUMN modified TEXT" - ]) - # Version 17 - self.version_schema_changes.extend([ - "ALTER TABLE job_data ADD COLUMN children TEXT", - "ALTER TABLE job_data ADD COLUMN platform_output TEXT" - ]) - - def create_historical_database(self): - """ Creates the historical database with the latest changes. """ - self.execute_statement_on_dbfile(self.historicaldb_file_path, self.create_table_header_query) - self.execute_statement_on_dbfile(self.historicaldb_file_path, self.create_table_query) - self.execute_statement_on_dbfile(self.historicaldb_file_path, self.create_index_query) - self._set_historical_pragma_version(Models.DatabaseVersion.CURRENT_DB_VERSION.value) - - def update_historical_database(self): - """ Updates the historical database with the latest changes IF necessary. 
""" - self.execute_many_statements_on_dbfile(self.historicaldb_file_path, self.version_schema_changes) - self.execute_statement_on_dbfile(self.historicaldb_file_path, self.create_index_query) - self.execute_statement_on_dbfile(self.historicaldb_file_path, self.create_table_header_query) - self._set_historical_pragma_version(Models.DatabaseVersion.CURRENT_DB_VERSION.value) - - def get_experiment_run_dc_with_max_id(self): - """ Get Current (latest) ExperimentRun data class. """ - return ExperimentRun.from_model(self._get_experiment_run_with_max_id()) - - def register_experiment_run_dc(self, experiment_run_dc): - self._insert_experiment_run(experiment_run_dc) - return ExperimentRun.from_model(self._get_experiment_run_with_max_id()) - - def update_experiment_run_dc_by_id(self, experiment_run_dc): - """ Requires ExperimentRun data class. """ - self._update_experiment_run(experiment_run_dc) - return ExperimentRun.from_model(self._get_experiment_run_with_max_id()) - - def _get_experiment_run_with_max_id(self): - """ Get Models.ExperimentRunRow for the maximum id run. """ - statement = self.get_built_select_statement("experiment_run", "run_id > 0 ORDER BY run_id DESC LIMIT 0, 1") - max_experiment_run = self.get_from_statement(self.historicaldb_file_path, statement) - if len(max_experiment_run) == 0: - raise Exception("No Experiment Runs registered.") - return self.experiment_run_row_model(*max_experiment_run[0]) - - def get_experiment_run_by_id(self, run_id): - # type: (int) -> ExperimentRun | None - if run_id: - return ExperimentRun.from_model(self._get_experiment_run_by_id(run_id)) - return None - - def _get_experiment_run_by_id(self, run_id): - # type: (int) -> namedtuple - statement = self.get_built_select_statement("experiment_run", "run_id=?") - arguments = (run_id,) - experiment_run = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - if len(experiment_run) == 0: - raise Exception("Experiment run {0} for experiment {1} does not exists.".format(run_id, self.expid)) - return self.experiment_run_row_model(*experiment_run[0]) - - def get_experiment_runs_dcs(self): - # type: () -> List[ExperimentRun] - experiment_run_rows = self._get_experiment_runs() - return [ExperimentRun.from_model(row) for row in experiment_run_rows] - - def _get_experiment_runs(self): - # type: () -> List[namedtuple] - statement = self.get_built_select_statement("experiment_run") - experiment_runs = self.get_from_statement(self.historicaldb_file_path, statement) - return [self.experiment_run_row_model(*row) for row in experiment_runs] - - def is_there_a_last_experiment_run(self): - statement = self.get_built_select_statement("experiment_run", "run_id > 0 ORDER BY run_id DESC LIMIT 0, 1") - max_experiment_run = self.get_from_statement(self.historicaldb_file_path, statement) - if len(max_experiment_run) > 0: - return True - return False - - def get_job_data_dcs_all(self): - # type: () -> List[JobData] - """ Gets all content from job_data ordered by id (from table). """ - return [JobData.from_model(row) for row in self.get_job_data_all()] - - def get_job_data_all(self): - """ Gets all content from job_data as list of Models.JobDataRow from database. 
""" - statement = self.get_built_select_statement("job_data", "id > 0 ORDER BY id") - job_data_rows = self.get_from_statement(self.historicaldb_file_path, statement) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def register_submitted_job_data_dc(self, job_data_dc): - """ Sets previous register to last=0 and inserts the new job_data_dc data class.""" - self._set_current_job_data_rows_last_to_zero_by_job_name(job_data_dc.job_name) - self._insert_job_data(job_data_dc) - return self.get_job_data_dc_unique_latest_by_job_name(job_data_dc.job_name) - - def _set_current_job_data_rows_last_to_zero_by_job_name(self, job_name): - """ Sets the column last = 0 for all job_rows by job_name and last = 1. """ - job_data_row_last = self._get_job_data_last_by_name(job_name) - job_data_dc_list = [JobData.from_model(row) for row in job_data_row_last] - for job_data_dc in job_data_dc_list: - job_data_dc.last = 0 - self._update_job_data_by_id(job_data_dc) - - def update_job_data_dc_by_id(self, job_data_dc): - """ Update JobData data class. Returns latest last=1 row from job_data by job_name. """ - self._update_job_data_by_id(job_data_dc) - return self.get_job_data_dc_unique_latest_by_job_name(job_data_dc.job_name) - - def update_list_job_data_dc_by_each_id(self, job_data_dcs): - """ Return length of updated list. """ - for job_data_dc in job_data_dcs: - self._update_job_data_by_id(job_data_dc) - return len(job_data_dcs) - - def get_job_data_dc_unique_latest_by_job_name(self, job_name): - """ Returns JobData data class for the latest job_data_row with last=1 by job_name. """ - job_data_row_last = self._get_job_data_last_by_name(job_name) - if len(job_data_row_last) > 0: - return JobData.from_model(job_data_row_last[0]) - return None - - def _get_job_data_last_by_name(self, job_name): - """ Get List of Models.JobDataRow for job_name and last=1 """ - statement = self.get_built_select_statement("job_data", "last=1 and job_name=? ORDER BY counter DESC") - arguments = (job_name,) - job_data_rows_last = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows_last] - - def get_job_data_dc_COMPLETED_by_wrapper_run_id(self, package_code, run_id): - # type: (int, int) -> List[JobData] - if not run_id or package_code <= Models.RowType.NORMAL: - return [] - job_data_rows = self._get_job_data_dc_COMPLETED_by_wrapper_run_id(package_code, run_id) - if len(job_data_rows) == 0: - return [] - return [JobData.from_model(row) for row in job_data_rows] - - def _get_job_data_dc_COMPLETED_by_wrapper_run_id(self, package_code, run_id): - # type: (int, int) -> List[namedtuple] - statement = self.get_built_select_statement("job_data", "run_id=? and rowtype=? and status=? ORDER BY id") - arguments = (run_id, package_code, "COMPLETED") - job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def get_job_data_dcs_last_by_run_id(self, run_id): - job_data_rows = self._get_job_data_last_by_run_id(run_id) - return [JobData.from_model(row) for row in job_data_rows] - - def _get_job_data_last_by_run_id(self, run_id): - """ Get List of Models.JobDataRow for last=1 and run_id """ - statement = self.get_built_select_statement("job_data", "run_id=? 
and last=1 and rowtype >= 2 ORDER BY id") - arguments = (run_id,) - job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def get_job_data_dcs_last_by_wrapper_code(self, wrapper_code): - if wrapper_code and wrapper_code > 2: - return [JobData.from_model(row) for row in self._get_job_data_last_by_wrapper_code(wrapper_code)] - else: - return [] - - def get_job_data_dcs_COMPLETED_by_section(self, section): - # type: (str) -> List[JobData] - arguments = {"status": "COMPLETED", "section": section} - job_data_rows = self._get_job_data_COMPLETD_by_section(section) - return [JobData.from_model(row) for row in job_data_rows] - - def _get_job_data_last_by_wrapper_code(self, wrapper_code): - """ Get List of Models.JobDataRow for last=1 and rowtype=wrapper_code """ - statement = self.get_built_select_statement("job_data", "rowtype = ? and last=1 ORDER BY id") - arguments = (wrapper_code,) - job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def _get_job_data_COMPLETD_by_section(self, section): - statement = self.get_built_select_statement("job_data", "status=? and (section=? or member=?) ORDER BY id") - arguments = ("COMPLETED", section, section) - job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows] - - - def get_all_last_job_data_dcs(self): - """ Gets JobData data classes in job_data for last=1. """ - job_data_rows = self._get_all_last_job_data_rows() - return [JobData.from_model(row) for row in job_data_rows] - - def _get_all_last_job_data_rows(self): - """ Get List of Models.JobDataRow for last=1. """ - statement = self.get_built_select_statement("job_data", "last=1 and rowtype >= 2") - job_data_rows = self.get_from_statement(self.historicaldb_file_path, statement) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def get_job_data_dcs_by_name(self, job_name): - # type: (str) -> List[JobData] - job_data_rows = self._get_job_data_by_name(job_name) - return [JobData.from_model(row) for row in job_data_rows] - - def _get_job_data_by_name(self, job_name): - # type: (str) -> List[namedtuple] - """ Get List of Models.JobDataRow for job_name """ - statement = self.get_built_select_statement("job_data", "job_name=? ORDER BY counter DESC") - arguments = (job_name,) - job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def _insert_job_data(self, job_data): - # type : (JobData) -> int - """ Insert data class JobData into job_data table. """ - statement = ''' INSERT INTO job_data(counter, job_name, created, modified, - submit, start, finish, status, rowtype, ncpus, - wallclock, qos, energy, date, section, member, chunk, last, - platform, job_id, extra_data, nnodes, run_id, MaxRSS, AveRSS, - out, err, rowstatus, children, platform_output) - VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) 
''' - arguments = (job_data.counter, job_data.job_name, HUtils.get_current_datetime(), HUtils.get_current_datetime(), - job_data.submit, job_data.start, job_data.finish, job_data.status, job_data.rowtype, job_data.ncpus, - job_data.wallclock, job_data.qos, job_data.energy, job_data.date, job_data.section, job_data.member, job_data.chunk, job_data.last, - job_data.platform, job_data.job_id, job_data.extra_data, job_data.nnodes, job_data.run_id, job_data.MaxRSS, job_data.AveRSS, - job_data.out, job_data.err, job_data.rowstatus, job_data.children, job_data.platform_output) - return self.insert_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - - def _insert_experiment_run(self, experiment_run): - """ Insert data class ExperimentRun into database """ - statement = ''' INSERT INTO experiment_run(created, modified, start, finish, - chunk_unit, chunk_size, completed, total, - failed, queuing, running, - submitted, suspended, metadata) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?) ''' - arguments = (HUtils.get_current_datetime(), HUtils.get_current_datetime(), experiment_run.start, experiment_run.finish, - experiment_run.chunk_unit, experiment_run.chunk_size, experiment_run.completed, experiment_run.total, - experiment_run.failed, experiment_run.queuing, experiment_run.running, - experiment_run.submitted, experiment_run.suspended, experiment_run.metadata) - return self.insert_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - - def update_many_job_data_change_status(self, changes): - # type : (List[Tuple]) -> None - """ - Update many job_data rows in bulk. Requires a changes list of argument tuples. - Only updates finish, modified, status, and rowstatus by id. - """ - statement = ''' UPDATE job_data SET modified=?, status=?, rowstatus=? WHERE id=? ''' - self.execute_many_statement_with_arguments_on_dbfile(self.historicaldb_file_path, statement, changes) - - def _update_job_data_by_id(self, job_data_dc): - """ - Update job_data table with data class JobData. - Update last, submit, start, finish, modified, job_id, status, energy, extra_data, nnodes, ncpus, rowstatus, out, err by id. - """ - statement = ''' UPDATE job_data SET last=?, submit=?, start=?, finish=?, modified=?, - job_id=?, status=?, energy=?, extra_data=?, - nnodes=?, ncpus=?, rowstatus=?, out=?, err=?, - children=?, platform_output=?, rowtype=? WHERE id=? ''' - arguments = (job_data_dc.last, job_data_dc.submit, job_data_dc.start, job_data_dc.finish, HUtils.get_current_datetime(), - job_data_dc.job_id, job_data_dc.status, job_data_dc.energy, job_data_dc.extra_data, - job_data_dc.nnodes, job_data_dc.ncpus, job_data_dc.rowstatus, job_data_dc.out, job_data_dc.err, - job_data_dc.children, job_data_dc.platform_output, job_data_dc.rowtype, job_data_dc._id) - self.execute_statement_with_arguments_on_dbfile(self.historicaldb_file_path, statement, arguments) - - def _update_experiment_run(self, experiment_run_dc): - """ - Update experiment_run table with data class ExperimentRun. - Updates by run_id (finish, chunk_unit, chunk_size, completed, total, failed, queuing, running, submitted, suspended) - """ - statement = ''' UPDATE experiment_run SET finish=?, chunk_unit=?, chunk_size=?, completed=?, total=?, - failed=?, queuing=?, running=?, submitted=?, - suspended=?, modified=? WHERE run_id=? 
''' - arguments = (experiment_run_dc.finish, experiment_run_dc.chunk_unit, experiment_run_dc.chunk_size, experiment_run_dc.completed, experiment_run_dc.total, - experiment_run_dc.failed, experiment_run_dc.queuing, experiment_run_dc.running, experiment_run_dc.submitted, - experiment_run_dc.suspended, HUtils.get_current_datetime(), experiment_run_dc.run_id) - self.execute_statement_with_arguments_on_dbfile(self.historicaldb_file_path, statement, arguments) - - def _get_job_data_last_by_run_id_and_finished(self, run_id): - """ Get List of Models.JobDataRow for last=1, finished > 0 and run_id """ - statement = self.get_built_select_statement("job_data", "run_id=? and last=1 and finish > 0 and rowtype >= 2 ORDER BY id") - arguments = (run_id,) - job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - return [self.job_data_row_model(*row) for row in job_data_rows] - - def get_job_data_max_counter(self): - """ The max counter is the maximum count value for the count column in job_data. """ - statement = "SELECT MAX(counter) as maxcounter FROM job_data" - counter_result = self.get_from_statement(self.historicaldb_file_path, statement) - if len(counter_result) <= 0: - return DEFAULT_MAX_COUNTER - else: - max_counter = Models.MaxCounterRow(*counter_result[0]).maxcounter - return max_counter if max_counter else DEFAULT_MAX_COUNTER - - def delete_job_data(self, id): - """ Deletes row from job_data by id. Useful for testing. """ - statement = ''' DELETE FROM job_data WHERE id=? ''' - arguments = (id, ) - self.execute_statement_with_arguments_on_dbfile(self.historicaldb_file_path, statement, arguments) - - def delete_experiment_run(self, run_id): - """ Deletes row in experiment_run by run_id. Useful for testing. """ - statement = ''' DELETE FROM experiment_run where run_id=? ''' - arguments = (run_id,) - self.execute_statement_with_arguments_on_dbfile(self.historicaldb_file_path, statement, arguments) - - def _set_historical_pragma_version(self, version=10): - """ Sets the pragma version. """ - statement = "pragma user_version={v:d};".format(v=version) - self.execute_statement_on_dbfile(self.historicaldb_file_path, statement) - - def _get_pragma_version(self): - # type: () -> int - """ Gets current pragma version as int. """ - statement = "pragma user_version;" - pragma_result = self.get_from_statement(self.historicaldb_file_path, statement) - if len(pragma_result) <= 0: - raise Exception("Error while getting the pragma version. This might be a signal of a deeper problem. 
Review previous errors.")
-        return int(Models.PragmaVersion(*pragma_result[0]).version)
+class ExperimentHistoryDbManager:
+    """Manages read operations on the experiment history database."""
+
+    def __init__(self, expid: str):
+        """Requires expid"""
+        self.expid = expid
+        self.run_db = ExperimentRunDbAdapter(expid)
+        self.job_data_db = JobDataDbAdapter(expid)
+        self.historicaldb_file_path = ExperimentPaths(expid).job_data_db
+
+    def my_database_exists(self) -> bool:
+        return os.path.exists(self.historicaldb_file_path)
+
+    def get_experiment_run_dc_with_max_id(self) -> ExperimentRun:
+        """Get Current (latest) ExperimentRun data class."""
+        return ExperimentRun.from_model(self._get_experiment_run_with_max_id())
+
+    def _get_experiment_run_with_max_id(self) -> Dict[str, Any]:
+        """Get the row (as a dict) of the experiment run with the maximum run_id."""
+        max_experiment_run = self.run_db.get_last_run()
+        if not max_experiment_run:
+            raise Exception("No Experiment Runs registered.")
+        return max_experiment_run
+
+    def get_experiment_run_by_id(self, run_id: int) -> Optional[ExperimentRun]:
+        if run_id:
+            return ExperimentRun.from_model(self._get_experiment_run_by_id(run_id))
+        return None
+
+    def _get_experiment_run_by_id(self, run_id: int) -> Dict[str, Any]:
+        experiment_run = self.run_db.get_run_by_id(run_id)
+        if not experiment_run:
+            raise Exception(
+                "Experiment run {0} for experiment {1} does not exist.".format(
+                    run_id, self.expid
+                )
+            )
+        return experiment_run
+
+    def get_experiment_runs_dcs(self) -> List[ExperimentRun]:
+        experiment_run_rows = self._get_experiment_runs()
+        return [ExperimentRun.from_model(row) for row in experiment_run_rows]
+
+    def _get_experiment_runs(self):
+        experiment_runs = self.run_db.get_all()
+        return experiment_runs
+
+    def get_job_data_dcs_all(self) -> List[JobData]:
+        """Gets all content from job_data ordered by id (from table)."""
+        return [JobData.from_model(row) for row in self.get_job_data_all()]
+
+    def get_job_data_all(self):
+        """Gets all content from job_data as a list of row dicts from the database."""
+        job_data_rows = self.job_data_db.get_all()
+        return job_data_rows
+
+    def get_job_data_dc_COMPLETED_by_wrapper_run_id(
+        self, package_code: int, run_id: int
+    ) -> List[JobData]:
+        if not run_id or package_code <= Models.RowType.NORMAL:
+            return []
+        job_data_rows = self._get_job_data_dc_COMPLETED_by_wrapper_run_id(
+            package_code, run_id
+        )
+        if len(job_data_rows) == 0:
+            return []
+        return [JobData.from_model(row) for row in job_data_rows]
+
+    def _get_job_data_dc_COMPLETED_by_wrapper_run_id(
+        self, package_code: int, run_id: int
+    ) -> List[Dict[str, Any]]:
+        job_data_rows = self.job_data_db.get_job_data_COMPLETED_by_rowtype_run_id(
+            rowtype=package_code, run_id=run_id
+        )
+        return job_data_rows
+
+    def get_job_data_dcs_COMPLETED_by_section(self, section: str) -> List[JobData]:
+        job_data_rows = self._get_job_data_COMPLETED_by_section(section)
+        return [JobData.from_model(row) for row in job_data_rows]
+
+    def _get_job_data_COMPLETED_by_section(self, section: str) -> List[Dict[str, Any]]:
+        job_data_rows = self.job_data_db.get_job_data_COMPLETED_by_section(section)
+        return job_data_rows
+
+    def get_all_last_job_data_dcs(self):
+        """Gets JobData data classes in job_data for last=1."""
+        job_data_rows = self._get_all_last_job_data_rows()
+        return [JobData.from_model(row) for row in job_data_rows]
+
+    def _get_all_last_job_data_rows(self):
+        """Get the list of job_data row dicts for last=1."""
+        job_data_rows = self.job_data_db.get_last_job_data()
+        return
job_data_rows + + def get_job_data_dcs_by_name(self, job_name: str) -> List[JobData]: + job_data_rows = self._get_job_data_by_name(job_name) + return [JobData.from_model(row) for row in job_data_rows] + + def _get_job_data_by_name(self, job_name: str) -> List[Dict[str, Any]]: + """Get List of Models.JobDataRow for job_name""" + job_data_rows = self.job_data_db.get_jobs_by_name(job_name) + return job_data_rows diff --git a/autosubmit_api/history/experiment_history.py b/autosubmit_api/history/experiment_history.py index f9ae423..a392571 100644 --- a/autosubmit_api/history/experiment_history.py +++ b/autosubmit_api/history/experiment_history.py @@ -28,8 +28,7 @@ from typing import List, Dict, Tuple, Any SECONDS_WAIT_PLATFORM = 60 class ExperimentHistory(): - def __init__(self, expid, basic_config, experiment_history_db_manager, logger): - # type: (str, APIBasicConfig, ExperimentHistoryDbManager, Logging) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig, experiment_history_db_manager: ExperimentHistoryDbManager, logger: Logging): self.expid = expid self._log = logger self.basic_config = basic_config @@ -41,11 +40,6 @@ class ExperimentHistory(): self._log.log(str(exp), traceback.format_exc()) self.manager = None - def is_header_ready(self): - if self.manager: - return self.manager.is_header_ready_db_version() - return False - def get_historic_job_data(self, job_name): # type: (str) -> List[Dict[str, Any]] result = [] -- GitLab From ef37327a77aa949bfd852781e3ca489967ca77be Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 28 May 2024 09:51:03 +0200 Subject: [PATCH 18/26] replace _mapping to _asdict() --- autosubmit_api/bgtasks/tasks/status_updater.py | 2 +- autosubmit_api/database/adapters/experiment.py | 2 +- .../database/adapters/experiment_details.py | 2 +- autosubmit_api/database/adapters/graph_draw.py | 2 +- .../database/adapters/job_packages.py | 4 ++-- autosubmit_api/experiment/common_requests.py | 4 ++-- autosubmit_api/views/v4.py | 2 +- .../workers/populate_details/populate.py | 2 +- tests/conftest.py | 10 +++++----- tests/utils.py | 18 +++++++++--------- 10 files changed, 24 insertions(+), 24 deletions(-) diff --git a/autosubmit_api/bgtasks/tasks/status_updater.py b/autosubmit_api/bgtasks/tasks/status_updater.py index 0177dd1..d49eb6b 100644 --- a/autosubmit_api/bgtasks/tasks/status_updater.py +++ b/autosubmit_api/bgtasks/tasks/status_updater.py @@ -37,7 +37,7 @@ class StatusUpdater(BackgroundTaskTemplate): Get the experiments list """ query_result = ExperimentDbAdapter().get_all() - return [ExperimentModel.model_validate(row._mapping) for row in query_result] + return [ExperimentModel.model_validate(row._asdict()) for row in query_result] @classmethod def _check_exp_running(cls, expid: str) -> bool: diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py index 26833b1..d5c0cbe 100644 --- a/autosubmit_api/database/adapters/experiment.py +++ b/autosubmit_api/database/adapters/experiment.py @@ -41,4 +41,4 @@ class ExperimentDbAdapter: tables.ExperimentTable.name == expid ) ).one() - return row._mapping + return row._asdict() diff --git a/autosubmit_api/database/adapters/experiment_details.py b/autosubmit_api/database/adapters/experiment_details.py index b402a2a..ef026f4 100644 --- a/autosubmit_api/database/adapters/experiment_details.py +++ b/autosubmit_api/database/adapters/experiment_details.py @@ -45,4 +45,4 @@ class ExperimentDetailsDbAdapter: tables.DetailsTable.exp_id == exp_id ) ).one() - return 
row._mapping + return row._asdict() diff --git a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/adapters/graph_draw.py index 6482a5b..691b7fb 100644 --- a/autosubmit_api/database/adapters/graph_draw.py +++ b/autosubmit_api/database/adapters/graph_draw.py @@ -23,7 +23,7 @@ class ExpGraphDrawDBAdapter: def get_all(self) -> List[Dict[str, Any]]: with self.table_manager.get_connection() as conn: result = self.table_manager.select_all(conn) - return [x._mapping for x in result] + return [x._asdict() for x in result] def delete_all(self) -> int: with self.table_manager.get_connection() as conn: diff --git a/autosubmit_api/database/adapters/job_packages.py b/autosubmit_api/database/adapters/job_packages.py index 8962902..46c4d6b 100644 --- a/autosubmit_api/database/adapters/job_packages.py +++ b/autosubmit_api/database/adapters/job_packages.py @@ -21,7 +21,7 @@ class JobPackagesDbAdapter: """ with self.table_manager.get_connection() as conn: rows = conn.execute(select(self.table_manager.table)).all() - return [row._mapping for row in rows] + return [row._asdict() for row in rows] class WrapperJobPackagesDbAdapter: @@ -39,4 +39,4 @@ class WrapperJobPackagesDbAdapter: """ with self.table_manager.get_connection() as conn: rows = conn.execute(select(self.table_manager.table)).all() - return [row._mapping for row in rows] + return [row._asdict() for row in rows] diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py index af81e9e..dadd50d 100644 --- a/autosubmit_api/experiment/common_requests.py +++ b/autosubmit_api/experiment/common_requests.py @@ -1287,7 +1287,7 @@ def search_experiment_by_id( ) for raw_row in query_result: - row = raw_row._mapping + row = raw_row._asdict() expid = str(row["name"]) completed = "NA" total = "NA" @@ -1370,7 +1370,7 @@ def get_current_running_exp(): query_result, _ = ExperimentJoinDbAdapter().search(only_active=True) for raw_row in query_result: - row = raw_row._mapping + row = raw_row._asdict() expid = str(row["name"]) status = "NOT RUNNING" completed = "NA" diff --git a/autosubmit_api/views/v4.py b/autosubmit_api/views/v4.py index 8794b74..0d9fc66 100644 --- a/autosubmit_api/views/v4.py +++ b/autosubmit_api/views/v4.py @@ -247,7 +247,7 @@ class ExperimentView(MethodView): experiments = [] for raw_exp in query_result: exp_builder = ExperimentBuilder() - exp_builder.produce_base_from_dict(raw_exp._mapping) + exp_builder.produce_base_from_dict(raw_exp._asdict()) # Get additional data from config files try: diff --git a/autosubmit_api/workers/populate_details/populate.py b/autosubmit_api/workers/populate_details/populate.py index ade623d..c23244f 100644 --- a/autosubmit_api/workers/populate_details/populate.py +++ b/autosubmit_api/workers/populate_details/populate.py @@ -35,7 +35,7 @@ class DetailsProcessor: for exp in query_result: experiments.append( - Experiment(exp._mapping.get("id"), exp._mapping.get("name")) + Experiment(exp._asdict().get("id"), exp._asdict().get("name")) ) return experiments diff --git a/tests/conftest.py b/tests/conftest.py index 1728ee8..b7269a7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -174,7 +174,7 @@ def fixture_gen_rc_pg(fixture_temp_dir_copy_exclude_db: str): yield fixture_temp_dir_copy_exclude_db -@pytest.fixture +@pytest.fixture(scope="session") def fixture_pg_db(fixture_gen_rc_pg: str): """ This fixture cleans and setup a PostgreSQL database for testing purposes. 
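The `scope="session"` change means pytest builds these Postgres fixtures once per test session instead of once per test; a generic sketch of the pattern (not the project's exact fixture, and the connection URL is a placeholder):

import pytest
from sqlalchemy import create_engine

@pytest.fixture(scope="session")
def shared_engine():
    # Created once for the whole session and reused by every test,
    # so the test database is not torn down and re-initialized between tests.
    engine = create_engine("postgresql://user:pass@localhost:5432/autosubmit_test")  # placeholder URL
    yield engine
    engine.dispose()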
@@ -188,12 +188,12 @@ def fixture_pg_db(fixture_gen_rc_pg: str): yield (fixture_gen_rc_pg, engine) - with engine.connect() as conn: - utils.setup_pg_db(conn) - conn.commit() + # with engine.connect() as conn: + # utils.setup_pg_db(conn) + # conn.commit() -@pytest.fixture +@pytest.fixture(scope="session") def fixture_pg_db_copy_all(fixture_pg_db: Tuple[str, Engine]): """ This fixture recursively search all the .db files in the FAKE_EXP_DIR and copies them to the test database diff --git a/tests/utils.py b/tests/utils.py index 186a17e..e528f3a 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -71,7 +71,7 @@ def copy_structure_db(filepath: str, engine: Engine): conn.execute(CreateTable(target_table, if_not_exists=True)) if len(structures_rows) > 0: conn.execute( - insert(target_table), [row._mapping for row in structures_rows] + insert(target_table), [row._asdict() for row in structures_rows] ) conn.commit() @@ -96,12 +96,12 @@ def copy_job_data_db(filepath: str, engine: Engine): target_table = tables.table_change_schema(expid, tables.JobDataTable) conn.execute(CreateTable(target_table, if_not_exists=True)) if len(job_data_rows) > 0: - conn.execute(insert(target_table),[row._mapping for row in job_data_rows]) + conn.execute(insert(target_table),[row._asdict() for row in job_data_rows]) # Experiment run target_table = tables.table_change_schema(expid, tables.experiment_run_table) conn.execute(CreateTable(target_table, if_not_exists=True)) if len(exprun_rows) > 0: - conn.execute(insert(target_table),[row._mapping for row in exprun_rows]) + conn.execute(insert(target_table),[row._asdict() for row in exprun_rows]) conn.commit() @@ -124,7 +124,7 @@ def copy_graph_data_db(filepath: str, engine: Engine): target_table = tables.table_change_schema(expid, tables.GraphDataTable) conn.execute(CreateTable(target_table, if_not_exists=True)) if len(graph_rows) > 0: - conn.execute(insert(target_table),[row._mapping for row in graph_rows]) + conn.execute(insert(target_table),[row._asdict() for row in graph_rows]) conn.commit() @@ -141,9 +141,9 @@ def copy_autosubmit_db(filepath: str, engine: Engine): # Copy data to the Postgres database with engine.connect() as conn: conn.execute(CreateTable(tables.ExperimentTable.__table__, if_not_exists=True)) - conn.execute(insert(tables.ExperimentTable),[row._mapping for row in exp_rows]) + conn.execute(insert(tables.ExperimentTable),[row._asdict() for row in exp_rows]) conn.execute(CreateTable(tables.DetailsTable.__table__, if_not_exists=True)) - conn.execute(insert(tables.DetailsTable),[row._mapping for row in details_rows]) + conn.execute(insert(tables.DetailsTable),[row._asdict() for row in details_rows]) conn.commit() @@ -159,7 +159,7 @@ def copy_as_times_db(filepath: str, engine: Engine): # Copy data to the Postgres database with engine.connect() as conn: conn.execute(CreateTable(tables.ExperimentStatusTable.__table__, if_not_exists=True)) - conn.execute(insert(tables.ExperimentStatusTable),[row._mapping for row in as_times_rows]) + conn.execute(insert(tables.ExperimentStatusTable),[row._asdict() for row in as_times_rows]) conn.commit() @@ -184,10 +184,10 @@ def copy_job_packages_db(filepath: str, engine: Engine): target_table = tables.table_change_schema(expid, tables.JobPackageTable) conn.execute(CreateTable(target_table, if_not_exists=True)) if len(job_packages_rows) > 0: - conn.execute(insert(target_table),[row._mapping for row in job_packages_rows]) + conn.execute(insert(target_table),[row._asdict() for row in job_packages_rows]) # Wrapper job packages 
target_table = tables.table_change_schema(expid, tables.WrapperJobPackageTable) conn.execute(CreateTable(target_table, if_not_exists=True)) if len(wrapper_job_packages_rows) > 0: - conn.execute(insert(target_table),[row._mapping for row in wrapper_job_packages_rows]) + conn.execute(insert(target_table),[row._asdict() for row in wrapper_job_packages_rows]) conn.commit() -- GitLab From 34763f81df3045a899a15fd5fac88a993a0ebfc0 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 28 May 2024 11:07:58 +0200 Subject: [PATCH 19/26] fix fixtures --- autosubmit_api/config/basicConfig.py | 12 +++--- autosubmit_api/database/table_manager.py | 9 +++-- tests/conftest.py | 7 +++- tests/test_db_adapters.py | 29 +++++++++++++++ tests/test_fixtures.py | 45 +++++++++++------------ tests/test_graph.py | 47 +++++++++++------------- 6 files changed, 88 insertions(+), 61 deletions(-) create mode 100644 tests/test_db_adapters.py diff --git a/autosubmit_api/config/basicConfig.py b/autosubmit_api/config/basicConfig.py index 30b5e1a..bc7970e 100644 --- a/autosubmit_api/config/basicConfig.py +++ b/autosubmit_api/config/basicConfig.py @@ -31,8 +31,8 @@ class APIBasicConfig(BasicConfig): FILE_STATUS_DIR = os.path.join(os.path.expanduser('~'), 'autosubmit', 'metadata', 'test') FILE_STATUS_DB = 'status.db' ALLOWED_CLIENTS = set([]) - DATABASE_BACKEND = "sqlite" # TODO Move to the config parser repo - DATABASE_CONN_URL = "" # TODO Move to the config parser repo + # DATABASE_BACKEND = "sqlite" # TODO Move to the config parser repo + # DATABASE_CONN_URL = "" # TODO Move to the config parser repo @staticmethod def __read_file_config(file_path): @@ -55,10 +55,10 @@ class APIBasicConfig(BasicConfig): APIBasicConfig.FILE_STATUS_DB = parser.get('statusdb', 'filename') if parser.has_option('clients', 'authorized'): APIBasicConfig.ALLOWED_CLIENTS = set(parser.get('clients', 'authorized').split()) - if parser.has_option('database', 'backend'): - APIBasicConfig.DATABASE_BACKEND = parser.get('database', 'backend') - if parser.has_option('database', 'connection_url'): - APIBasicConfig.DATABASE_CONN_URL = parser.get('database', 'connection_url') + # if parser.has_option('database', 'backend'): + # APIBasicConfig.DATABASE_BACKEND = parser.get('database', 'backend') + # if parser.has_option('database', 'connection_url'): + # APIBasicConfig.DATABASE_CONN_URL = parser.get('database', 'connection_url') @staticmethod diff --git a/autosubmit_api/database/table_manager.py b/autosubmit_api/database/table_manager.py index eb0709d..743f214 100644 --- a/autosubmit_api/database/table_manager.py +++ b/autosubmit_api/database/table_manager.py @@ -1,12 +1,13 @@ from abc import ABC, abstractmethod from typing import Any, Dict, List, Optional, Type, Union from sqlalchemy import Connection, Engine, Table, delete, insert, select -from autosubmitconfigparser.config.basicconfig import BasicConfig from sqlalchemy.schema import CreateTable, CreateSchema, DropTable from sqlalchemy.orm import DeclarativeBase from autosubmit.database import tables, session +from autosubmit_api.config.basicConfig import APIBasicConfig + class DbTableManager(ABC): engine: Engine @@ -113,10 +114,10 @@ def create_db_table_manager( :param db_filepath: File path location in case of SQLite is used as database backend :param schema: Almost always same as expid. Postgres database schema in case this is a distributed table. 
""" - BasicConfig.read() - if BasicConfig.DATABASE_BACKEND == "postgres": + APIBasicConfig.read() + if APIBasicConfig.DATABASE_BACKEND == "postgres": return PostgresDbTableManager(table, db_filepath, schema) - elif BasicConfig.DATABASE_BACKEND == "sqlite": + elif APIBasicConfig.DATABASE_BACKEND == "sqlite": return SQLiteDbTableManager(table, db_filepath, schema) else: raise Exception("Invalid DATABASE_BACKEND") diff --git a/tests/conftest.py b/tests/conftest.py index b7269a7..dca9833 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,8 +9,10 @@ import pytest from autosubmit_api.app import create_app from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api import config +from autosubmit.database import session from tests import utils from sqlalchemy import Engine, create_engine +from sqlalchemy.orm import scoped_session, sessionmaker FAKE_EXP_DIR = "./tests/experiments/" DEFAULT_DATABASE_CONN_URL = ( @@ -155,7 +157,7 @@ def fixture_gen_rc_pg(fixture_temp_dir_copy_exclude_db: str): "[database]", f"path = {fixture_temp_dir_copy_exclude_db}", "backend = postgres", - f"conn_url = {conn_url}", + f"connection_url = {conn_url}", "[local]", f"path = {fixture_temp_dir_copy_exclude_db}", "[globallogs]", @@ -235,4 +237,7 @@ def fixture_pg( "AUTOSUBMIT_CONFIGURATION", os.path.join(fixture_pg_db_copy_all[0], ".autosubmitrc"), ) + # Mock the session because it is initialized before the fixture + mock_session = scoped_session(sessionmaker(bind=fixture_pg_db_copy_all[1])) + monkeypatch.setattr(session, "Session", mock_session) yield fixture_pg_db_copy_all[0] diff --git a/tests/test_db_adapters.py b/tests/test_db_adapters.py new file mode 100644 index 0000000..be0c4b2 --- /dev/null +++ b/tests/test_db_adapters.py @@ -0,0 +1,29 @@ +from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter + + +class TestExpGraphDrawDBAdapter: + def test_operation(self, fixture_mock_basic_config): + expid = "g001" + graph_draw_db = ExpGraphDrawDBAdapter(expid) + + # Create table + graph_draw_db.create_table() + + # Table exists and is empty + assert graph_draw_db.get_all() == [] + + # Insert data + data = [ + {"id": 1, "job_name": "job1", "x": 1, "y": 2}, + {"id": 2, "job_name": "job2", "x": 2, "y": 3}, + ] + assert graph_draw_db.insert_many(data) == len(data) + + # Get data + assert graph_draw_db.get_all() == data + + # Delete data + assert graph_draw_db.delete_all() == len(data) + + # Table is empty + assert graph_draw_db.get_all() == [] diff --git a/tests/test_fixtures.py b/tests/test_fixtures.py index 75809d4..7021750 100644 --- a/tests/test_fixtures.py +++ b/tests/test_fixtures.py @@ -1,14 +1,30 @@ import os from typing import Tuple - -import pytest from sqlalchemy import Engine, select - from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.database import tables from tests.utils import get_schema_names +def test_mock_basic_config(fixture_mock_basic_config: APIBasicConfig): + assert "AUTOSUBMIT_CONFIGURATION" in os.environ and os.path.exists( + os.environ["AUTOSUBMIT_CONFIGURATION"] + ) + + # Reading the configuration file + APIBasicConfig.read() + + assert APIBasicConfig.DATABASE_BACKEND in ["sqlite", "postgres"] + assert "tmp" in APIBasicConfig.LOCAL_ROOT_DIR + assert APIBasicConfig.LOCAL_ROOT_DIR in os.environ["AUTOSUBMIT_CONFIGURATION"] + + if APIBasicConfig.DATABASE_BACKEND == "sqlite": + assert APIBasicConfig.DB_FILE == "autosubmit.db" + + elif APIBasicConfig.DATABASE_BACKEND == "postgres": + assert APIBasicConfig.DATABASE_CONN_URL + 
+ class TestSQLiteFixtures: def test_fixture_temp_dir_copy(self, fixture_temp_dir_copy: str): """ @@ -37,25 +53,6 @@ class TestSQLiteFixtures: assert "filename = autosubmit.db" in content assert "backend = sqlite" in content - @pytest.mark.skip(reason="TODO: Fix this test") - def test_mock_basic_config( - self, fixture_mock_basic_config: APIBasicConfig, fixture_gen_rc_sqlite: str - ): - rc_file = os.path.join(fixture_gen_rc_sqlite, ".autosubmitrc") - # Environment variable should be set and should point to the .autosubmitrc file - assert "AUTOSUBMIT_CONFIGURATION" in os.environ and os.path.exists( - os.environ["AUTOSUBMIT_CONFIGURATION"] - ) - assert os.environ["AUTOSUBMIT_CONFIGURATION"] == rc_file - - # Reading the configuration file - APIBasicConfig.read() - assert APIBasicConfig.GRAPHDATA_DIR == f"{fixture_gen_rc_sqlite}/metadata/graph" - assert APIBasicConfig.LOCAL_ROOT_DIR == fixture_gen_rc_sqlite - assert APIBasicConfig.DATABASE_BACKEND == "sqlite" - assert APIBasicConfig.DB_DIR == fixture_gen_rc_sqlite - assert APIBasicConfig.DB_FILE == "autosubmit.db" - class TestPostgresFixtures: def test_fixture_temp_dir_copy_exclude_db( @@ -95,10 +92,10 @@ class TestPostgresFixtures: def test_fixture_pg_db(self, fixture_pg_db: Tuple[str, Engine]): engine = fixture_pg_db[1] - # Check if the public schema exists and is the only one + # Check if the public schema exists with engine.connect() as conn: schema_names = get_schema_names(conn) - assert schema_names == ["public"] + assert "public" in schema_names def test_fixture_pg_db_copy_all(self, fixture_pg_db_copy_all: Tuple[str, Engine]): engine = fixture_pg_db_copy_all[1] diff --git a/tests/test_graph.py b/tests/test_graph.py index ba14361..842764d 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,6 +1,4 @@ -import os -from sqlalchemy import create_engine from autosubmit_api.builders.configuration_facade_builder import ( AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector, @@ -9,14 +7,12 @@ from autosubmit_api.builders.joblist_loader_builder import ( JobListLoaderBuilder, JobListLoaderDirector, ) -from autosubmit_api.database import tables from autosubmit_api.components.experiment.graph_drawer import ExperimentGraphDrawing +from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter from autosubmit_api.monitor.monitor import Monitor -from autosubmit_api.persistance.experiment import ExperimentPaths class TestPopulateDB: - def test_monitor_dot(self, fixture_mock_basic_config): expid = "a003" job_list_loader = JobListLoaderDirector( @@ -48,28 +44,27 @@ class TestPopulateDB: AutosubmitConfigurationFacadeBuilder(expid) ).build_autosubmit_configuration_facade() - exp_paths = ExperimentPaths(expid) - with create_engine( - f"sqlite:///{ os.path.abspath(exp_paths.graph_data_db)}" - ).connect() as conn: - conn.execute(tables.graph_data_table.delete()) - conn.commit() + # Create adapter + graph_draw_db = ExpGraphDrawDBAdapter(expid) + + # Delete content of table + graph_draw_db.delete_all() - experimentGraphDrawing.calculate_drawing( - allJobs=job_list_loader.jobs, - independent=False, - num_chunks=autosubmit_configuration_facade.chunk_size, - job_dictionary=job_list_loader.job_dictionary, - ) + experimentGraphDrawing.calculate_drawing( + allJobs=job_list_loader.jobs, + independent=False, + num_chunks=autosubmit_configuration_facade.chunk_size, + job_dictionary=job_list_loader.job_dictionary, + ) - assert ( - experimentGraphDrawing.coordinates - and len(experimentGraphDrawing.coordinates) == 8 - ) + assert ( + 
experimentGraphDrawing.coordinates + and len(experimentGraphDrawing.coordinates) == 8 + ) - rows = conn.execute(tables.graph_data_table.select()).all() + rows = graph_draw_db.get_all() - assert len(rows) == 8 - for job in rows: - job_name: str = job.job_name - assert job_name.startswith(expid) + assert len(rows) == 8 + for job in rows: + job_name: str = job.get("job_name") + assert job_name.startswith(expid) \ No newline at end of file -- GitLab From 9e073ca42d183c8b86f956aab3229ce8ec028bd4 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 28 May 2024 13:29:08 +0200 Subject: [PATCH 20/26] migrate SQLAlchemy table def to Core --- .../database/adapters/experiment.py | 2 +- .../database/adapters/experiment_details.py | 2 +- .../database/adapters/experiment_run.py | 2 +- .../database/adapters/experiment_status.py | 2 +- autosubmit_api/database/adapters/job_data.py | 2 +- .../database/adapters/join/experiment_join.py | 48 ++++----- autosubmit_api/database/table_manager.py | 6 +- autosubmit_api/database/tables.py | 98 ++++++++----------- autosubmit_api/views/v4.py | 2 +- tests/bgtasks/test_status_updater.py | 6 +- tests/test_bg_tasks.py | 4 +- tests/utils.py | 10 +- 12 files changed, 85 insertions(+), 99 deletions(-) diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py index d5c0cbe..801da16 100644 --- a/autosubmit_api/database/adapters/experiment.py +++ b/autosubmit_api/database/adapters/experiment.py @@ -38,7 +38,7 @@ class ExperimentDbAdapter: with self.table_manager.get_connection() as conn: row = conn.execute( self.table_manager.table.select().where( - tables.ExperimentTable.name == expid + tables.ExperimentTable.c.name == expid ) ).one() return row._asdict() diff --git a/autosubmit_api/database/adapters/experiment_details.py b/autosubmit_api/database/adapters/experiment_details.py index ef026f4..8f6ce63 100644 --- a/autosubmit_api/database/adapters/experiment_details.py +++ b/autosubmit_api/database/adapters/experiment_details.py @@ -42,7 +42,7 @@ class ExperimentDetailsDbAdapter: with self.table_manager.get_connection() as conn: row = conn.execute( self.table_manager.table.select().where( - tables.DetailsTable.exp_id == exp_id + tables.DetailsTable.c.exp_id == exp_id ) ).one() return row._asdict() diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/adapters/experiment_run.py index 01f7d3f..17717dd 100644 --- a/autosubmit_api/database/adapters/experiment_run.py +++ b/autosubmit_api/database/adapters/experiment_run.py @@ -10,7 +10,7 @@ class ExperimentRunDbAdapter: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( - table=tables.experiment_run_table, + table=tables.ExperimentRunTable, db_filepath=ExperimentPaths(expid).job_data_db, schema=expid, ) diff --git a/autosubmit_api/database/adapters/experiment_status.py b/autosubmit_api/database/adapters/experiment_status.py index 1cfb5d5..b91d783 100644 --- a/autosubmit_api/database/adapters/experiment_status.py +++ b/autosubmit_api/database/adapters/experiment_status.py @@ -63,7 +63,7 @@ class ExperimentStatusDbAdapter: """ with self.table_manager.get_connection() as conn: del_stmnt = delete(tables.ExperimentStatusTable).where( - tables.ExperimentStatusTable.name == expid + tables.ExperimentStatusTable.c.name == expid ) ins_stmnt = insert(tables.ExperimentStatusTable).values( exp_id=exp_id, diff --git a/autosubmit_api/database/adapters/job_data.py 
b/autosubmit_api/database/adapters/job_data.py index f3f762d..8e5266b 100644 --- a/autosubmit_api/database/adapters/job_data.py +++ b/autosubmit_api/database/adapters/job_data.py @@ -10,7 +10,7 @@ class JobDataDbAdapter: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( - table=tables.job_data_table, + table=tables.JobDataTable, db_filepath=ExperimentPaths(expid).job_data_db, schema=expid, ) diff --git a/autosubmit_api/database/adapters/join/experiment_join.py b/autosubmit_api/database/adapters/join/experiment_join.py index 6aefe76..47b5564 100644 --- a/autosubmit_api/database/adapters/join/experiment_join.py +++ b/autosubmit_api/database/adapters/join/experiment_join.py @@ -23,19 +23,19 @@ def generate_query_listexp_extended( statement = ( select( - tables.experiment_table, - tables.details_table, - tables.experiment_status_table.c.exp_id, - tables.experiment_status_table.c.status, + tables.ExperimentTable, + tables.DetailsTable, + tables.ExperimentStatusTable.c.exp_id, + tables.ExperimentStatusTable.c.status, ) .join( - tables.details_table, - tables.experiment_table.c.id == tables.details_table.c.exp_id, + tables.DetailsTable, + tables.ExperimentTable.c.id == tables.DetailsTable.c.exp_id, isouter=True, ) .join( - tables.experiment_status_table, - tables.experiment_table.c.id == tables.experiment_status_table.c.exp_id, + tables.ExperimentStatusTable, + tables.ExperimentTable.c.id == tables.ExperimentStatusTable.c.exp_id, isouter=True, ) ) @@ -46,38 +46,38 @@ def generate_query_listexp_extended( if query: filter_stmts.append( or_( - tables.experiment_table.c.name.like(f"{query}%"), - tables.experiment_table.c.description.like(f"%{query}%"), - tables.details_table.c.user.like(f"%{query}%"), + tables.ExperimentTable.c.name.like(f"{query}%"), + tables.ExperimentTable.c.description.like(f"%{query}%"), + tables.DetailsTable.c.user.like(f"%{query}%"), ) ) if only_active: - filter_stmts.append(tables.experiment_status_table.c.status == "RUNNING") + filter_stmts.append(tables.ExperimentStatusTable.c.status == "RUNNING") if owner: - filter_stmts.append(tables.details_table.c.user == owner) + filter_stmts.append(tables.DetailsTable.c.user == owner) if exp_type == "test": - filter_stmts.append(tables.experiment_table.c.name.like("t%")) + filter_stmts.append(tables.ExperimentTable.c.name.like("t%")) elif exp_type == "operational": - filter_stmts.append(tables.experiment_table.c.name.like("o%")) + filter_stmts.append(tables.ExperimentTable.c.name.like("o%")) elif exp_type == "experiment": - filter_stmts.append(tables.experiment_table.c.name.not_like("t%")) - filter_stmts.append(tables.experiment_table.c.name.not_like("o%")) + filter_stmts.append(tables.ExperimentTable.c.name.not_like("t%")) + filter_stmts.append(tables.ExperimentTable.c.name.not_like("o%")) if autosubmit_version: filter_stmts.append( - tables.experiment_table.c.autosubmit_version == autosubmit_version + tables.ExperimentTable.c.autosubmit_version == autosubmit_version ) statement = statement.where(*filter_stmts) # Order by ORDER_OPTIONS = { - "expid": tables.experiment_table.c.name, - "created": tables.details_table.c.created, - "description": tables.experiment_table.c.description, + "expid": tables.ExperimentTable.c.name, + "created": tables.DetailsTable.c.created, + "description": tables.ExperimentTable.c.description, } order_col: Optional[Column[Any]] = None if order_by: @@ -101,9 +101,9 @@ class ExperimentJoinDbAdapter: def drop_status_from_deleted_experiments(self) -> int: 
with self._get_connection() as conn: - del_stmnt = tables.experiment_status_table.delete().where( - tables.experiment_status_table.c.exp_id.not_in( - select(tables.experiment_table.c.id) + del_stmnt = tables.ExperimentStatusTable.delete().where( + tables.ExperimentStatusTable.c.exp_id.not_in( + select(tables.ExperimentTable.c.id) ) ) result = conn.execute(del_stmnt) diff --git a/autosubmit_api/database/table_manager.py b/autosubmit_api/database/table_manager.py index 743f214..5a8faf6 100644 --- a/autosubmit_api/database/table_manager.py +++ b/autosubmit_api/database/table_manager.py @@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional, Type, Union from sqlalchemy import Connection, Engine, Table, delete, insert, select from sqlalchemy.schema import CreateTable, CreateSchema, DropTable from sqlalchemy.orm import DeclarativeBase - -from autosubmit.database import tables, session +from autosubmit_api.database import tables +from autosubmit.database import session from autosubmit_api.config.basicConfig import APIBasicConfig @@ -92,7 +92,7 @@ class PostgresDbTableManager(DbTableManager): super().__init__(table, db_filepath, schema) self.engine = session.Session().bind if schema: - self.table = tables.table_change_schema(schema, table) + self.table = tables._table_change_schema(schema, table) def create_table(self, conn: Connection): """ diff --git a/autosubmit_api/database/tables.py b/autosubmit_api/database/tables.py index c28b56e..da9679f 100644 --- a/autosubmit_api/database/tables.py +++ b/autosubmit_api/database/tables.py @@ -1,68 +1,54 @@ -from sqlalchemy import Integer, Text, Table -from sqlalchemy.orm import mapped_column, Mapped +from sqlalchemy import Column, Integer, Text, Table from autosubmit.database.tables import ( - BaseTable, + metadata_obj, + _table_change_schema, ExperimentTable, - experiment_run_table, - JobDataTable, ExperimentStructureTable, - table_change_schema, ExperimentStatusTable, JobPackageTable, WrapperJobPackageTable, + ExperimentRunTable, + JobDataTable, ) -table_change_schema = table_change_schema - -## SQLAlchemy ORM tables -class DetailsTable(BaseTable): - """ - Stores extra information. It is populated by the API. - """ - - __tablename__ = "details" - - exp_id: Mapped[int] = mapped_column(Integer, primary_key=True) - user: Mapped[str] = mapped_column(Text, nullable=False) - created: Mapped[str] = mapped_column(Text, nullable=False) - model: Mapped[str] = mapped_column(Text, nullable=False) - branch: Mapped[str] = mapped_column(Text, nullable=False) - hpc: Mapped[str] = mapped_column(Text, nullable=False) - - -class GraphDataTable(BaseTable): - """ - Stores the coordinates and it is used exclusively to speed up the process - of generating the graph layout - """ - - __tablename__ = "experiment_graph_draw" +table_change_schema = _table_change_schema - id: Mapped[int] = mapped_column(Integer, primary_key=True) - job_name: Mapped[str] = mapped_column(Text, nullable=False) - x: Mapped[int] = mapped_column(Integer, nullable=False) - y: Mapped[int] = mapped_column(Integer, nullable=False) +## API extended SQLAlchemy Core tables +DetailsTable = Table( + "details", + metadata_obj, + Column("exp_id", Integer, primary_key=True), + Column("user", Text, nullable=False), + Column("created", Text, nullable=False), + Column("model", Text, nullable=False), + Column("branch", Text, nullable=False), + Column("hpc", Text, nullable=False), +) +"""Stores extra information. 
It is populated by the API.""" -## SQLAlchemy Core tables - -# MAIN_DB TABLES -experiment_table: Table = ExperimentTable.__table__ -details_table: Table = DetailsTable.__table__ - -# AS_TIMES TABLES -experiment_status_table: Table = ExperimentStatusTable.__table__ - -# Graph Data TABLES -graph_data_table: Table = GraphDataTable.__table__ - -# Job package TABLES -job_package_table: Table = JobPackageTable.__table__ -wrapper_job_package_table: Table = WrapperJobPackageTable.__table__ - -# Job Data TABLES -job_data_table: Table = JobDataTable.__table__ -experiment_run_table: Table = experiment_run_table -# Structure TABLES -experiment_structure_table: Table = ExperimentStructureTable.__table__ +GraphDataTable = Table( + "experiment_graph_draw", + metadata_obj, + Column("id", Integer, primary_key=True), + Column("job_name", Text, nullable=False), + Column("x", Integer, nullable=False), + Column("y", Integer, nullable=False), +) +"""Stores the coordinates and it is used exclusively +to speed up the process of generating the graph layout""" + +# Module exports +__all__ = [ + "table_change_schema", + "ExperimentTable", + "ExperimentStructureTable", + "ExperimentStatusTable", + "JobPackageTable", + "WrapperJobPackageTable", + "ExperimentRunTable", + "JobDataTable", + "DetailsTable", + "GraphDataTable", +] diff --git a/autosubmit_api/views/v4.py b/autosubmit_api/views/v4.py index 0d9fc66..2292cb6 100644 --- a/autosubmit_api/views/v4.py +++ b/autosubmit_api/views/v4.py @@ -334,7 +334,7 @@ class ExperimentDetailView(MethodView): """ exp_builder = ExperimentBuilder() exp_builder.produce_base(expid) - return exp_builder.product.model_dump(include=tables.experiment_table.c.keys()) + return exp_builder.product.model_dump(include=tables.ExperimentTable.c.keys()) class ExperimentJobsViewOptEnum(str, Enum): diff --git a/tests/bgtasks/test_status_updater.py b/tests/bgtasks/test_status_updater.py index c6975a9..56a225c 100644 --- a/tests/bgtasks/test_status_updater.py +++ b/tests/bgtasks/test_status_updater.py @@ -12,15 +12,15 @@ class TestStatusUpdater: prepare_db() with create_as_times_db_engine().connect() as conn: - exps_status = conn.execute(tables.experiment_status_table.delete()) + exps_status = conn.execute(tables.ExperimentStatusTable.delete()) StatusUpdater.run() with create_autosubmit_db_engine().connect() as conn: - experiments = conn.execute(tables.experiment_table.select()).all() + experiments = conn.execute(tables.ExperimentTable.select()).all() with create_as_times_db_engine().connect() as conn: - exps_status = conn.execute(tables.experiment_status_table.select()).all() + exps_status = conn.execute(tables.ExperimentStatusTable.select()).all() assert len(experiments) == len(exps_status) assert set([x.id for x in experiments]) == set([x.exp_id for x in exps_status]) diff --git a/tests/test_bg_tasks.py b/tests/test_bg_tasks.py index f8d5ab2..2c5a073 100644 --- a/tests/test_bg_tasks.py +++ b/tests/test_bg_tasks.py @@ -9,12 +9,12 @@ class TestDetailsPopulate: def test_process(self,fixture_mock_basic_config: APIBasicConfig): with create_autosubmit_db_engine().connect() as conn: - conn.execute(tables.details_table.delete()) + conn.execute(tables.DetailsTable.delete()) conn.commit() count = DetailsProcessor(fixture_mock_basic_config).process() - rows = conn.execute(tables.details_table.select()).all() + rows = conn.execute(tables.DetailsTable.select()).all() assert len(rows) > 0 assert len(rows) == count \ No newline at end of file diff --git a/tests/utils.py b/tests/utils.py index e528f3a..e5b7f21 
100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -87,7 +87,7 @@ def copy_job_data_db(filepath: str, engine: Engine): source_as_db = create_engine(f"sqlite:///{filepath}") with source_as_db.connect() as source_conn: job_data_rows = source_conn.execute(select(tables.JobDataTable)).all() - exprun_rows = source_conn.execute(select(tables.experiment_run_table)).all() + exprun_rows = source_conn.execute(select(tables.ExperimentRunTable)).all() # Copy data to the Postgres database with engine.connect() as conn: @@ -98,7 +98,7 @@ def copy_job_data_db(filepath: str, engine: Engine): if len(job_data_rows) > 0: conn.execute(insert(target_table),[row._asdict() for row in job_data_rows]) # Experiment run - target_table = tables.table_change_schema(expid, tables.experiment_run_table) + target_table = tables.table_change_schema(expid, tables.ExperimentRunTable) conn.execute(CreateTable(target_table, if_not_exists=True)) if len(exprun_rows) > 0: conn.execute(insert(target_table),[row._asdict() for row in exprun_rows]) @@ -140,9 +140,9 @@ def copy_autosubmit_db(filepath: str, engine: Engine): # Copy data to the Postgres database with engine.connect() as conn: - conn.execute(CreateTable(tables.ExperimentTable.__table__, if_not_exists=True)) + conn.execute(CreateTable(tables.ExperimentTable, if_not_exists=True)) conn.execute(insert(tables.ExperimentTable),[row._asdict() for row in exp_rows]) - conn.execute(CreateTable(tables.DetailsTable.__table__, if_not_exists=True)) + conn.execute(CreateTable(tables.DetailsTable, if_not_exists=True)) conn.execute(insert(tables.DetailsTable),[row._asdict() for row in details_rows]) conn.commit() @@ -158,7 +158,7 @@ def copy_as_times_db(filepath: str, engine: Engine): # Copy data to the Postgres database with engine.connect() as conn: - conn.execute(CreateTable(tables.ExperimentStatusTable.__table__, if_not_exists=True)) + conn.execute(CreateTable(tables.ExperimentStatusTable, if_not_exists=True)) conn.execute(insert(tables.ExperimentStatusTable),[row._asdict() for row in as_times_rows]) conn.commit() -- GitLab From 16f736d41b17b87f9a223a0cb1a69b5cf0955f3a Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Tue, 28 May 2024 14:31:15 +0200 Subject: [PATCH 21/26] normalize adapters responses --- .../bgtasks/tasks/status_updater.py | 2 +- .../database/adapters/experiment.py | 6 ++-- .../database/adapters/join/experiment_join.py | 2 +- autosubmit_api/experiment/common_requests.py | 6 ++-- autosubmit_api/views/v4.py | 2 +- .../workers/populate_details/populate.py | 2 +- tests/conftest.py | 1 + tests/test_db_adapters.py | 34 ++++++++++++++++++- 8 files changed, 43 insertions(+), 12 deletions(-) diff --git a/autosubmit_api/bgtasks/tasks/status_updater.py b/autosubmit_api/bgtasks/tasks/status_updater.py index d49eb6b..0766e49 100644 --- a/autosubmit_api/bgtasks/tasks/status_updater.py +++ b/autosubmit_api/bgtasks/tasks/status_updater.py @@ -37,7 +37,7 @@ class StatusUpdater(BackgroundTaskTemplate): Get the experiments list """ query_result = ExperimentDbAdapter().get_all() - return [ExperimentModel.model_validate(row._asdict()) for row in query_result] + return [ExperimentModel.model_validate(row) for row in query_result] @classmethod def _check_exp_running(cls, expid: str) -> bool: diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/adapters/experiment.py index 801da16..d2c05ee 100644 --- a/autosubmit_api/database/adapters/experiment.py +++ b/autosubmit_api/database/adapters/experiment.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from 
typing import Any, Dict, List from autosubmit_api.database.table_manager import create_db_table_manager from autosubmit_api.config.basicConfig import APIBasicConfig @@ -19,13 +19,13 @@ class ExperimentDbAdapter: with self.table_manager.get_connection() as conn: self.table_manager.create_table(conn) - def get_all(self): + def get_all(self) -> List[Dict[str, Any]]: """ Return all experiments. """ with self.table_manager.get_connection() as conn: rows = self.table_manager.select_all(conn) - return rows + return [row._asdict() for row in rows] def get_by_expid(self, expid: str) -> Dict[str, Any]: """ diff --git a/autosubmit_api/database/adapters/join/experiment_join.py b/autosubmit_api/database/adapters/join/experiment_join.py index 47b5564..2fd92db 100644 --- a/autosubmit_api/database/adapters/join/experiment_join.py +++ b/autosubmit_api/database/adapters/join/experiment_join.py @@ -143,4 +143,4 @@ class ExperimentJoinDbAdapter: offset=offset, ) - return query_result, total_rows + return [row._asdict() for row in query_result], total_rows diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py index dadd50d..9c0ea7f 100644 --- a/autosubmit_api/experiment/common_requests.py +++ b/autosubmit_api/experiment/common_requests.py @@ -1286,8 +1286,7 @@ def search_experiment_by_id( query=query, exp_type=exp_type, only_active=only_active, owner=owner ) - for raw_row in query_result: - row = raw_row._asdict() + for row in query_result: expid = str(row["name"]) completed = "NA" total = "NA" @@ -1369,8 +1368,7 @@ def get_current_running_exp(): result = list() query_result, _ = ExperimentJoinDbAdapter().search(only_active=True) - for raw_row in query_result: - row = raw_row._asdict() + for row in query_result: expid = str(row["name"]) status = "NOT RUNNING" completed = "NA" diff --git a/autosubmit_api/views/v4.py b/autosubmit_api/views/v4.py index 2292cb6..6e35b5b 100644 --- a/autosubmit_api/views/v4.py +++ b/autosubmit_api/views/v4.py @@ -247,7 +247,7 @@ class ExperimentView(MethodView): experiments = [] for raw_exp in query_result: exp_builder = ExperimentBuilder() - exp_builder.produce_base_from_dict(raw_exp._asdict()) + exp_builder.produce_base_from_dict(raw_exp) # Get additional data from config files try: diff --git a/autosubmit_api/workers/populate_details/populate.py b/autosubmit_api/workers/populate_details/populate.py index c23244f..b4f0b7f 100644 --- a/autosubmit_api/workers/populate_details/populate.py +++ b/autosubmit_api/workers/populate_details/populate.py @@ -35,7 +35,7 @@ class DetailsProcessor: for exp in query_result: experiments.append( - Experiment(exp._asdict().get("id"), exp._asdict().get("name")) + Experiment(exp.get("id"), exp.get("name")) ) return experiments diff --git a/tests/conftest.py b/tests/conftest.py index dca9833..7409517 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -47,6 +47,7 @@ def fixture_mock_basic_config(request: pytest.FixtureRequest): Sets a mock basic config for the tests. 
""" request.getfixturevalue(request.param) + APIBasicConfig.read() yield APIBasicConfig diff --git a/tests/test_db_adapters.py b/tests/test_db_adapters.py index be0c4b2..d5537b0 100644 --- a/tests/test_db_adapters.py +++ b/tests/test_db_adapters.py @@ -1,8 +1,27 @@ +from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter +from autosubmit_api.database.adapters.join.experiment_join import ( + ExperimentJoinDbAdapter, +) + + +class TestExperimentDbAdapter: + def test_operations(self, fixture_mock_basic_config): + experiment_db = ExperimentDbAdapter() + + # Check get_all + rows = experiment_db.get_all() + assert len(rows) >= 4 + for expid in ["a003", "a007", "a3tb", "a6zj"]: + assert expid in [row.get("name") for row in rows] + + # Check get_by_expid + row = experiment_db.get_by_expid("a003") + assert row["name"] == "a003" class TestExpGraphDrawDBAdapter: - def test_operation(self, fixture_mock_basic_config): + def test_operations(self, fixture_mock_basic_config): expid = "g001" graph_draw_db = ExpGraphDrawDBAdapter(expid) @@ -27,3 +46,16 @@ class TestExpGraphDrawDBAdapter: # Table is empty assert graph_draw_db.get_all() == [] + + +class TestExperimentJoinDbAdapter: + def test_search(self, fixture_mock_basic_config): + experiment_join_db = ExperimentJoinDbAdapter() + + # Check search + rows, total = experiment_join_db.search(limit=3) + assert len(rows) == 3 + assert total >= 4 + + for row in rows: + assert row.get("status") -- GitLab From 7d7d38a5b8c78869fabc3fcfe54534a393860c9d Mon Sep 17 00:00:00 2001 From: ltenorio Date: Tue, 28 May 2024 16:26:05 +0200 Subject: [PATCH 22/26] Revert "extend Basic Config to handle postgres (should be reverted later)" This reverts commit c51f607993f02423aafefacced6dcc171d47932b. 
--- autosubmit_api/config/basicConfig.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/autosubmit_api/config/basicConfig.py b/autosubmit_api/config/basicConfig.py index bc7970e..c73a51e 100644 --- a/autosubmit_api/config/basicConfig.py +++ b/autosubmit_api/config/basicConfig.py @@ -31,8 +31,6 @@ class APIBasicConfig(BasicConfig): FILE_STATUS_DIR = os.path.join(os.path.expanduser('~'), 'autosubmit', 'metadata', 'test') FILE_STATUS_DB = 'status.db' ALLOWED_CLIENTS = set([]) - # DATABASE_BACKEND = "sqlite" # TODO Move to the config parser repo - # DATABASE_CONN_URL = "" # TODO Move to the config parser repo @staticmethod def __read_file_config(file_path): @@ -55,10 +53,6 @@ class APIBasicConfig(BasicConfig): APIBasicConfig.FILE_STATUS_DB = parser.get('statusdb', 'filename') if parser.has_option('clients', 'authorized'): APIBasicConfig.ALLOWED_CLIENTS = set(parser.get('clients', 'authorized').split()) - # if parser.has_option('database', 'backend'): - # APIBasicConfig.DATABASE_BACKEND = parser.get('database', 'backend') - # if parser.has_option('database', 'connection_url'): - # APIBasicConfig.DATABASE_CONN_URL = parser.get('database', 'connection_url') @staticmethod -- GitLab From 2329fb0f84847c5d2495ebe5850752358850b4ee Mon Sep 17 00:00:00 2001 From: ltenorio Date: Wed, 29 May 2024 15:27:39 +0200 Subject: [PATCH 23/26] move session module to API --- autosubmit_api/database/common.py | 2 +- autosubmit_api/database/session.py | 21 +++++++++++++++++++++ autosubmit_api/database/table_manager.py | 3 +-- tests/conftest.py | 2 +- 4 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 autosubmit_api/database/session.py diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index c055a8f..1d1ceb7 100644 --- a/autosubmit_api/database/common.py +++ b/autosubmit_api/database/common.py @@ -13,7 +13,7 @@ from sqlalchemy import ( from autosubmit_api.builders import BaseBuilder from autosubmit_api.logger import logger from autosubmit_api.config.basicConfig import APIBasicConfig -from autosubmit.database import session +from autosubmit_api.database import session def get_postgres_engine(): diff --git a/autosubmit_api/database/session.py b/autosubmit_api/database/session.py new file mode 100644 index 0000000..69db92f --- /dev/null +++ b/autosubmit_api/database/session.py @@ -0,0 +1,21 @@ +import os +from sqlalchemy import Engine, NullPool, create_engine +from sqlalchemy.orm import sessionmaker, scoped_session +from autosubmitconfigparser.config.basicconfig import BasicConfig + + +def create_sqlite_engine(path: str = "") -> Engine: + if path: + return create_engine(f"sqlite:///{os.path.abspath(path)}", poolclass=NullPool) + # Else return memory database + return create_engine("sqlite://", poolclass=NullPool) + + +BasicConfig.read() +if BasicConfig.DATABASE_BACKEND == "postgres": + engine = create_engine(BasicConfig.DATABASE_CONN_URL) +else: + engine = create_sqlite_engine() # Placeholder sqlite engine + +session_factory = sessionmaker(bind=engine) +Session = scoped_session(session_factory) diff --git a/autosubmit_api/database/table_manager.py b/autosubmit_api/database/table_manager.py index 5a8faf6..e433248 100644 --- a/autosubmit_api/database/table_manager.py +++ b/autosubmit_api/database/table_manager.py @@ -3,8 +3,7 @@ from typing import Any, Dict, List, Optional, Type, Union from sqlalchemy import Connection, Engine, Table, delete, insert, select from sqlalchemy.schema import CreateTable, CreateSchema, DropTable from sqlalchemy.orm import DeclarativeBase 
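# Usage sketch for the session module introduced above (illustrative, not part
# of this hunk): `Session` is a scoped_session created at import time, so any
# consumer can recover the chosen engine directly, e.g.
#
#     from autosubmit_api.database import session
#     engine = session.Session().bind  # Postgres engine when configured,
#                                      # otherwise the placeholder sqlite one
#
# which matches how PostgresDbTableManager obtains its engine
# (`self.engine = session.Session().bind`).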
-from autosubmit_api.database import tables -from autosubmit.database import session +from autosubmit_api.database import tables, session from autosubmit_api.config.basicConfig import APIBasicConfig diff --git a/tests/conftest.py b/tests/conftest.py index 7409517..84b7f3e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,7 +9,7 @@ import pytest from autosubmit_api.app import create_app from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api import config -from autosubmit.database import session +from autosubmit_api.database import session from tests import utils from sqlalchemy import Engine, create_engine from sqlalchemy.orm import scoped_session, sessionmaker -- GitLab From f4e54dd9b4dfa23ba357cdacb108cce2a7a7cd20 Mon Sep 17 00:00:00 2001 From: ltenorio Date: Wed, 29 May 2024 15:29:17 +0200 Subject: [PATCH 24/26] use APIBasicConfig in session --- autosubmit_api/database/session.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/autosubmit_api/database/session.py b/autosubmit_api/database/session.py index 69db92f..ac38922 100644 --- a/autosubmit_api/database/session.py +++ b/autosubmit_api/database/session.py @@ -1,7 +1,7 @@ import os from sqlalchemy import Engine, NullPool, create_engine from sqlalchemy.orm import sessionmaker, scoped_session -from autosubmitconfigparser.config.basicconfig import BasicConfig +from autosubmit_api.config.basicConfig import APIBasicConfig def create_sqlite_engine(path: str = "") -> Engine: @@ -11,9 +11,9 @@ def create_sqlite_engine(path: str = "") -> Engine: return create_engine("sqlite://", poolclass=NullPool) -BasicConfig.read() -if BasicConfig.DATABASE_BACKEND == "postgres": - engine = create_engine(BasicConfig.DATABASE_CONN_URL) +APIBasicConfig.read() +if APIBasicConfig.DATABASE_BACKEND == "postgres": + engine = create_engine(APIBasicConfig.DATABASE_CONN_URL) else: engine = create_sqlite_engine() # Placeholder sqlite engine -- GitLab From 6842dde7abe32035d0a1d3f59d921b7dbcce91de Mon Sep 17 00:00:00 2001 From: ltenorio Date: Wed, 29 May 2024 15:31:41 +0200 Subject: [PATCH 25/26] update name from adapters to repository --- .../bgtasks/tasks/status_updater.py | 16 ++++---- autosubmit_api/builders/experiment_builder.py | 6 +-- .../components/experiment/graph_drawer.py | 4 +- autosubmit_api/components/jobdata.py | 8 ++-- autosubmit_api/database/__init__.py | 14 +++---- autosubmit_api/database/adapters/__init__.py | 38 ------------------- autosubmit_api/database/common.py | 8 ++-- .../database/repositories/__init__.py | 38 +++++++++++++++++++ .../{adapters => repositories}/experiment.py | 2 +- .../experiment_details.py | 2 +- .../experiment_run.py | 2 +- .../experiment_status.py | 2 +- .../{adapters => repositories}/graph_draw.py | 2 +- .../{adapters => repositories}/job_data.py | 2 +- .../job_packages.py | 4 +- .../join/__init__.py | 0 .../join/experiment_join.py | 4 +- autosubmit_api/experiment/common_requests.py | 18 ++++----- .../experiment_history_db_manager.py | 6 +-- .../persistance/job_package_reader.py | 10 ++--- autosubmit_api/views/v4.py | 4 +- .../business/process_graph_drawings.py | 4 +- .../workers/populate_details/populate.py | 10 ++--- ...db_adapters.py => test_db_repositories.py} | 20 +++++----- tests/test_graph.py | 6 +-- 25 files changed, 115 insertions(+), 115 deletions(-) delete mode 100644 autosubmit_api/database/adapters/__init__.py create mode 100644 autosubmit_api/database/repositories/__init__.py rename autosubmit_api/database/{adapters => repositories}/experiment.py (97%) 
rename autosubmit_api/database/{adapters => repositories}/experiment_details.py (97%) rename autosubmit_api/database/{adapters => repositories}/experiment_run.py (98%) rename autosubmit_api/database/{adapters => repositories}/experiment_status.py (98%) rename autosubmit_api/database/{adapters => repositories}/graph_draw.py (97%) rename autosubmit_api/database/{adapters => repositories}/job_data.py (98%) rename autosubmit_api/database/{adapters => repositories}/job_packages.py (95%) rename autosubmit_api/database/{adapters => repositories}/join/__init__.py (100%) rename autosubmit_api/database/{adapters => repositories}/join/experiment_join.py (97%) rename tests/{test_db_adapters.py => test_db_repositories.py} (70%) diff --git a/autosubmit_api/bgtasks/tasks/status_updater.py b/autosubmit_api/bgtasks/tasks/status_updater.py index 0766e49..0be7307 100644 --- a/autosubmit_api/bgtasks/tasks/status_updater.py +++ b/autosubmit_api/bgtasks/tasks/status_updater.py @@ -3,10 +3,10 @@ import time from typing import List from autosubmit_api.bgtasks.bgtask import BackgroundTaskTemplate -from autosubmit_api.database.adapters import ( - ExperimentStatusDbAdapter, - ExperimentDbAdapter, - ExperimentJoinDbAdapter, +from autosubmit_api.database.repositories import ( + ExperimentStatusDbRepository, + ExperimentDbRepository, + ExperimentJoinDbRepository, ) from autosubmit_api.database.models import ExperimentModel from autosubmit_api.experiment.common_requests import _is_exp_running @@ -25,7 +25,7 @@ class StatusUpdater(BackgroundTaskTemplate): """ try: - ExperimentJoinDbAdapter().drop_status_from_deleted_experiments() + ExperimentJoinDbRepository().drop_status_from_deleted_experiments() except Exception as exc: cls.logger.error( f"[{cls.id}] Error while clearing missing experiments status: {exc}" @@ -36,7 +36,7 @@ class StatusUpdater(BackgroundTaskTemplate): """ Get the experiments list """ - query_result = ExperimentDbAdapter().get_all() + query_result = ExperimentDbRepository().get_all() return [ExperimentModel.model_validate(row) for row in query_result] @classmethod @@ -68,7 +68,7 @@ class StatusUpdater(BackgroundTaskTemplate): @classmethod def _update_experiment_status(cls, experiment: ExperimentModel, is_running: bool): try: - ExperimentStatusDbAdapter().upsert_status( + ExperimentStatusDbRepository().upsert_status( experiment.id, experiment.name, RunningStatus.RUNNING if is_running else RunningStatus.NOT_RUNNING, @@ -89,7 +89,7 @@ class StatusUpdater(BackgroundTaskTemplate): exp_list = cls._get_experiments() # Read current status of all experiments - current_status = ExperimentStatusDbAdapter().get_all_dict() + current_status = ExperimentStatusDbRepository().get_all_dict() # Check every experiment status & update for experiment in exp_list: diff --git a/autosubmit_api/builders/experiment_builder.py b/autosubmit_api/builders/experiment_builder.py index 77ba3b5..9a4720a 100644 --- a/autosubmit_api/builders/experiment_builder.py +++ b/autosubmit_api/builders/experiment_builder.py @@ -5,7 +5,7 @@ from autosubmit_api.builders.configuration_facade_builder import ( AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector, ) -from autosubmit_api.database.adapters import ExperimentDbAdapter, ExperimentDetailsDbAdapter +from autosubmit_api.database.repositories import ExperimentDbRepository, ExperimentDetailsDbRepository from autosubmit_api.database.models import ExperimentModel @@ -21,7 +21,7 @@ class ExperimentBuilder(BaseBuilder): """ Produce basic information from the main experiment table """ - 
result = ExperimentDbAdapter().get_by_expid(expid) + result = ExperimentDbRepository().get_by_expid(expid) # Set new product self._product = ExperimentModel( @@ -38,7 +38,7 @@ class ExperimentBuilder(BaseBuilder): exp_id = self._product.id result = None try: - result = ExperimentDetailsDbAdapter().get_by_exp_id(exp_id) + result = ExperimentDetailsDbRepository().get_by_exp_id(exp_id) except Exception: logger.error(f"Error getting details for exp_id {exp_id}") diff --git a/autosubmit_api/components/experiment/graph_drawer.py b/autosubmit_api/components/experiment/graph_drawer.py index a461fcf..663a922 100644 --- a/autosubmit_api/components/experiment/graph_drawer.py +++ b/autosubmit_api/components/experiment/graph_drawer.py @@ -3,7 +3,7 @@ import portalocker import os import traceback from autosubmit_api.config.basicConfig import APIBasicConfig -from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter +from autosubmit_api.database.repositories.graph_draw import ExpGraphDrawDBRepository from autosubmit_api.logger import logger from autosubmit_api.monitor.monitor import Monitor @@ -18,7 +18,7 @@ class ExperimentGraphDrawing: APIBasicConfig.read() self.expid = expid self.folder_path = APIBasicConfig.LOCAL_ROOT_DIR - self.graph_data_db = ExpGraphDrawDBAdapter(expid) + self.graph_data_db = ExpGraphDrawDBRepository(expid) self.graph_data_db.create_table() self.lock_name = "calculation_in_progress.lock" self.current_position_dictionary = None diff --git a/autosubmit_api/components/jobdata.py b/autosubmit_api/components/jobdata.py index ea14e2e..1ef6e87 100644 --- a/autosubmit_api/components/jobdata.py +++ b/autosubmit_api/components/jobdata.py @@ -25,8 +25,8 @@ from json import loads from autosubmit_api.components.jobs.utils import generate_job_html_title # from networkx import DiGraph from autosubmit_api.config.basicConfig import APIBasicConfig -from autosubmit_api.database.adapters import ExperimentRunDbAdapter -from autosubmit_api.database.adapters.job_data import JobDataDbAdapter +from autosubmit_api.database.repositories import ExperimentRunDbRepository +from autosubmit_api.database.repositories.job_data import JobDataDbRepository from autosubmit_api.monitor.monitor import Monitor from autosubmit_api.performance.utils import calculate_ASYPD_perjob from autosubmit_api.components.jobs.job_factory import SimJob @@ -431,8 +431,8 @@ class JobDataStructure: """ self.db_version = 99 # Previous versions are unsupported - self.exp_run_db = ExperimentRunDbAdapter(expid) - self.job_data_db = JobDataDbAdapter(expid) + self.exp_run_db = ExperimentRunDbRepository(expid) + self.job_data_db = JobDataDbRepository(expid) def __str__(self): return "Data structure" diff --git a/autosubmit_api/database/__init__.py b/autosubmit_api/database/__init__.py index 13b7af0..fb661d1 100644 --- a/autosubmit_api/database/__init__.py +++ b/autosubmit_api/database/__init__.py @@ -1,11 +1,11 @@ -from autosubmit_api.database.adapters import ( - ExperimentDbAdapter, - ExperimentDetailsDbAdapter, - ExperimentStatusDbAdapter, +from autosubmit_api.database.repositories import ( + ExperimentDbRepository, + ExperimentDetailsDbRepository, + ExperimentStatusDbRepository, ) def prepare_db(): - ExperimentDbAdapter().create_table() - ExperimentDetailsDbAdapter().create_table() - ExperimentStatusDbAdapter().create_table() + ExperimentDbRepository().create_table() + ExperimentDetailsDbRepository().create_table() + ExperimentStatusDbRepository().create_table() diff --git a/autosubmit_api/database/adapters/__init__.py 
b/autosubmit_api/database/adapters/__init__.py deleted file mode 100644 index dffe6ce..0000000 --- a/autosubmit_api/database/adapters/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -""" -This module contains the adapters for the database tables. - -The adapters are used to interact with the database tables delegating the SQL statements generation and execution order. - -Other modules can use the adapters to interact with the database tables without the need to know the SQL syntax. -""" - -from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter -from autosubmit_api.database.adapters.experiment_details import ( - ExperimentDetailsDbAdapter, -) -from autosubmit_api.database.adapters.experiment_status import ( - ExperimentStatusDbAdapter, -) -from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter -from autosubmit_api.database.adapters.join.experiment_join import ( - ExperimentJoinDbAdapter, -) -from autosubmit_api.database.adapters.job_packages import ( - JobPackagesDbAdapter, - WrapperJobPackagesDbAdapter, -) -from autosubmit_api.database.adapters.experiment_run import ExperimentRunDbAdapter -from autosubmit_api.database.adapters.job_data import JobDataDbAdapter - - -__all__ = [ - "ExperimentDbAdapter", - "ExperimentDetailsDbAdapter", - "ExperimentStatusDbAdapter", - "ExperimentRunDbAdapter", - "JobDataDbAdapter", - "ExpGraphDrawDBAdapter", - "ExperimentJoinDbAdapter", - "JobPackagesDbAdapter", - "WrapperJobPackagesDbAdapter", -] diff --git a/autosubmit_api/database/common.py b/autosubmit_api/database/common.py index 1d1ceb7..14863a1 100644 --- a/autosubmit_api/database/common.py +++ b/autosubmit_api/database/common.py @@ -24,7 +24,7 @@ class AttachedDatabaseConnBuilder(BaseBuilder): """ SQLite utility to build attached databases. - MUST BE USED ONLY FOR ADAPTERS and TESTS. + MUST BE USED ONLY FOR DATABASE MODULE and TESTS. """ def __init__(self) -> None: @@ -55,7 +55,7 @@ def create_main_db_conn() -> Connection: """ Connection with the autosubmit and as_times DDBB. - MUST BE USED ONLY FOR ADAPTERS and TESTS. + MUST BE USED ONLY FOR DATABASE MODULE and TESTS. """ APIBasicConfig.read() if APIBasicConfig.DATABASE_BACKEND == "postgres": @@ -71,7 +71,7 @@ def create_autosubmit_db_engine() -> Engine: """ Create an engine for the autosubmit DDBB. Usually named autosubmit.db - MUST BE USED ONLY FOR ADAPTERS and TESTS. + MUST BE USED ONLY FOR DATABASE MODULE and TESTS. """ APIBasicConfig.read() if APIBasicConfig.DATABASE_BACKEND == "postgres": @@ -85,7 +85,7 @@ def create_as_times_db_engine() -> Engine: """ Create an engine for the AS_TIMES DDBB. Usually named as_times.db - MUST BE USED ONLY FOR ADAPTERS and TESTS. + MUST BE USED ONLY FOR DATABASE MODULE and TESTS. """ APIBasicConfig.read() diff --git a/autosubmit_api/database/repositories/__init__.py b/autosubmit_api/database/repositories/__init__.py new file mode 100644 index 0000000..b70ca2d --- /dev/null +++ b/autosubmit_api/database/repositories/__init__.py @@ -0,0 +1,38 @@ +""" +This module contains the repositories for the database tables. + +The repositories are used to interact with the database tables delegating the SQL statements generation and execution order. + +Other modules can use the repositories to interact with the database tables without the need to know the SQL syntax. 
+""" + +from autosubmit_api.database.repositories.experiment import ExperimentDbRepository +from autosubmit_api.database.repositories.experiment_details import ( + ExperimentDetailsDbRepository, +) +from autosubmit_api.database.repositories.experiment_status import ( + ExperimentStatusDbRepository, +) +from autosubmit_api.database.repositories.graph_draw import ExpGraphDrawDBRepository +from autosubmit_api.database.repositories.join.experiment_join import ( + ExperimentJoinDbRepository, +) +from autosubmit_api.database.repositories.job_packages import ( + JobPackagesDbRepository, + WrapperJobPackagesDbRepository, +) +from autosubmit_api.database.repositories.experiment_run import ExperimentRunDbRepository +from autosubmit_api.database.repositories.job_data import JobDataDbRepository + + +__all__ = [ + "ExperimentDbRepository", + "ExperimentDetailsDbRepository", + "ExperimentStatusDbRepository", + "ExperimentRunDbRepository", + "JobDataDbRepository", + "ExpGraphDrawDBRepository", + "ExperimentJoinDbRepository", + "JobPackagesDbRepository", + "WrapperJobPackagesDbRepository", +] diff --git a/autosubmit_api/database/adapters/experiment.py b/autosubmit_api/database/repositories/experiment.py similarity index 97% rename from autosubmit_api/database/adapters/experiment.py rename to autosubmit_api/database/repositories/experiment.py index d2c05ee..a55aaca 100644 --- a/autosubmit_api/database/adapters/experiment.py +++ b/autosubmit_api/database/repositories/experiment.py @@ -5,7 +5,7 @@ from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.database import tables -class ExperimentDbAdapter: +class ExperimentDbRepository: def __init__(self): self.table_manager = create_db_table_manager( table=tables.ExperimentTable, diff --git a/autosubmit_api/database/adapters/experiment_details.py b/autosubmit_api/database/repositories/experiment_details.py similarity index 97% rename from autosubmit_api/database/adapters/experiment_details.py rename to autosubmit_api/database/repositories/experiment_details.py index 8f6ce63..950d7f7 100644 --- a/autosubmit_api/database/adapters/experiment_details.py +++ b/autosubmit_api/database/repositories/experiment_details.py @@ -4,7 +4,7 @@ from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.database import tables -class ExperimentDetailsDbAdapter: +class ExperimentDetailsDbRepository: def __init__(self) -> None: APIBasicConfig.read() self.table_manager = create_db_table_manager( diff --git a/autosubmit_api/database/adapters/experiment_run.py b/autosubmit_api/database/repositories/experiment_run.py similarity index 98% rename from autosubmit_api/database/adapters/experiment_run.py rename to autosubmit_api/database/repositories/experiment_run.py index 17717dd..1ff12b7 100644 --- a/autosubmit_api/database/adapters/experiment_run.py +++ b/autosubmit_api/database/repositories/experiment_run.py @@ -6,7 +6,7 @@ from autosubmit_api.database import tables from autosubmit_api.persistance.experiment import ExperimentPaths -class ExperimentRunDbAdapter: +class ExperimentRunDbRepository: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( diff --git a/autosubmit_api/database/adapters/experiment_status.py b/autosubmit_api/database/repositories/experiment_status.py similarity index 98% rename from autosubmit_api/database/adapters/experiment_status.py rename to autosubmit_api/database/repositories/experiment_status.py index b91d783..4d22d65 100644 --- 
a/autosubmit_api/database/adapters/experiment_status.py +++ b/autosubmit_api/database/repositories/experiment_status.py @@ -7,7 +7,7 @@ from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.database import tables -class ExperimentStatusDbAdapter: +class ExperimentStatusDbRepository: def __init__(self) -> None: APIBasicConfig.read() self.table_manager = create_db_table_manager( diff --git a/autosubmit_api/database/adapters/graph_draw.py b/autosubmit_api/database/repositories/graph_draw.py similarity index 97% rename from autosubmit_api/database/adapters/graph_draw.py rename to autosubmit_api/database/repositories/graph_draw.py index 691b7fb..a1b396a 100644 --- a/autosubmit_api/database/adapters/graph_draw.py +++ b/autosubmit_api/database/repositories/graph_draw.py @@ -4,7 +4,7 @@ from autosubmit_api.persistance.experiment import ExperimentPaths from typing import Any, Dict, List -class ExpGraphDrawDBAdapter: +class ExpGraphDrawDBRepository: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( diff --git a/autosubmit_api/database/adapters/job_data.py b/autosubmit_api/database/repositories/job_data.py similarity index 98% rename from autosubmit_api/database/adapters/job_data.py rename to autosubmit_api/database/repositories/job_data.py index 8e5266b..f71f56e 100644 --- a/autosubmit_api/database/adapters/job_data.py +++ b/autosubmit_api/database/repositories/job_data.py @@ -6,7 +6,7 @@ from autosubmit_api.database import tables from autosubmit_api.persistance.experiment import ExperimentPaths -class JobDataDbAdapter: +class JobDataDbRepository: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( diff --git a/autosubmit_api/database/adapters/job_packages.py b/autosubmit_api/database/repositories/job_packages.py similarity index 95% rename from autosubmit_api/database/adapters/job_packages.py rename to autosubmit_api/database/repositories/job_packages.py index 46c4d6b..02d343b 100644 --- a/autosubmit_api/database/adapters/job_packages.py +++ b/autosubmit_api/database/repositories/job_packages.py @@ -6,7 +6,7 @@ from autosubmit_api.database import tables from autosubmit_api.persistance.experiment import ExperimentPaths -class JobPackagesDbAdapter: +class JobPackagesDbRepository: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( @@ -24,7 +24,7 @@ class JobPackagesDbAdapter: return [row._asdict() for row in rows] -class WrapperJobPackagesDbAdapter: +class WrapperJobPackagesDbRepository: def __init__(self, expid: str) -> None: self.expid = expid self.table_manager = create_db_table_manager( diff --git a/autosubmit_api/database/adapters/join/__init__.py b/autosubmit_api/database/repositories/join/__init__.py similarity index 100% rename from autosubmit_api/database/adapters/join/__init__.py rename to autosubmit_api/database/repositories/join/__init__.py diff --git a/autosubmit_api/database/adapters/join/experiment_join.py b/autosubmit_api/database/repositories/join/experiment_join.py similarity index 97% rename from autosubmit_api/database/adapters/join/experiment_join.py rename to autosubmit_api/database/repositories/join/experiment_join.py index 2fd92db..3b68ad8 100644 --- a/autosubmit_api/database/adapters/join/experiment_join.py +++ b/autosubmit_api/database/repositories/join/experiment_join.py @@ -91,9 +91,9 @@ def generate_query_listexp_extended( return statement -class ExperimentJoinDbAdapter: +class 
ExperimentJoinDbRepository: """ - Adapter for experiments using Experiment, ExperimentStatus and ExperimentDetails tables. + View experiments using Experiment, ExperimentStatus and ExperimentDetails tables. """ def _get_connection(self): diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py index 9c0ea7f..5d11f75 100644 --- a/autosubmit_api/experiment/common_requests.py +++ b/autosubmit_api/experiment/common_requests.py @@ -33,9 +33,9 @@ from collections import deque from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer from autosubmit_api.components.jobs.job_factory import SimpleJob from autosubmit_api.config.confConfigStrategy import confConfigStrategy -from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter -from autosubmit_api.database.adapters.experiment_status import ExperimentStatusDbAdapter -from autosubmit_api.database.adapters import ExperimentJoinDbAdapter +from autosubmit_api.database.repositories.experiment import ExperimentDbRepository +from autosubmit_api.database.repositories.experiment_status import ExperimentStatusDbRepository +from autosubmit_api.database.repositories import ExperimentJoinDbRepository from autosubmit_api.experiment import common_db_requests as DbRequests from autosubmit_api.components import jobdata as JobData from autosubmit_api.common import utils as common_utils @@ -154,7 +154,7 @@ def get_experiment_data(expid): try: autosubmit_config_facade = ConfigurationFacadeDirector(AutosubmitConfigurationFacadeBuilder(expid)).build_autosubmit_configuration_facade() try: - experiment_status = ExperimentStatusDbAdapter().get_status(expid) + experiment_status = ExperimentStatusDbRepository().get_status(expid) result["running"] = (experiment_status == "RUNNING") except Exception as exc: logger.warning((traceback.format_exc())) @@ -167,7 +167,7 @@ def get_experiment_data(expid): result["time_last_access"] = autosubmit_config_facade.get_experiment_last_access_time_as_datetime() result["time_last_mod"] = autosubmit_config_facade.get_experiment_last_modified_time_as_datetime() try: - result["description"] = ExperimentDbAdapter().get_by_expid(expid).get("description", "NA") + result["description"] = ExperimentDbRepository().get_by_expid(expid).get("description", "NA") except Exception: result["description"] = "NA" result["version"] = autosubmit_config_facade.get_autosubmit_version() @@ -469,7 +469,7 @@ def quick_test_run(expid): error_message = "" try: - status = ExperimentStatusDbAdapter().get_status(expid) + status = ExperimentStatusDbRepository().get_status(expid) if status != "RUNNING": running = False except Exception as exp: @@ -766,7 +766,7 @@ def get_experiment_tree_structured(expid, log): APIBasicConfig.read() # TODO: Encapsulate this following 2 lines or move to the parent function in app. 
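# Illustration of the mechanical rename applied throughout this commit
# (call shapes are untouched; only the repository-pattern naming changes):
#
#     status = ExperimentStatusDbAdapter().get_status(expid)      # before
#     status = ExperimentStatusDbRepository().get_status(expid)   # after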
- curr_exp_as_version: str = ExperimentDbAdapter().get_by_expid(expid).get("autosubmit_version") + curr_exp_as_version: str = ExperimentDbRepository().get_by_expid(expid).get("autosubmit_version") main, secondary = common_utils.parse_version_number(curr_exp_as_version) if main and main >= 4: as_conf = Autosubmit4Config(expid) @@ -1282,7 +1282,7 @@ def search_experiment_by_id( :rtype: JSON """ result = list() - query_result, _ = ExperimentJoinDbAdapter().search( + query_result, _ = ExperimentJoinDbRepository().search( query=query, exp_type=exp_type, only_active=only_active, owner=owner ) @@ -1366,7 +1366,7 @@ def get_current_running_exp(): :rtype: list of users """ result = list() - query_result, _ = ExperimentJoinDbAdapter().search(only_active=True) + query_result, _ = ExperimentJoinDbRepository().search(only_active=True) for row in query_result: expid = str(row["name"]) diff --git a/autosubmit_api/history/database_managers/experiment_history_db_manager.py b/autosubmit_api/history/database_managers/experiment_history_db_manager.py index db3616a..e72731d 100644 --- a/autosubmit_api/history/database_managers/experiment_history_db_manager.py +++ b/autosubmit_api/history/database_managers/experiment_history_db_manager.py @@ -18,7 +18,7 @@ import os from typing import Any, Dict, List, Optional -from autosubmit_api.database.adapters import ExperimentRunDbAdapter, JobDataDbAdapter +from autosubmit_api.database.repositories import ExperimentRunDbRepository, JobDataDbRepository from autosubmit_api.persistance.experiment import ExperimentPaths from autosubmit_api.history.database_managers import database_models as Models from autosubmit_api.history.data_classes.job_data import JobData @@ -31,8 +31,8 @@ class ExperimentHistoryDbManager: def __init__(self, expid: str): """Requires expid""" self.expid = expid - self.run_db = ExperimentRunDbAdapter(expid) - self.job_data_db = JobDataDbAdapter(expid) + self.run_db = ExperimentRunDbRepository(expid) + self.job_data_db = JobDataDbRepository(expid) self.historicaldb_file_path = ExperimentPaths(expid).job_data_db def my_database_exists(self) -> bool: diff --git a/autosubmit_api/persistance/job_package_reader.py b/autosubmit_api/persistance/job_package_reader.py index e295b33..4846cfa 100644 --- a/autosubmit_api/persistance/job_package_reader.py +++ b/autosubmit_api/persistance/job_package_reader.py @@ -1,7 +1,7 @@ from typing import Dict, List -from autosubmit_api.database.adapters import ( - JobPackagesDbAdapter, - WrapperJobPackagesDbAdapter, +from autosubmit_api.database.repositories import ( + JobPackagesDbRepository, + WrapperJobPackagesDbRepository, ) from autosubmit_api.logger import logger @@ -17,12 +17,12 @@ class JobPackageReader: def read(self): try: - self._content = JobPackagesDbAdapter(self.expid).get_all() + self._content = JobPackagesDbRepository(self.expid).get_all() if len(self._content) == 0: raise Warning("job_packages table empty, trying wrapper_job_packages") except Exception as exc: logger.warning(exc) - self._content = WrapperJobPackagesDbAdapter(self.expid).get_all() + self._content = WrapperJobPackagesDbRepository(self.expid).get_all() self._build_job_to_package() self._build_package_to_jobs() diff --git a/autosubmit_api/views/v4.py b/autosubmit_api/views/v4.py index 6e35b5b..a3f4842 100644 --- a/autosubmit_api/views/v4.py +++ b/autosubmit_api/views/v4.py @@ -18,7 +18,7 @@ from autosubmit_api.builders.experiment_history_builder import ( ) from autosubmit_api.common.utils import Status from autosubmit_api.database import tables -from 
autosubmit_api.database.adapters import ExperimentJoinDbAdapter +from autosubmit_api.database.repositories import ExperimentJoinDbRepository from autosubmit_api.logger import logger, with_log_run_times from cas import CASClient from autosubmit_api import config @@ -231,7 +231,7 @@ class ExperimentView(MethodView): return {"error": {"message": "Invalid params"}}, HTTPStatus.BAD_REQUEST # Query - query_result, total_rows = ExperimentJoinDbAdapter().search( + query_result, total_rows = ExperimentJoinDbRepository().search( query=query, only_active=only_active, owner=owner, diff --git a/autosubmit_api/workers/business/process_graph_drawings.py b/autosubmit_api/workers/business/process_graph_drawings.py index 1b095d1..c284963 100644 --- a/autosubmit_api/workers/business/process_graph_drawings.py +++ b/autosubmit_api/workers/business/process_graph_drawings.py @@ -1,6 +1,6 @@ import time import traceback -from autosubmit_api.database.adapters import ExperimentStatusDbAdapter +from autosubmit_api.database.repositories import ExperimentStatusDbRepository from autosubmit_api.common import utils as common_utils from autosubmit_api.components.experiment.graph_drawer import ExperimentGraphDrawing from autosubmit_api.builders.configuration_facade_builder import ( @@ -19,7 +19,7 @@ def process_active_graphs(): Process the list of active experiments to generate the positioning of their graphs """ try: - active_experiments = ExperimentStatusDbAdapter().get_only_running_expids() + active_experiments = ExperimentStatusDbRepository().get_only_running_expids() for expid in active_experiments: try: diff --git a/autosubmit_api/workers/populate_details/populate.py b/autosubmit_api/workers/populate_details/populate.py index b4f0b7f..49304ef 100644 --- a/autosubmit_api/workers/populate_details/populate.py +++ b/autosubmit_api/workers/populate_details/populate.py @@ -1,6 +1,6 @@ -from autosubmit_api.database.adapters import ( - ExperimentDetailsDbAdapter, - ExperimentDbAdapter, +from autosubmit_api.database.repositories import ( + ExperimentDetailsDbRepository, + ExperimentDbRepository, ) from autosubmit_api.logger import logger from autosubmit_api.builders.configuration_facade_builder import ( @@ -21,8 +21,8 @@ Experiment = namedtuple("Experiment", ["id", "name"]) class DetailsProcessor: def __init__(self, basic_config: APIBasicConfig): self.basic_config = basic_config - self.experiment_db = ExperimentDbAdapter() - self.details_db = ExperimentDetailsDbAdapter() + self.experiment_db = ExperimentDbRepository() + self.details_db = ExperimentDetailsDbRepository() def process(self): new_details = self._get_all_details() diff --git a/tests/test_db_adapters.py b/tests/test_db_repositories.py similarity index 70% rename from tests/test_db_adapters.py rename to tests/test_db_repositories.py index d5537b0..a4bf3d0 100644 --- a/tests/test_db_adapters.py +++ b/tests/test_db_repositories.py @@ -1,13 +1,13 @@ -from autosubmit_api.database.adapters.experiment import ExperimentDbAdapter -from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter -from autosubmit_api.database.adapters.join.experiment_join import ( - ExperimentJoinDbAdapter, +from autosubmit_api.database.repositories.experiment import ExperimentDbRepository +from autosubmit_api.database.repositories.graph_draw import ExpGraphDrawDBRepository +from autosubmit_api.database.repositories.join.experiment_join import ( + ExperimentJoinDbRepository, ) -class TestExperimentDbAdapter: +class TestExperimentDbRepository: def test_operations(self, 
fixture_mock_basic_config): - experiment_db = ExperimentDbAdapter() + experiment_db = ExperimentDbRepository() # Check get_all rows = experiment_db.get_all() @@ -20,10 +20,10 @@ class TestExperimentDbAdapter: assert row["name"] == "a003" -class TestExpGraphDrawDBAdapter: +class TestExpGraphDrawDBRepository: def test_operations(self, fixture_mock_basic_config): expid = "g001" - graph_draw_db = ExpGraphDrawDBAdapter(expid) + graph_draw_db = ExpGraphDrawDBRepository(expid) # Create table graph_draw_db.create_table() @@ -48,9 +48,9 @@ class TestExpGraphDrawDBAdapter: assert graph_draw_db.get_all() == [] -class TestExperimentJoinDbAdapter: +class TestExperimentJoinDbRepository: def test_search(self, fixture_mock_basic_config): - experiment_join_db = ExperimentJoinDbAdapter() + experiment_join_db = ExperimentJoinDbRepository() # Check search rows, total = experiment_join_db.search(limit=3) diff --git a/tests/test_graph.py b/tests/test_graph.py index 842764d..55f185f 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -8,7 +8,7 @@ from autosubmit_api.builders.joblist_loader_builder import ( JobListLoaderDirector, ) from autosubmit_api.components.experiment.graph_drawer import ExperimentGraphDrawing -from autosubmit_api.database.adapters.graph_draw import ExpGraphDrawDBAdapter +from autosubmit_api.database.repositories.graph_draw import ExpGraphDrawDBRepository from autosubmit_api.monitor.monitor import Monitor @@ -44,8 +44,8 @@ class TestPopulateDB: AutosubmitConfigurationFacadeBuilder(expid) ).build_autosubmit_configuration_facade() - # Create adapter - graph_draw_db = ExpGraphDrawDBAdapter(expid) + # Create repository handler + graph_draw_db = ExpGraphDrawDBRepository(expid) # Delete content of table graph_draw_db.delete_all() -- GitLab From 34ad9284f4f895f69cd61c94a2272e0e9061a03c Mon Sep 17 00:00:00 2001 From: ltenorio Date: Mon, 8 Jul 2024 17:34:55 +0200 Subject: [PATCH 26/26] replace db_structure handler --- .../autosubmit_legacy/job/job_list.py | 4 +- .../components/jobs/joblist_loader.py | 5 ++- .../database/repositories/__init__.py | 8 +++- .../repositories/experiment_structure.py | 37 +++++++++++++++++++ .../database/repositories/job_data.py | 2 + autosubmit_api/database/table_manager.py | 3 +- autosubmit_api/database/tables.py | 28 ++++++++++++-- tests/test_db_repositories.py | 21 +++++++++++ tests/test_fixtures.py | 2 + 9 files changed, 101 insertions(+), 9 deletions(-) create mode 100644 autosubmit_api/database/repositories/experiment_structure.py diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py index 2ea1cbe..b153650 100644 --- a/autosubmit_api/autosubmit_legacy/job/job_list.py +++ b/autosubmit_api/autosubmit_legacy/job/job_list.py @@ -31,13 +31,13 @@ from dateutil.relativedelta import * from autosubmit_api.autosubmit_legacy.job.job_utils import SubJob from autosubmit_api.autosubmit_legacy.job.job_utils import SubJobManager, job_times_to_text from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database.repositories import ExperimentStructureDbRepository from autosubmit_api.performance.utils import calculate_ASYPD_perjob, calculate_SYPD_perjob from autosubmit_api.components.jobs import utils as JUtils from autosubmit_api.monitor.monitor import Monitor from autosubmit_api.common.utils import Status from bscearth.utils.date import date2str, parse_date # from autosubmit_legacy.job.tree import Tree -from autosubmit.database import db_structure as DbStructure from 
autosubmit_api.components.jobdata import JobDataStructure, JobRow from autosubmit_api.builders.experiment_history_builder import ExperimentHistoryDirector, ExperimentHistoryBuilder from autosubmit_api.history.data_classes.job_data import JobData @@ -606,7 +606,7 @@ class JobList: # Get structure if there are packages because package require special time calculation # print("Get Structure") if (job_to_package): - current_table_structure = DbStructure.get_structure(expid, path_structure) + current_table_structure = ExperimentStructureDbRepository(expid).get_structure() # Main loop # print("Start main loop") for job in allJobs: diff --git a/autosubmit_api/components/jobs/joblist_loader.py b/autosubmit_api/components/jobs/joblist_loader.py index e9923d6..7dcd65f 100644 --- a/autosubmit_api/components/jobs/joblist_loader.py +++ b/autosubmit_api/components/jobs/joblist_loader.py @@ -4,13 +4,14 @@ import os from fnmatch import fnmatch from autosubmit_api.components.jobs.joblist_helper import JobListHelper from autosubmit_api.components.jobs.job_factory import StandardJob, Job -from autosubmit.database import db_structure from autosubmit_api.common.utils import Status from bscearth.utils.date import date2str from typing import Dict, List, Set # Builder Imports import logging +from autosubmit_api.database.repositories import ExperimentStructureDbRepository + logger = logging.getLogger('gunicorn.error') @@ -144,7 +145,7 @@ class JobListLoader(object): self._job_dictionary[job.name] = job def load_existing_structure_adjacency(self): - self._structure_adjacency = db_structure.get_structure(self.expid, self.configuration_facade.structures_path) + self._structure_adjacency = ExperimentStructureDbRepository(self.expid).get_structure() def distribute_adjacency_into_jobs(self): parents_adjacency = {} diff --git a/autosubmit_api/database/repositories/__init__.py b/autosubmit_api/database/repositories/__init__.py index b70ca2d..8e772bb 100644 --- a/autosubmit_api/database/repositories/__init__.py +++ b/autosubmit_api/database/repositories/__init__.py @@ -13,6 +13,9 @@ from autosubmit_api.database.repositories.experiment_details import ( from autosubmit_api.database.repositories.experiment_status import ( ExperimentStatusDbRepository, ) +from autosubmit_api.database.repositories.experiment_structure import ( + ExperimentStructureDbRepository, +) from autosubmit_api.database.repositories.graph_draw import ExpGraphDrawDBRepository from autosubmit_api.database.repositories.join.experiment_join import ( ExperimentJoinDbRepository, @@ -21,7 +24,9 @@ from autosubmit_api.database.repositories.job_packages import ( JobPackagesDbRepository, WrapperJobPackagesDbRepository, ) -from autosubmit_api.database.repositories.experiment_run import ExperimentRunDbRepository +from autosubmit_api.database.repositories.experiment_run import ( + ExperimentRunDbRepository, +) from autosubmit_api.database.repositories.job_data import JobDataDbRepository @@ -29,6 +34,7 @@ __all__ = [ "ExperimentDbRepository", "ExperimentDetailsDbRepository", "ExperimentStatusDbRepository", + "ExperimentStructureDbRepository", "ExperimentRunDbRepository", "JobDataDbRepository", "ExpGraphDrawDBRepository", diff --git a/autosubmit_api/database/repositories/experiment_structure.py b/autosubmit_api/database/repositories/experiment_structure.py new file mode 100644 index 0000000..a587861 --- /dev/null +++ b/autosubmit_api/database/repositories/experiment_structure.py @@ -0,0 +1,37 @@ +from typing import Dict, List +from sqlalchemy import select +from 
autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.database import tables +from autosubmit_api.database.table_manager import create_db_table_manager +from autosubmit_api.persistance.experiment import ExperimentPaths + + + +class ExperimentStructureDbRepository: + def __init__(self, expid: str): + APIBasicConfig.read() + self.table_manager = create_db_table_manager( + table=tables.ExperimentStructureTable, + db_filepath=ExperimentPaths(expid).structure_db, + schema=expid, + ) + # with self.table_manager.get_connection() as conn: + # self.table_manager.create_table(conn) + + def get_structure(self): + structure: Dict[str, List[str]] = {} + + with self.table_manager.get_connection() as conn: + rows = conn.execute( + select(self.table_manager.table) + ).all() + + for row in rows: + edge = row._asdict() + _from, _to = edge.get("e_from"), edge.get("e_to") + + structure.setdefault(_from, []).append(_to) + structure.setdefault(_to, []) + + return structure + diff --git a/autosubmit_api/database/repositories/job_data.py b/autosubmit_api/database/repositories/job_data.py index f71f56e..babcf4a 100644 --- a/autosubmit_api/database/repositories/job_data.py +++ b/autosubmit_api/database/repositories/job_data.py @@ -15,6 +15,8 @@ class JobDataDbRepository: schema=expid, ) self.table = self.table_manager.table + with self.table_manager.get_connection() as conn: + self.table_manager.create_table(conn) def get_last_job_data_by_run_id(self, run_id: int) -> List[Dict[str, Any]]: """ diff --git a/autosubmit_api/database/table_manager.py b/autosubmit_api/database/table_manager.py index e433248..8c245cf 100644 --- a/autosubmit_api/database/table_manager.py +++ b/autosubmit_api/database/table_manager.py @@ -1,4 +1,5 @@ from abc import ABC, abstractmethod +import os from typing import Any, Dict, List, Optional, Type, Union from sqlalchemy import Connection, Engine, Table, delete, insert, select from sqlalchemy.schema import CreateTable, CreateSchema, DropTable @@ -91,7 +92,7 @@ class PostgresDbTableManager(DbTableManager): super().__init__(table, db_filepath, schema) self.engine = session.Session().bind if schema: - self.table = tables._table_change_schema(schema, table) + self.table = tables.table_change_schema(schema, table) def create_table(self, conn: Connection): """ diff --git a/autosubmit_api/database/tables.py b/autosubmit_api/database/tables.py index da9679f..1513cd7 100644 --- a/autosubmit_api/database/tables.py +++ b/autosubmit_api/database/tables.py @@ -1,7 +1,8 @@ -from sqlalchemy import Column, Integer, Text, Table +from typing import Type, Union +from sqlalchemy import Column, Integer, MetaData, Text, Table +from sqlalchemy.orm import DeclarativeBase from autosubmit.database.tables import ( metadata_obj, - _table_change_schema, ExperimentTable, ExperimentStructureTable, ExperimentStatusTable, @@ -11,7 +12,28 @@ from autosubmit.database.tables import ( JobDataTable, ) -table_change_schema = _table_change_schema + +def table_change_schema( + schema: str, source: Union[Type[DeclarativeBase], Table] +) -> Table: + """ + Copy the source table and change the schema of that SQLAlchemy table into a new table instance + """ + if isinstance(source, type) and issubclass(source, DeclarativeBase): + _source_table: Table = source.__table__ + elif isinstance(source, Table): + _source_table = source + else: + raise RuntimeError("Invalid source type on table schema change") + + metadata = MetaData(schema=schema) + dest_table = Table(_source_table.name, metadata) + + for col in 
_source_table.columns:
+        dest_table.append_column(col.copy())
+
+    return dest_table
+


 ## API extended SQLAlchemy Core tables
diff --git a/tests/test_db_repositories.py b/tests/test_db_repositories.py
index a4bf3d0..ff6eb70 100644
--- a/tests/test_db_repositories.py
+++ b/tests/test_db_repositories.py
@@ -1,4 +1,7 @@
 from autosubmit_api.database.repositories.experiment import ExperimentDbRepository
+from autosubmit_api.database.repositories.experiment_structure import (
+    ExperimentStructureDbRepository,
+)
 from autosubmit_api.database.repositories.graph_draw import ExpGraphDrawDBRepository
 from autosubmit_api.database.repositories.join.experiment_join import (
     ExperimentJoinDbRepository,
 )
@@ -59,3 +62,21 @@ class TestExperimentJoinDbRepository:
 
         for row in rows:
             assert row.get("status")
+
+
+class TestExperimentStructureDbRepository:
+    def test_get(self, fixture_mock_basic_config):
+        structure_db = ExperimentStructureDbRepository("a007")
+
+        # Check get_structure (compare full dicts, not just the sorted keys)
+        structure = structure_db.get_structure()
+        assert structure == {
+            "a007_20000101_fc0_1_SIM": ["a007_20000101_fc0_2_SIM"],
+            "a007_20000101_fc0_2_SIM": ["a007_POST"],
+            "a007_20000101_fc0_INI": ["a007_20000101_fc0_1_SIM"],
+            "a007_20000101_fc0_TRANSFER": [],
+            "a007_CLEAN": ["a007_20000101_fc0_TRANSFER"],
+            "a007_LOCAL_SETUP": ["a007_REMOTE_SETUP"],
+            "a007_POST": ["a007_CLEAN"],
+            "a007_REMOTE_SETUP": ["a007_20000101_fc0_INI"],
+        }
diff --git a/tests/test_fixtures.py b/tests/test_fixtures.py
index 7021750..e2d9b2b 100644
--- a/tests/test_fixtures.py
+++ b/tests/test_fixtures.py
@@ -1,5 +1,6 @@
 import os
 from typing import Tuple
+import pytest
 from sqlalchemy import Engine, select
 from autosubmit_api.config.basicConfig import APIBasicConfig
 from autosubmit_api.database import tables
@@ -54,6 +55,7 @@ class TestSQLiteFixtures:
         assert "backend = sqlite" in content
 
 
+@pytest.mark.pg
 class TestPostgresFixtures:
     def test_fixture_temp_dir_copy_exclude_db(
         self, fixture_temp_dir_copy_exclude_db: str
--
GitLab
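
The renamed repository classes keep the adapters' call signatures, so call sites only swap the class name. The sketch below lists active experiments through the join repository; it relies only on the row keys already consumed by the views above ("name", "status") and is illustrative rather than a documented API surface:

    from autosubmit_api.database.repositories import ExperimentJoinDbRepository

    # search() returns (rows, total_rows); rows behave like plain dicts
    rows, total = ExperimentJoinDbRepository().search(only_active=True, limit=3)
    for row in rows:
        print(row["name"], row.get("status"))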
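
ExperimentStructureDbRepository.get_structure() folds the (e_from, e_to) edge rows into the same adjacency dictionary that job_list.py and joblist_loader.py previously fetched through autosubmit.database.db_structure. A self-contained sketch of that fold, with two made-up edges:

    # Edge rows as stored in the experiment structure table: (e_from, e_to)
    edges = [
        ("a007_LOCAL_SETUP", "a007_REMOTE_SETUP"),
        ("a007_REMOTE_SETUP", "a007_20000101_fc0_INI"),
    ]

    structure = {}
    for _from, _to in edges:
        # each source node accumulates its children...
        structure.setdefault(_from, []).append(_to)
        # ...and each target node is registered even when it has no children
        structure.setdefault(_to, [])

    assert structure == {
        "a007_LOCAL_SETUP": ["a007_REMOTE_SETUP"],
        "a007_REMOTE_SETUP": ["a007_20000101_fc0_INI"],
        "a007_20000101_fc0_INI": [],
    }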
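
Both per-experiment repositories (experiment_structure.py, job_data.py) delegate engine and table wiring to create_db_table_manager, which hides the backend switch: the SQLite manager appears to work against the per-experiment db_filepath, while the Postgres manager re-homes the table into the expid schema through table_change_schema. A minimal sketch under those assumptions; the expid and path are hypothetical:

    from sqlalchemy import select

    from autosubmit_api.database import tables
    from autosubmit_api.database.table_manager import create_db_table_manager

    manager = create_db_table_manager(
        table=tables.ExperimentStructureTable,
        db_filepath="/path/to/structure_t0ab.db",  # consumed by the SQLite backend
        schema="t0ab",  # consumed by the Postgres backend
    )
    with manager.get_connection() as conn:
        manager.create_table(conn)
        conn.commit()
        rows = conn.execute(select(manager.table)).all()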
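
table_change_schema itself replaces the former re-export of the core package's private _table_change_schema with a local implementation that copies the columns into a fresh MetaData. A quick property check, with a hypothetical schema name:

    from autosubmit_api.database import tables

    structure_t0ab = tables.table_change_schema("t0ab", tables.ExperimentStructureTable)
    assert structure_t0ab.schema == "t0ab"
    # the edge columns read by get_structure() were carried over
    assert {"e_from", "e_to"} <= set(structure_t0ab.columns.keys())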
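
Finally, the @pytest.mark.pg marker on TestPostgresFixtures presumably exists so that Postgres-backed suites can be deselected on machines without a running server (for example, pytest -m "not pg"); note that custom markers normally have to be registered in the project's pytest configuration to avoid unknown-marker warnings.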