From 39c7f096881efafa9e9836882b3414ca49aa0bb6 Mon Sep 17 00:00:00 2001 From: ltenorio Date: Fri, 30 Aug 2024 16:23:40 +0200 Subject: [PATCH 1/2] update typing hints --- .../autosubmit_legacy/job/job_list.py | 18 +- .../autosubmit_legacy/job/job_utils.py | 6 +- autosubmit_api/builders/basic_builder.py | 13 +- .../builders/configuration_facade_builder.py | 41 ++--- .../builders/experiment_history_builder.py | 38 ++-- .../builders/joblist_helper_builder.py | 31 ++-- .../builders/joblist_loader_builder.py | 19 +- .../builders/pkl_organizer_builder.py | 29 ++- autosubmit_api/common/utils.py | 24 +-- .../experiment/configuration_facade.py | 122 +++++------- .../components/experiment/pkl_organizer.py | 42 ++--- autosubmit_api/components/jobs/job_factory.py | 173 +++++++----------- autosubmit_api/components/jobs/job_support.py | 27 +-- .../components/jobs/joblist_helper.py | 42 ++--- .../components/jobs/joblist_loader.py | 45 ++--- autosubmit_api/components/jobs/utils.py | 47 ++--- .../components/representations/graph/edge.py | 12 +- .../components/representations/graph/graph.py | 72 +++----- .../components/representations/graph/test.py | 6 +- .../components/representations/tree/tree.py | 42 ++--- autosubmit_api/config/IConfigStrategy.py | 9 +- autosubmit_api/config/confConfigStrategy.py | 24 +-- autosubmit_api/config/config_common.py | 13 +- autosubmit_api/database/db_common.py | 3 +- .../experiment/common_db_requests.py | 3 +- autosubmit_api/experiment/common_requests.py | 9 +- .../history/data_classes/experiment_run.py | 26 ++- .../history/data_classes/job_data.py | 18 +- .../database_managers/database_manager.py | 37 ++-- .../database_managers/database_models.py | 9 +- .../experiment_history_db_manager.py | 45 ++--- autosubmit_api/history/experiment_history.py | 12 +- autosubmit_api/history/internal_logging.py | 7 +- autosubmit_api/history/utils.py | 23 +-- .../performance/performance_metrics.py | 36 ++-- autosubmit_api/performance/utils.py | 8 +- 
autosubmit_api/statistics/job_stat.py | 9 +- autosubmit_api/statistics/statistics.py | 55 +++--- autosubmit_api/statistics/stats_summary.py | 2 +- autosubmit_api/statistics/utils.py | 3 +- .../business/process_graph_drawings.py | 5 +- .../workers/populate_details/populate.py | 1 - 42 files changed, 475 insertions(+), 731 deletions(-) diff --git a/autosubmit_api/autosubmit_legacy/job/job_list.py b/autosubmit_api/autosubmit_legacy/job/job_list.py index cc6e085..c3110f4 100644 --- a/autosubmit_api/autosubmit_legacy/job/job_list.py +++ b/autosubmit_api/autosubmit_legacy/job/job_list.py @@ -42,7 +42,7 @@ from autosubmit_api.database.db_jobdata import JobDataStructure, JobRow from autosubmit_api.builders.experiment_history_builder import ExperimentHistoryDirector, ExperimentHistoryBuilder from autosubmit_api.history.data_classes.job_data import JobData -from typing import List, Dict, Tuple +from typing import List, Dict, Optional, Tuple from autosubmit_api.persistance.experiment import ExperimentPaths @@ -631,8 +631,9 @@ class JobList: return (job_running_time_seconds, job_running_to_runtext, []) @staticmethod - def _job_running_check(status_code, name, tmp_path): - # type: (int, str, str) -> Tuple[datetime.datetime, datetime.datetime, datetime.datetime, str] + def _job_running_check( + status_code: int, name: str, tmp_path: str + ) -> Tuple[datetime.datetime, datetime.datetime, datetime.datetime, str]: """ Receives job data and returns the data from its TOTAL_STATS file in an ordered way. 
:param status_code: Status of job @@ -701,8 +702,15 @@ class JobList: return (submit_time, start_time, finish_time, current_status) @staticmethod - def retrieve_times(status_code, name, tmp_path, make_exception=False, job_times=None, seconds=False, job_data_collection=None): - # type: (int, str, str, bool, Dict[str, Tuple[int, int, int, int, int]], bool, List[JobData]) -> JobRow + def retrieve_times( + status_code: int, + name: str, + tmp_path: str, + make_exception: bool = False, + job_times: Optional[Dict[str, Tuple[int, int, int, int, int]]] = None, + seconds: bool = False, + job_data_collection: Optional[List[JobData]] = None, + ) -> JobRow: """ Retrieve job timestamps from database. :param status_code: Code of the Status of the job diff --git a/autosubmit_api/autosubmit_legacy/job/job_utils.py b/autosubmit_api/autosubmit_legacy/job/job_utils.py index 241dd94..a32a1eb 100644 --- a/autosubmit_api/autosubmit_legacy/job/job_utils.py +++ b/autosubmit_api/autosubmit_legacy/job/job_utils.py @@ -270,8 +270,7 @@ def job_times_to_text(minutes_queue, minutes_running, status): return running_text -def datechunk_to_year(chunk_unit, chunk_size): - # type: (str, int) -> float +def datechunk_to_year(chunk_unit: str, chunk_size: int) -> float: """ Gets chunk unit and size and returns the value in years @@ -292,8 +291,7 @@ def datechunk_to_year(chunk_unit, chunk_size): return 0.0 -def tostamp(string_date): - # type: (str) -> int +def tostamp(string_date: str) -> int: """ String datetime to timestamp """ diff --git a/autosubmit_api/builders/basic_builder.py b/autosubmit_api/builders/basic_builder.py index 01f473c..d2e05b2 100644 --- a/autosubmit_api/builders/basic_builder.py +++ b/autosubmit_api/builders/basic_builder.py @@ -1,21 +1,18 @@ #!/usr/bin/env python -from ..config.basicConfig import APIBasicConfig +from autosubmit_api.config.basicConfig import APIBasicConfig from abc import ABCMeta class BasicBuilder(metaclass=ABCMeta): - def __init__(self, expid): - # type: (str) -> 
None + def __init__(self, expid: str) -> None: self.expid = expid - def set_basic_config(self, basic_config): - # type: (APIBasicConfig) -> None + def set_basic_config(self, basic_config: APIBasicConfig) -> None: self.basic_config = basic_config - def generate_basic_config(self): - # type: () -> None + def generate_basic_config(self) -> None: APIBasicConfig.read() self.basic_config = APIBasicConfig - def _validate_basic_config(self): + def _validate_basic_config(self) -> None: if not self.basic_config: raise Exception("BasicConfig is missing.") \ No newline at end of file diff --git a/autosubmit_api/builders/configuration_facade_builder.py b/autosubmit_api/builders/configuration_facade_builder.py index 4aedeec..1dbc3d2 100644 --- a/autosubmit_api/builders/configuration_facade_builder.py +++ b/autosubmit_api/builders/configuration_facade_builder.py @@ -1,51 +1,45 @@ #!/usr/bin/env python -from ..config.basicConfig import APIBasicConfig -from ..config.config_common import AutosubmitConfigResolver -from .basic_builder import BasicBuilder -from ..components.experiment.configuration_facade import AutosubmitConfigurationFacade, BasicConfigurationFacade, ConfigurationFacade +from typing import Optional +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.config.config_common import AutosubmitConfigResolver +from autosubmit_api.builders.basic_builder import BasicBuilder +from autosubmit_api.components.experiment.configuration_facade import AutosubmitConfigurationFacade, BasicConfigurationFacade, ConfigurationFacade from bscearth.utils.config_parser import ConfigParserFactory from abc import ABCMeta, abstractmethod class Builder(BasicBuilder, metaclass=ABCMeta): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(Builder, self).__init__(expid) @abstractmethod def generate_autosubmit_config(self): - # type: () -> None pass @abstractmethod - def make_configuration_facade(self): - # type: () -> 
ConfigurationFacade + def make_configuration_facade(self) -> ConfigurationFacade: pass class BasicConfigurationBuilder(Builder): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(BasicConfigurationBuilder, self).__init__(expid) def generate_autosubmit_config(self): raise NotImplementedError - def make_configuration_facade(self): - # type: () -> ConfigurationFacade + def make_configuration_facade(self) -> ConfigurationFacade: if not self.basic_config: raise Exception("BasicConfig is missing.") return BasicConfigurationFacade(self.expid, self.basic_config) class AutosubmitConfigurationFacadeBuilder(Builder): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(AutosubmitConfigurationFacadeBuilder, self).__init__(expid) def generate_autosubmit_config(self): self._validate_basic_config() self.autosubmit_config = AutosubmitConfigResolver(self.expid, self.basic_config, ConfigParserFactory()) - def make_configuration_facade(self): - # type: () -> ConfigurationFacade + def make_configuration_facade(self) -> ConfigurationFacade: self._validate_basic_config() if not self.autosubmit_config: raise Exception("AutosubmitConfig is missing.") @@ -53,8 +47,7 @@ class AutosubmitConfigurationFacadeBuilder(Builder): class ConfigurationFacadeDirector(object): - def __init__(self, builder): - # type: (Builder) -> None + def __init__(self, builder: Builder): self.builder = builder def _set_basic_config(self, basic_config=None): @@ -63,13 +56,15 @@ class ConfigurationFacadeDirector(object): else: self.builder.generate_basic_config() - def build_basic_configuration_facade(self, basic_config=None): - # type: (APIBasicConfig) -> BasicConfigurationFacade + def build_basic_configuration_facade( + self, basic_config: Optional[APIBasicConfig] = None + ) -> BasicConfigurationFacade: self._set_basic_config(basic_config) return self.builder.make_configuration_facade() - def build_autosubmit_configuration_facade(self, 
basic_config=None): - # type: (APIBasicConfig) -> AutosubmitConfigurationFacade + def build_autosubmit_configuration_facade( + self, basic_config: Optional[APIBasicConfig] = None + ) -> AutosubmitConfigurationFacade: self._set_basic_config(basic_config) self.builder.generate_autosubmit_config() return self.builder.make_configuration_facade() \ No newline at end of file diff --git a/autosubmit_api/builders/experiment_history_builder.py b/autosubmit_api/builders/experiment_history_builder.py index dd13a67..eebf502 100644 --- a/autosubmit_api/builders/experiment_history_builder.py +++ b/autosubmit_api/builders/experiment_history_builder.py @@ -1,59 +1,50 @@ #!/usr/bin/python3.7 -from ..history.experiment_history import ExperimentHistory -from ..history.internal_logging import Logging -from ..config.basicConfig import APIBasicConfig -from ..history.database_managers.experiment_history_db_manager import ExperimentHistoryDbManager -from .basic_builder import BasicBuilder +from typing import Optional +from autosubmit_api.history.experiment_history import ExperimentHistory +from autosubmit_api.history.internal_logging import Logging +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.history.database_managers.experiment_history_db_manager import ExperimentHistoryDbManager +from autosubmit_api.builders.basic_builder import BasicBuilder from abc import ABCMeta, abstractmethod class Builder(BasicBuilder, metaclass=ABCMeta): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(Builder, self).__init__(expid) @abstractmethod def generate_experiment_history_db_manager(self): - # type: () -> None pass @abstractmethod def initialize_experiment_history_db_manager(self): - # type; () -> None pass @abstractmethod def generate_logger(self): - # type: () -> None pass @abstractmethod - def make_experiment_history(self): - # type: () -> ExperimentHistory + def make_experiment_history(self) -> ExperimentHistory: pass 
class ExperimentHistoryBuilder(Builder): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(ExperimentHistoryBuilder, self).__init__(expid) def generate_experiment_history_db_manager(self): - # type: () -> None self._validate_basic_config() self.experiment_history_db_manager = ExperimentHistoryDbManager(self.expid, self.basic_config) def initialize_experiment_history_db_manager(self): - # type: () -> None if not self.experiment_history_db_manager: raise Exception("Experiment Database Manager is missing") self.experiment_history_db_manager.initialize() def generate_logger(self): - # type: () -> None self._validate_basic_config() self.logger = Logging(self.expid, self.basic_config) - def make_experiment_history(self): - # type: () -> ExperimentHistory + def make_experiment_history(self) -> ExperimentHistory: self._validate_basic_config() if not self.experiment_history_db_manager: raise Exception("Experiment Database Manager is missing") @@ -65,12 +56,10 @@ class ExperimentHistoryBuilder(Builder): return ExperimentHistory(self.expid, self.basic_config, self.experiment_history_db_manager, self.logger) class ExperimentHistoryDirector(object): - def __init__(self, builder): - # type: (Builder) -> None + def __init__(self, builder: Builder): self.builder = builder - def build_current_experiment_history(self, basic_config=None): - # type: (APIBasicConfig) -> ExperimentHistory + def build_current_experiment_history(self, basic_config: Optional[APIBasicConfig] = None) -> ExperimentHistory: """ Builds ExperimentHistory updated to current version. 
""" if basic_config: self.builder.set_basic_config(basic_config) @@ -81,8 +70,7 @@ class ExperimentHistoryDirector(object): self.builder.generate_logger() return self.builder.make_experiment_history() - def build_reader_experiment_history(self, basic_config=None): - # type: (APIBasicConfig) -> ExperimentHistory + def build_reader_experiment_history(self, basic_config: Optional[APIBasicConfig] = None) -> ExperimentHistory: """ Buids ExperimentHistory that doesn't update to current version automatically. """ if basic_config: self.builder.set_basic_config(basic_config) diff --git a/autosubmit_api/builders/joblist_helper_builder.py b/autosubmit_api/builders/joblist_helper_builder.py index baaab3a..1641344 100644 --- a/autosubmit_api/builders/joblist_helper_builder.py +++ b/autosubmit_api/builders/joblist_helper_builder.py @@ -1,35 +1,31 @@ #!/usr/bin/env python -from ..config.basicConfig import APIBasicConfig -from .configuration_facade_builder import AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector -from .basic_builder import BasicBuilder -from .pkl_organizer_builder import PklOrganizerBuilder, PklOrganizerDirector -from ..components.jobs.joblist_helper import JobListHelper +from typing import Optional +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.builders.configuration_facade_builder import AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector +from autosubmit_api.builders.basic_builder import BasicBuilder +from autosubmit_api.builders.pkl_organizer_builder import PklOrganizerBuilder, PklOrganizerDirector +from autosubmit_api.components.jobs.joblist_helper import JobListHelper from abc import ABCMeta, abstractmethod class Builder(BasicBuilder, metaclass=ABCMeta): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(Builder, self).__init__(expid) @abstractmethod def generate_autosubmit_configuration_facade(self): - # type: () -> None pass @abstractmethod def 
generate_pkl_organizer(self): - # type: () -> None pass @abstractmethod - def make_joblist_helper(self): - # type: () -> JobListHelper + def make_joblist_helper(self) -> JobListHelper: pass class JobListHelperBuilder(Builder): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(JobListHelperBuilder, self).__init__(expid) def _validate_autosubmit_configuration_facade(self): @@ -48,16 +44,14 @@ class JobListHelperBuilder(Builder): self._validate_autosubmit_configuration_facade() self.pkl_organizer = PklOrganizerDirector(PklOrganizerBuilder(self.expid)).build_pkl_organizer_with_configuration_provided(self.configuration_facade) - def make_joblist_helper(self): - # type: () -> JobListHelper + def make_joblist_helper(self) -> JobListHelper: self._validate_basic_config() self._validate_autosubmit_configuration_facade() self._validate_pkl_organizer() return JobListHelper(self.expid, self.configuration_facade, self.pkl_organizer, self.basic_config) class JobListHelperDirector: - def __init__(self, builder): - # type: (Builder) -> None + def __init__(self, builder: Builder): self.builder = builder def _set_basic_config(self, basic_config=None): @@ -66,8 +60,7 @@ class JobListHelperDirector: else: self.builder.generate_basic_config() - def build_job_list_helper(self, basic_config=None): - # type: (APIBasicConfig) -> JobListHelper + def build_job_list_helper(self, basic_config: Optional[APIBasicConfig] = None) -> JobListHelper: self._set_basic_config(basic_config) self.builder.generate_autosubmit_configuration_facade() self.builder.generate_pkl_organizer() diff --git a/autosubmit_api/builders/joblist_loader_builder.py b/autosubmit_api/builders/joblist_loader_builder.py index c6ba450..4b9c91d 100644 --- a/autosubmit_api/builders/joblist_loader_builder.py +++ b/autosubmit_api/builders/joblist_loader_builder.py @@ -1,12 +1,12 @@ #!/usr/bin/env python -from ..config.basicConfig import APIBasicConfig -from .basic_builder import BasicBuilder 
-from ..components.jobs.joblist_loader import JobListLoader -from .joblist_helper_builder import JobListHelperBuilder, JobListHelperDirector +from typing import Optional +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.builders.basic_builder import BasicBuilder +from autosubmit_api.components.jobs.joblist_loader import JobListLoader +from autosubmit_api.builders.joblist_helper_builder import JobListHelperBuilder, JobListHelperDirector class JobListLoaderBuilder(BasicBuilder): def __init__(self, expid): - # type: (str) -> None super(JobListLoaderBuilder, self).__init__(expid) def generate_joblist_helper(self): @@ -17,15 +17,13 @@ class JobListLoaderBuilder(BasicBuilder): if not self.joblist_helper: raise Exception("JobListHelper is missing.") - def make_joblist_loader(self): - # type: () -> JobListLoader + def make_joblist_loader(self) -> JobListLoader: self._validate_basic_config() self._validate_joblist_helper() return JobListLoader(self.expid, self.joblist_helper) class JobListLoaderDirector: - def __init__(self, builder): - # type: (JobListLoaderBuilder) -> None + def __init__(self, builder: JobListLoaderBuilder): self.builder = builder def _set_basic_config(self, basic_config=None): @@ -34,8 +32,7 @@ class JobListLoaderDirector: else: self.builder.generate_basic_config() - def build_loaded_joblist_loader(self, basic_config=None): - # type: (APIBasicConfig) -> JobListLoader + def build_loaded_joblist_loader(self, basic_config: Optional[APIBasicConfig]=None) -> JobListLoader: self._set_basic_config(basic_config) self.builder.generate_joblist_helper() joblist_loader = self.builder.make_joblist_loader() diff --git a/autosubmit_api/builders/pkl_organizer_builder.py b/autosubmit_api/builders/pkl_organizer_builder.py index 1a3a2bb..ad1d9e2 100644 --- a/autosubmit_api/builders/pkl_organizer_builder.py +++ b/autosubmit_api/builders/pkl_organizer_builder.py @@ -1,17 +1,16 @@ #!/usr/bin/env python -from ..config.basicConfig import 
APIBasicConfig -from ..components.experiment.pkl_organizer import PklOrganizer -from .configuration_facade_builder import AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector -from .basic_builder import BasicBuilder -from ..components.experiment.configuration_facade import AutosubmitConfigurationFacade +from typing import Optional +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer +from autosubmit_api.builders.configuration_facade_builder import AutosubmitConfigurationFacadeBuilder, ConfigurationFacadeDirector +from autosubmit_api.builders.basic_builder import BasicBuilder +from autosubmit_api.components.experiment.configuration_facade import AutosubmitConfigurationFacade class PklOrganizerBuilder(BasicBuilder): - def __init__(self, expid): - # type: (str) -> None + def __init__(self, expid: str): super(PklOrganizerBuilder, self).__init__(expid) - def set_autosubmit_configuration_facade(self, configuration_facade): - # type: (AutosubmitConfigurationFacade) -> None + def set_autosubmit_configuration_facade(self, configuration_facade: AutosubmitConfigurationFacade): self.configuration_facade = configuration_facade def generate_autosubmit_configuration_facade(self): @@ -22,15 +21,13 @@ class PklOrganizerBuilder(BasicBuilder): if not self.configuration_facade: raise Exception("AutosubmitConfigurationFacade is missing.") - def make_pkl_organizer(self): - # type: () -> PklOrganizer + def make_pkl_organizer(self) -> PklOrganizer: self._validate_basic_config() self._validate_autosubmit_configuration_facade() return PklOrganizer(self.configuration_facade) class PklOrganizerDirector: - def __init__(self, builder): - # type: (PklOrganizerBuilder) -> None + def __init__(self, builder: PklOrganizerBuilder): self.builder = builder def _set_basic_config(self, basic_config=None): @@ -39,14 +36,12 @@ class PklOrganizerDirector: else: self.builder.generate_basic_config() - def 
build_pkl_organizer(self, basic_config=None): - # type: (APIBasicConfig) -> PklOrganizer + def build_pkl_organizer(self, basic_config: Optional[APIBasicConfig] = None) -> PklOrganizer: self._set_basic_config(basic_config) self.builder.generate_autosubmit_configuration_facade() return self.builder.make_pkl_organizer() - def build_pkl_organizer_with_configuration_provided(self, configuration_facade): - # type: (AutosubmitConfigurationFacade) -> PklOrganizer + def build_pkl_organizer_with_configuration_provided(self, configuration_facade: AutosubmitConfigurationFacade) -> PklOrganizer: self._set_basic_config(configuration_facade.basic_configuration) self.builder.set_autosubmit_configuration_facade(configuration_facade) return self.builder.make_pkl_organizer() \ No newline at end of file diff --git a/autosubmit_api/common/utils.py b/autosubmit_api/common/utils.py index d687e34..416fc9a 100644 --- a/autosubmit_api/common/utils.py +++ b/autosubmit_api/common/utils.py @@ -38,8 +38,7 @@ SECONDS_IN_A_DAY = 86400 PklJob = namedtuple('PklJob', ['name', 'id', 'status', 'priority', 'section', 'date', 'member', 'chunk', 'out_path_local', 'err_path_local', 'out_path_remote', 'err_path_remote']) PklJob14 = namedtuple('PklJob14', ['name', 'id', 'status', 'priority', 'section', 'date', 'member', 'chunk', 'out_path_local', 'err_path_local', 'out_path_remote', 'err_path_remote', 'wrapper_type']) -def tostamp(string_date): - # type: (str) -> int +def tostamp(string_date: str) -> int: """ String datetime to timestamp """ @@ -56,9 +55,8 @@ def tostamp(string_date): -def parse_number_processors(processors_str): +def parse_number_processors(processors_str: str) -> int: """ Defaults to 1 in case of error """ - # type: (str) -> int if ':' in processors_str: components = processors_str.split(":") processors = int(sum( @@ -156,16 +154,14 @@ def _date_to_str_space(date_str): else: return "" -def get_average_total_time(jobs): - # type: (List[object]) -> float +def get_average_total_time(jobs: 
List[object]) -> float: """ Job has attribute total_time (See JobFactory)""" if len(jobs): average = sum(job.total_time for job in jobs)/ len(jobs) return round(average, 4) return 0.0 -def parse_version_number(str_version): - # type : (str) -> Tuple[int, int] +def parse_version_number(str_version: str) -> Tuple[int, int]: if len(str_version.strip()) > 0: version_split = str_version.split('.') main = int(version_split[0]) @@ -185,20 +181,17 @@ def is_wrapper_type_in_pkl_version(str_version): return True return False -def get_current_timestamp(): - # type: () -> int +def get_current_timestamp() -> int: return int(time.time()) -def get_experiments_from_folder(root_folder): - # type: (str) -> List[str] +def get_experiments_from_folder(root_folder: str) -> List[str]: currentDirectories = subprocess.Popen(['ls', '-t', root_folder], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdOut, _ = currentDirectories.communicate() folders = stdOut.split() return [expid for expid in folders if len(expid) == 4] -def timestamp_to_datetime_format(timestamp): - # type: (int) -> str +def timestamp_to_datetime_format(timestamp: int) -> str: """ %Y-%m-%d %H:%M:%S """ try: if timestamp and timestamp > 0: @@ -208,8 +201,7 @@ return None return None -def datechunk_to_year(chunk_unit, chunk_size): - # type: (str, int) -> float +def datechunk_to_year(chunk_unit: str, chunk_size: int) -> float: """ Gets chunk unit and size and returns the value in years diff --git a/autosubmit_api/components/experiment/configuration_facade.py b/autosubmit_api/components/experiment/configuration_facade.py index 8ff5fad..9f0b602 100644 --- a/autosubmit_api/components/experiment/configuration_facade.py +++ b/autosubmit_api/components/experiment/configuration_facade.py @@ -9,7 +9,7 @@ from autosubmit_api.components.jobs.job_factory import SimJob from autosubmit_api.config.config_common import AutosubmitConfigResolver from abc import ABCMeta, abstractmethod from 
autosubmit_api.common.utils import JobSection, parse_number_processors, timestamp_to_datetime_format, datechunk_to_year -from typing import List +from typing import List, Optional from autosubmit_api.persistance.experiment import ExperimentPaths @@ -22,26 +22,24 @@ class ConfigurationFacade(metaclass=ABCMeta): """ - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None - self.basic_configuration = basic_config # type: APIBasicConfig - self.expid = expid # type: str - self.experiment_path = "" # type: str - self.pkl_path = "" # type: str - self.tmp_path = "" # type: str - self.log_path = "" # type: str - self.pkl_filename = "" # type: str - self.structures_path = "" # type: str - self.chunk_unit = "" # type: str - self.chunk_size = "" # type: int - self.current_years_per_sim = 0.0 # type: float - self.sim_processors = 0 # type: int - self.experiment_stat_data = None # type: os.stat_result - self.warnings = [] # type: List[str] + def __init__(self, expid: str, basic_config: APIBasicConfig): + self.basic_configuration: APIBasicConfig = basic_config + self.expid: str = expid + self.experiment_path: str = "" + self.pkl_path: str = "" + self.tmp_path: str = "" + self.log_path: str = "" + self.pkl_filename: str = "" + self.structures_path: str = "" + self.chunk_unit: str = "" + self.chunk_size = "" + self.current_years_per_sim: float = 0.0 + self.sim_processors: int = 0 + self.experiment_stat_data: os.stat_result = None + self.warnings: List[str] = [] self._process_basic_config() def _process_basic_config(self): - # type: () -> None exp_paths = ExperimentPaths(self.expid) self.pkl_filename = os.path.basename(exp_paths.job_list_pkl) self.experiment_path = exp_paths.exp_dir @@ -62,34 +60,28 @@ class ConfigurationFacade(metaclass=ABCMeta): pass @abstractmethod - def _get_processors_number(self, conf_sim_processors): - # type: (str) -> int + def _get_processors_number(self, conf_sim_processors: str) -> int: pass @abstractmethod - def 
get_model(self): - # type: () -> str + def get_model(self) -> str: pass @abstractmethod - def get_branch(self): - # type: () -> str + def get_branch(self) -> str: pass @abstractmethod - def get_owner_name(self): - # type: () -> str + def get_owner_name(self) -> str: pass @abstractmethod - def get_owner_id(self): - # type: () -> int + def get_owner_id(self) -> int: pass class BasicConfigurationFacade(ConfigurationFacade): """ BasicConfig and paths """ - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig): super(BasicConfigurationFacade, self).__init__(expid, basic_config) def _process_advanced_config(self): @@ -115,15 +107,13 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): """ Provides an interface to the Configuration of the experiment. """ - def __init__(self, expid, basic_config, autosubmit_config): - # type: (str, APIBasicConfig, AutosubmitConfigResolver) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig, autosubmit_config: AutosubmitConfigResolver): super(AutosubmitConfigurationFacade, self).__init__(expid, basic_config) self.autosubmit_conf = autosubmit_config self._process_advanced_config() def _process_advanced_config(self): """ Advanced Configuration from AutosubmitConfig """ - # type: () -> None self.autosubmit_conf.reload() self.chunk_unit = self.autosubmit_conf.get_chunk_size_unit() self.chunk_size = self.autosubmit_conf.get_chunk_size() @@ -146,33 +136,26 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): self.experiment_stat_data = os.stat(self.experiment_path) - def get_pkl_last_modified_timestamp(self): - # type: () -> int + def get_pkl_last_modified_timestamp(self) -> int: return int(os.stat(self.pkl_path).st_mtime) - def get_pkl_last_modified_time_as_datetime(self): - # type: () -> str + def get_pkl_last_modified_time_as_datetime(self) -> str: return 
timestamp_to_datetime_format(self.get_pkl_last_modified_timestamp()) - def get_experiment_last_access_time_as_datetime(self): - # type: () -> str + def get_experiment_last_access_time_as_datetime(self) -> str: return timestamp_to_datetime_format(int(self.experiment_stat_data.st_atime)) - def get_experiment_last_modified_time_as_datetime(self): - # type: () -> str + def get_experiment_last_modified_time_as_datetime(self) -> str: return timestamp_to_datetime_format(int(self.experiment_stat_data.st_mtime)) - def get_experiment_created_time_as_datetime(self): - # type: () -> str + def get_experiment_created_time_as_datetime(self) -> str: """ Important: Under OpenSUSE, it returns the last modified time.""" return timestamp_to_datetime_format(int(self.experiment_stat_data.st_ctime)) - def get_owner_id(self): - # type: () -> int + def get_owner_id(self) -> int: return int(self.experiment_stat_data.st_uid) - def get_owner_name(self): - # type: () -> str + def get_owner_name(self) -> str: try: stdout = os.popen("id -nu {0}".format(str(self.get_owner_id()))) owner_name = stdout.read().strip() @@ -180,15 +163,13 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): except: return "NA" - def get_autosubmit_version(self): - # type: () -> str + def get_autosubmit_version(self) -> str: return self.autosubmit_conf.get_version() def get_main_platform(self): return str(self.autosubmit_conf.get_platform()) - def get_section_processors(self, section_name): - # type: (str) -> int + def get_section_processors(self, section_name: str) -> int: return self._get_processors_number(str(self.autosubmit_conf.get_processors(section_name))) def get_section_qos(self, section_name): @@ -197,20 +178,17 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): def get_section_platform(self, section_name): return str(self.autosubmit_conf.get_job_platform(section_name)) - def get_platform_qos(self, platform_name, number_processors): - # type: (str, int) -> str + def get_platform_qos(self, 
platform_name: str, number_processors: int) -> str: if number_processors == 1: qos = str(self.autosubmit_conf.get_platform_serial_queue(platform_name)) if len(qos.strip()) > 0: return qos return str(self.autosubmit_conf.get_platform_queue(platform_name)) - def get_wrapper_qos(self): - # type: () -> str + def get_wrapper_qos(self) -> str: return str(self.autosubmit_conf.get_wrapper_queue()) - def get_wrapper_type(self): - # type: () -> str | None + def get_wrapper_type(self) -> Optional[str]: if self.autosubmit_conf.get_wrapper_type() and self.autosubmit_conf.get_wrapper_type().upper() != "NONE": return self.autosubmit_conf.get_wrapper_type().upper() return None @@ -221,16 +199,13 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): def get_platform_max_wallclock(self, platform_name): return str(self.autosubmit_conf.get_platform_wallclock(platform_name)) - def get_safety_sleep_time(self): - # type: () -> int + def get_safety_sleep_time(self) -> int: return self.autosubmit_conf.get_safetysleeptime() - def get_project_type(self): - # type: () -> str + def get_project_type(self) -> str: return self.autosubmit_conf.get_project_type() - def get_model(self): - # type: () -> str + def get_model(self) -> str: if self.get_project_type() == ProjectType.GIT: return self.get_git_project_origin() elif self.get_project_type() == ProjectType.SVN: @@ -238,8 +213,7 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): else: return "NA" - def get_branch(self): - # type: () -> str + def get_branch(self) -> str: if self.get_project_type() == ProjectType.GIT: return self.get_git_project_branch() elif self.get_project_type() == ProjectType.SVN: @@ -247,27 +221,22 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): else: return "NA" - def get_git_project_origin(self): - # type: () -> str + def get_git_project_origin(self) -> str: return self.autosubmit_conf.get_git_project_origin() - def get_git_project_branch(self): - # type: () -> str + def 
get_git_project_branch(self) -> str: return self.autosubmit_conf.get_git_project_branch() - def get_svn_project_url(self): - # type: () -> str + def get_svn_project_url(self) -> str: return self.autosubmit_conf.get_svn_project_url() - def update_sim_jobs(self, sim_jobs): - # type: (List[SimJob]) -> None + def update_sim_jobs(self, sim_jobs: List[SimJob]): """ Update the jobs with the latest configuration values: Processors, years per sim """ for job in sim_jobs: job.set_ncpus(self.sim_processing_elements) job.set_years_per_sim(self.current_years_per_sim) - def _get_processors_number(self, conf_job_processors): - # type: (str) -> int + def _get_processors_number(self, conf_job_processors: str) -> int: num_processors = 0 try: if str(conf_job_processors).find(":") >= 0: @@ -282,8 +251,7 @@ class AutosubmitConfigurationFacade(ConfigurationFacade): pass return num_processors - def _add_warning(self, message): - # type: (str) -> None + def _add_warning(self, message: str): self.warnings.append(message) def _estimate_requested_nodes(self) -> int: diff --git a/autosubmit_api/components/experiment/pkl_organizer.py b/autosubmit_api/components/experiment/pkl_organizer.py index 4150a3c..d12a7b1 100644 --- a/autosubmit_api/components/experiment/pkl_organizer.py +++ b/autosubmit_api/components/experiment/pkl_organizer.py @@ -21,37 +21,34 @@ class PklOrganizer(object): def __init__(self, configuration_facade: AutosubmitConfigurationFacade): self.current_content: List[Union[PklJob,PklJob14]] = [] - self.configuration_facade = configuration_facade # type: AutosubmitConfigurationFacade - self.sim_jobs = [] # type: List[Job] - self.post_jobs = [] # type: List[Job] - self.transfer_jobs = [] # type: List[Job] - self.clean_jobs = [] # type: List[Job] - self.pkl_path = configuration_facade.pkl_path # type: str - self.warnings = [] # type: List[str] - self.dates = set() # type: Set[str] - self.members = set() # type: Set[str] - self.sections = set() # type: Set[str] - 
self.section_jobs_map = {} # type: Dict[str, List[Job]] + self.configuration_facade: AutosubmitConfigurationFacade = configuration_facade + self.sim_jobs: List[Job] = [] + self.post_jobs: List[Job] = [] + self.transfer_jobs: List[Job] = [] + self.clean_jobs: List[Job] = [] + self.pkl_path: str = configuration_facade.pkl_path + self.warnings: List[str] = [] + self.dates: Set[str] = set() + self.members: Set[str] = set() + self.sections: Set[str] = set() + self.section_jobs_map: Dict[str, List[Job]] = {} # self.is_wrapper_type_in_pkl = is_wrapper_type_in_pkl_version(configuration_facade.get_autosubmit_version()) self._process_pkl() def prepare_jobs_for_performance_metrics(self): - # type: () -> None self.identify_dates_members_sections() self.distribute_jobs() self._sort_distributed_jobs() self._validate_current() - def get_completed_section_jobs(self, section): - # type: (str) -> List[Job] + def get_completed_section_jobs(self, section: str) -> List[Job]: if section in self.section_jobs_map: return [job for job in self.section_jobs_map[section] if job.status == Status.COMPLETED] else: return [] # raise KeyError("Section not supported.") - def get_simple_jobs(self, tmp_path): - # type: (str) -> List[SimpleJob] + def get_simple_jobs(self, tmp_path: str) -> List[SimpleJob]: """ Get jobs in pkl as SimpleJob objects.""" return [SimpleJob(job.name, tmp_path, job.status) for job in self.current_content] @@ -65,7 +62,6 @@ class PklOrganizer(object): raise Exception("Pkl file {0} not found.".format(self.pkl_path)) def identify_dates_members_sections(self): - # type: () -> None for job in self.current_content: if job.date and job.date not in self.dates: self.dates.add(job.date) @@ -76,7 +72,6 @@ class PklOrganizer(object): def distribute_jobs(self): - # type: () -> None for pkl_job in self.current_content: if JobSection.SIM == pkl_job.section: self.sim_jobs.append(factory.get_job_from_factory(pkl_job.section).from_pkl(pkl_job)) @@ -94,7 +89,6 @@ class PklOrganizer(object): } 
def _sort_distributed_jobs(self): - # type : () -> None """ SIM jobs are sorted by start_time """ self._sort_list_by_start_time(self.sim_jobs) self._sort_list_by_finish_time(self.post_jobs) @@ -102,7 +96,6 @@ class PklOrganizer(object): self._sort_list_by_finish_time(self.clean_jobs) def _validate_current(self): - # type : () -> None if len(self.get_completed_section_jobs(JobSection.SIM)) == 0: self._add_warning("We couldn't find COMPLETED SIM jobs in the experiment.") if len(self.get_completed_section_jobs(JobSection.POST)) == 0: @@ -112,17 +105,14 @@ class PklOrganizer(object): if len(self.get_completed_section_jobs(JobSection.TRANSFER)) == 0 and len(self.get_completed_section_jobs(JobSection.CLEAN)) > 0: self._add_warning("RSYPD | There are no TRANSFER (COMPLETED) jobs in the experiment. We will use (COMPLETED) CLEAN jobs to compute RSYPD.") - def _add_warning(self, message): - # type: (str) -> None + def _add_warning(self, message: str): self.warnings.append(message) - def _sort_list_by_finish_time(self, jobs): - # type: (List[Job]) -> None + def _sort_list_by_finish_time(self, jobs: List[Job]): if len(jobs): jobs.sort(key = lambda x: x.finish, reverse=False) - def _sort_list_by_start_time(self, jobs): - # type: (List[Job]) -> None + def _sort_list_by_start_time(self, jobs: List[Job]): if len(jobs): jobs.sort(key = lambda x: x.start, reverse=False) diff --git a/autosubmit_api/components/jobs/job_factory.py b/autosubmit_api/components/jobs/job_factory.py index 25a05da..34f4e57 100644 --- a/autosubmit_api/components/jobs/job_factory.py +++ b/autosubmit_api/components/jobs/job_factory.py @@ -25,41 +25,41 @@ class Job(metaclass=ABCMeta): """ Abstract Job """ def __init__(self): - self.name = None # type: str - self._id = None # type: int - self.status = Status.UNKNOWN # type: int - self.priority = 0 # type: int - self.date = None # type: str - self.member = None # type: str - self.chunk = None # type: str - self.out_path_local = None # type: str - 
self.err_path_local = None # type: str - self.out_path_remote = None # type: str - self.err_path_remote = None # type: str - self.section = "" # type: str - self._queue_time = 0 # type: int - self._run_time = 0 # type: int - self.energy = 0 # type: int - self._submit = 0 # type: int - self._start = 0 # type: int - self._finish = 0 # type: int - self.ncpus = 0 # type: int - self.platform = None # type: str - self.qos = "" # type: str - self.wallclock = "" # type: str - self.parents_names = set() # type: Set[str] - self.children_names = set() # type: Set[str] - self.package = None # type: str - self.package_code = None # type: str - self.package_symbol = None # type: str - self.running_time_text = None # type: str - self.tree_parent = [] # type: List[str] - self.run_id = None # type: int - self.x_coordinate = 0 # type: int - self.y_coordinate = 0 # type: int - self.level = 0 # type: int - self.horizontal_order = 1 # type: int - self.barycentric_value = 0.0 # type: float + self.name: str = None + self._id: int = None + self.status: int = Status.UNKNOWN + self.priority: int = 0 + self.date: str = None + self.member: str = None + self.chunk: str = None + self.out_path_local: str = None + self.err_path_local: str = None + self.out_path_remote: str = None + self.err_path_remote: str = None + self.section: str = "" + self._queue_time: int = 0 + self._run_time: int = 0 + self.energy: int = 0 + self._submit: int = 0 + self._start: int = 0 + self._finish: int = 0 + self.ncpus: int = 0 + self.platform: str = None + self.qos: str = "" + self.wallclock: str = "" + self.parents_names: Set[str] = set() + self.children_names: Set[str] = set() + self.package: str = None + self.package_code: str = None + self.package_symbol: str = None + self.running_time_text: str = None + self.tree_parent: List[str] = [] + self.run_id: int = None + self.x_coordinate: int = 0 + self.y_coordinate: int = 0 + self.level: int = 0 + self.horizontal_order: int = 1 + self.barycentric_value: float = 0.0 def 
has_parents(self): return len(self.parents_names) > 0 @@ -80,27 +80,23 @@ class Job(metaclass=ABCMeta): return None @property - def package_tag(self): + def package_tag(self) -> str: """ Also known as wrapper_tag """ - # type: () -> str if self.package and len(self.package) > 0: return JUtils.wrapped_title_format.format(self.package) return None @property - def rm_id(self): - # type: () -> int + def rm_id(self) -> int: return self._id @property - def queue_time(self): - # type: () -> int + def queue_time(self) -> int: """ Queue time fixed is provided. """ return self._queue_time @property - def run_time(self): - # type: () -> int + def run_time(self) -> int: """ Proper run time is provided. """ return self._run_time @@ -135,42 +131,35 @@ class Job(metaclass=ABCMeta): return self.finish @property - def submit_datetime(self): - # type: () -> str + def submit_datetime(self) -> str: return util.timestamp_to_datetime_format(self.submit) @property - def start_datetime(self): - # type: () -> str + def start_datetime(self) -> str: return util.timestamp_to_datetime_format(self.start) @property - def finish_datetime(self): - # type: () -> str + def finish_datetime(self) -> str: return util.timestamp_to_datetime_format(self.finish) @property - def status_color(self): + def status_color(self) -> str: return Monitor.color_status(self.status) @property - def status_text(self): - # type: () -> str + def status_text(self) -> str: return str(Status.VALUE_TO_KEY[self.status]) @property - def total_time(self): - # type: () -> int + def total_time(self) -> int: return self.queue_time + self.run_time @property - def total_processors(self): - # type: () -> int + def total_processors(self) -> int: return self.ncpus @property - def total_wallclock(self): - # type: () -> float + def total_wallclock(self) -> float: """ In hours """ if self.wallclock: hours, minutes = self.wallclock.split(':') @@ -178,8 +167,7 @@ class Job(metaclass=ABCMeta): return 0 @property - def tree_title(self): - # 
type: () -> str + def tree_title(self) -> str: title = "{0} #{2}".format(self.name, self.status_color, self.status_text, JUtils.get_status_text_color(self.status)) if self.running_time_text and len(self.running_time_text) > 0: title += " ~ {0}".format(self.running_time_text) @@ -194,8 +182,7 @@ class Job(metaclass=ABCMeta): return title @property - def leaf(self): - # type: () -> Dict[str, str] + def leaf(self) -> Dict[str, str]: return { "title": self.tree_title, "refKey": self.name, @@ -205,12 +192,10 @@ class Job(metaclass=ABCMeta): @abstractmethod def do_print(self): - # type: () -> None print(("Job {0} \n Date {5} \n Section {1} \n Qos {2} \n Children: {3} \n Platform {4} \n TreeParent {6}. ".format( self.name, self.section, self.qos, self.children_names, self.platform, self.date, self.tree_parent))) - def update_from_jobrow(self, jobrow): - # type: (JobRow) -> None + def update_from_jobrow(self, jobrow: JobRow): """ Updates: submit, start, finish, queue_time, run_time, energy, run_id. 
""" if jobrow: self._queue_time = max(int(jobrow.queue_time), 0) @@ -221,22 +206,18 @@ class Job(metaclass=ABCMeta): self._finish = int(jobrow.finish) self.run_id = jobrow.run_id - def set_ncpus(self, parallelization): - # type: (int) -> None + def set_ncpus(self, parallelization: int) -> None: self.ncpus = parallelization - def set_years_per_sim(self, years_per_sim): - # type: (float) -> None + def set_years_per_sim(self, years_per_sim: float) -> None: self.years_per_sim = max(years_per_sim, 0.0) - def get_date_ini_end(self, chunk_size, chunk_unit): - # type: (int, str) -> Tuple[str, str] + def get_date_ini_end(self, chunk_size: int, chunk_unit: str) -> Tuple[str, str]: return util.date_plus(self.date, chunk_unit, self.chunk, chunk_size) @classmethod - def from_pkl(cls, pkl_item): - # type: (str) -> Job + def from_pkl(cls, pkl_item: str) -> "Job": job = cls() job.name = pkl_item.name job._id = pkl_item.id @@ -278,8 +259,7 @@ class Job(metaclass=ABCMeta): return job @classmethod - def from_job_data_dc(cls, job_data_dc): - # type: (JobData) -> Job + def from_job_data_dc(cls, job_data_dc: JobData) -> "Job": job = cls() job.name = job_data_dc.job_name job._id = job_data_dc._id @@ -316,35 +296,31 @@ class SimJob(Job): def __init__(self): super(SimJob, self).__init__() self.section = util.JobSection.SIM - self.post_jobs_total_time_average = 0.0 # type: float - self.years_per_sim = 0 # type: float + self.post_jobs_total_time_average: float = 0.0 + self.years_per_sim: float = 0 @property - def CHSY(self): - # type: () -> float + def CHSY(self) -> float: if self.years_per_sim > 0: return round(((self.ncpus * self.run_time) / self.years_per_sim) / util.SECONDS_IN_ONE_HOUR, 2) return 0 @property - def JPSY(self): - # type: () -> float + def JPSY(self) -> float: if self.years_per_sim > 0: return round(self.energy / self.years_per_sim, 2) return 0 @property - def SYPD(self): - # type: () -> float + def SYPD(self) -> float: if self.years_per_sim > 0 and self.run_time > 0: 
return round((self.years_per_sim * util.SECONDS_IN_A_DAY) / self.run_time, 2) return 0 @property - def ASYPD(self): + def ASYPD(self) -> float: """ ASYPD calculation requires the average of the queue and run time of all post jobs """ - # type: () -> float divisor = self.total_time + self.post_jobs_total_time_average if divisor > 0: return round((self.years_per_sim * util.SECONDS_IN_A_DAY) / (divisor), 2) @@ -354,8 +330,7 @@ class SimJob(Job): def do_print(self): return super(SimJob, self).do_print() - def set_post_jobs_total_average(self, val): - # type: (float) -> None + def set_post_jobs_total_average(self, val: float): self.post_jobs_total_time_average = val @@ -404,43 +379,35 @@ class JobFactory(metaclass=ABCMeta): """ Generic Factory """ @abstractmethod - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: """ """ class SimFactory(JobFactory): - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: return SimJob() class PostFactory(JobFactory): - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: return PostJob() class TransferMemberFactory(JobFactory): - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: return TransferMemberJob() class TransferFactory(JobFactory): - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: return TransferJob() class CleanMemberFactory(JobFactory): - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: return CleanMemberJob() class CleanFactory(JobFactory): - def factory_method(self): - # type: () -> Job + def factory_method(self) -> Job: return CleanJob() -def get_job_from_factory(section): - # type: (str) -> StandardJob +def get_job_from_factory(section: str) -> StandardJob: factories = { util.JobSection.SIM : SimFactory(), util.JobSection.POST : PostFactory(), diff --git a/autosubmit_api/components/jobs/job_support.py 
b/autosubmit_api/components/jobs/job_support.py index e60939b..b4cba4c 100644 --- a/autosubmit_api/components/jobs/job_support.py +++ b/autosubmit_api/components/jobs/job_support.py @@ -1,8 +1,8 @@ import os -from ...config.basicConfig import APIBasicConfig -from .job_factory import Job +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.components.jobs.job_factory import Job from bscearth.utils.date import parse_date -from .utils import is_a_completed_retrial +from autosubmit_api.components.jobs.utils import is_a_completed_retrial from datetime import datetime from typing import List @@ -16,8 +16,7 @@ class JobSupport: """ Provides the methods to get the retrials of a job from the TOTAL_STATS files. """ - def __init__(self, expid, job, basic_config): - # type: (str, Job, APIBasicConfig) -> None + def __init__(self, expid: str, job: Job, basic_config: APIBasicConfig): self.expid = expid self.job = job self.basic_config = basic_config @@ -25,8 +24,7 @@ class JobSupport: self.total_stats_file_name = "{}_TOTAL_STATS".format(self.job.name) self.complete_total_stats_path = os.path.join(self.complete_tmp_path, self.total_stats_file_name) - def get_last_retrials(self): - # type: () -> List[List[datetime]] + def get_last_retrials(self) -> List[List[datetime]]: retrials_list = [] if os.path.exists(self.complete_total_stats_path): already_completed = False @@ -40,8 +38,7 @@ class JobSupport: retrials_list.insert(0, retrial_dates) return retrials_list - def check_started_after(self, start_datetime): - # type: (datetime) -> bool + def check_started_after(self, start_datetime: datetime) -> bool: """ Checks if the job started after the given date """ @@ -50,8 +47,7 @@ class JobSupport: else: return False - def check_running_after(self, finish_datetime): - # type: (datetime) -> bool + def check_running_after(self, finish_datetime: datetime) -> bool: """ Checks if the job was running after the given date """ @@ -60,22 +56,19 @@ class JobSupport: 
else: return False - def check_retrials_start_time(self): - # type: () -> List[datetime] + def check_retrials_start_time(self) -> List[datetime]: """ Returns list of start datetime for retrials from total stats file """ return self._get_from_total_stats(TotalStatsPosition.START) - def check_retrials_end_time(self): - # type: () -> List[datetime] + def check_retrials_end_time(self) -> List[datetime]: """ Returns list of end datetime for retrials from total stats file """ return self._get_from_total_stats(TotalStatsPosition.FINISH) - def _get_from_total_stats(self, index): - # type: (int) -> List[datetime] + def _get_from_total_stats(self, index) -> List[datetime]: """ Returns list of values from given column index position in TOTAL_STATS file associated to job """ diff --git a/autosubmit_api/components/jobs/joblist_helper.py b/autosubmit_api/components/jobs/joblist_helper.py index 14a87f7..4eadd69 100644 --- a/autosubmit_api/components/jobs/joblist_helper.py +++ b/autosubmit_api/components/jobs/joblist_helper.py @@ -11,20 +11,19 @@ from autosubmit_api.persistance.job_package_reader import JobPackageReader class JobListHelper(object): """ Loads time (queuing runnning) and packages. Applies the fix for queue time of jobs in wrappers. 
""" - def __init__(self, expid, configuration_facade, pkl_organizer, basic_config): - # type: (str, AutosubmitConfigurationFacade, PklOrganizer, APIBasicConfig) -> None - self.basic_config = basic_config # type: APIBasicConfig - self.configuration_facade = configuration_facade # type: AutosubmitConfigurationFacade - self.pkl_organizer = pkl_organizer # type: PklOrganizer - self.job_to_package = {} # type: Dict[str, str] - self.package_to_jobs = {} # type: Dict[str, List[str]] - self.package_to_package_id = {} # type: Dict[str, str] - self.package_to_symbol = {} # type: Dict[str, str] - self.job_name_to_job_row = {} # type: Dict[str, JobRow] - self.job_running_time_to_text = {} # type: Dict[str, str] - self._run_id_to_run_object = {} # type: Dict - self.warning_messages = [] # type: List - self.expid = expid # type: str + def __init__(self, expid, configuration_facade: AutosubmitConfigurationFacade, pkl_organizer: PklOrganizer, basic_config: APIBasicConfig): + self.basic_config: APIBasicConfig = basic_config + self.configuration_facade: AutosubmitConfigurationFacade = configuration_facade + self.pkl_organizer: PklOrganizer = pkl_organizer + self.job_to_package: Dict[str, str] = {} + self.package_to_jobs: Dict[str, List[str]] = {} + self.package_to_package_id: Dict[str, str] = {} + self.package_to_symbol: Dict[str, str] = {} + self.job_name_to_job_row: Dict[str, JobRow] = {} + self.job_running_time_to_text: Dict[str, str] = {} + self._run_id_to_run_object: Dict = {} + self.warning_messages: List = [] + self.expid: str = expid self.simple_jobs = self.pkl_organizer.get_simple_jobs(self.configuration_facade.tmp_path) self._initialize_main_values() @@ -42,15 +41,13 @@ class JobListHelper(object): self.job_name_to_job_row, self.job_running_time_to_text, self.warning_messages = JobList.get_job_times_collection( self.basic_config, self.simple_jobs, self.expid, self.job_to_package, self.package_to_jobs, timeseconds=True) - def update_with_timedata(self, section_jobs): - # 
type: (List[Job]) -> None + def update_with_timedata(self, section_jobs: List[Job]): """ Update Job information with JobRow (time) data from Historical Database (Or as_times information) """ for job in section_jobs: # if job.name in self.job_name_to_job_row: job.update_from_jobrow(self.job_name_to_job_row.get(job.name, None)) - def update_with_yps_per_run(self, section_jobs): - # type: (List[Job]) -> None + def update_with_yps_per_run(self, section_jobs: List[Job]): """ Update Job information with Historical Run information: years_per_sim """ self._retrieve_current_experiment_runs_required(section_jobs) for job in section_jobs: @@ -58,20 +55,17 @@ class JobListHelper(object): if yps_per_run > 0.0: job.set_years_per_sim(yps_per_run) - def _retrieve_current_experiment_runs_required(self, section_jobs): - # type: (List[Job]) -> None + def _retrieve_current_experiment_runs_required(self, section_jobs: List[Job]): for job in section_jobs: self._add_experiment_run(job.run_id) - def _get_yps_per_run_id(self, run_id): - # type: (int) -> float + def _get_yps_per_run_id(self, run_id: int) -> float: experiment_run = self._run_id_to_run_object.get(run_id, None) if experiment_run: return datechunk_to_year(experiment_run.chunk_unit, experiment_run.chunk_size) else: return 0.0 - def _add_experiment_run(self, run_id): - # type: (int) -> None + def _add_experiment_run(self, run_id: int): if run_id and run_id not in self._run_id_to_run_object: self._run_id_to_run_object[run_id] = JobDataStructure(self.expid, self.basic_config).get_experiment_run_by_id(run_id) diff --git a/autosubmit_api/components/jobs/joblist_loader.py b/autosubmit_api/components/jobs/joblist_loader.py index 1edc563..21f0231 100644 --- a/autosubmit_api/components/jobs/joblist_loader.py +++ b/autosubmit_api/components/jobs/joblist_loader.py @@ -16,15 +16,14 @@ logger = logging.getLogger('gunicorn.error') class JobListLoader(object): """ Class that manages loading the list of jobs from the pkl. Adds other resources. 
""" - def __init__(self, expid, joblist_helper): - # type: (str, JobListHelper) -> None + def __init__(self, expid: str, joblist_helper: JobListHelper): self.expid = expid self.joblist_helper = joblist_helper self.configuration_facade = self.joblist_helper.configuration_facade self.pkl_organizer = self.joblist_helper.pkl_organizer - self._jobs = [] # type: List[Job] - self._structure_adjacency = {} # type: Dict[str, List[str]] - self._job_dictionary = {} # type: Dict[str, Job] + self._jobs: List[Job] = [] + self._structure_adjacency: Dict[str, List[str]] = {} + self._job_dictionary: Dict[str, Job] = {} def load_jobs(self): @@ -41,8 +40,7 @@ class JobListLoader(object): self._generate_job_dictionary() self._update_job_logs() - def are_these_in_same_package(self, *names): - # type: (List[str]) -> bool + def are_these_in_same_package(self, *names: List[str]) -> bool: packages = set() for job_name in names: package_name = self.joblist_helper.job_to_package.get(job_name, None) @@ -66,8 +64,7 @@ class JobListLoader(object): if len(dates) != len(set(dates)): raise Exception("Repeated dates found. Autosubmit API can't generate a representation for this configuration. 
Review your configuration files.") - def get_all_jobs_in_package(self, package_name): - # type: (str) -> List[Job] + def get_all_jobs_in_package(self, package_name: str) -> List[Job]: jobs = [] job_names = self.joblist_helper.package_to_jobs.get(package_name, []) if job_names and len(job_names) > 0: @@ -80,30 +77,25 @@ class JobListLoader(object): return self.configuration_facade.log_path @property - def package_names(self): - # type: () -> Set[str] + def package_names(self) -> Set[str]: if self.joblist_helper.package_to_jobs: return set([package for package in self.joblist_helper.package_to_jobs]) return [] @property - def jobs(self): - # type: () -> List[Job] + def jobs(self) -> List[Job]: return self._jobs @property - def job_dictionary(self): - # type: () -> Dict[str, Job] + def job_dictionary(self) -> Dict[str, Job]: return self._job_dictionary @property - def chunk_unit(self): - # type: () -> str + def chunk_unit(self) -> str: return self.configuration_facade.chunk_unit @property - def chunk_size(self): - # type: () -> int + def chunk_size(self) -> int: return self.configuration_facade.chunk_size @property @@ -111,8 +103,7 @@ class JobListLoader(object): return self.pkl_organizer.dates @property - def dates_formatted_dict(self): - # type: () -> Dict[str, str] + def dates_formatted_dict(self) -> Dict[str, str]: if len(self.dates) > 0: date_format = self.date_format return {date: date2str(date, date_format) for date in self.dates} @@ -128,8 +119,7 @@ class JobListLoader(object): return self.pkl_organizer.sections @property - def date_format(self): - # type: () -> str + def date_format(self) -> str: date_format = '' for date in self.pkl_organizer.dates: if date.hour > 1: @@ -156,7 +146,6 @@ class JobListLoader(object): job.parents_names = set(parents_adjacency.get(job.name, [])) def assign_configuration_data_to_jobs(self): - # type: () -> None """ Sets Number of Processors, Platform, QoS, Wallclock""" section_to_config = {} for job in self._jobs: @@ -178,8 
+167,7 @@ class JobListLoader(object): job_platform = self.configuration_facade.get_main_platform() return job_platform - def _determine_qos(self, job): - # type: (Job) -> None + def _determine_qos(self, job: Job): job_qos = "" if job.package is not None: job_qos = self.configuration_facade.get_wrapper_qos() @@ -190,8 +178,7 @@ class JobListLoader(object): job_qos = self.configuration_facade.get_platform_qos(job.platform, job.ncpus) return job_qos - def _determine_wallclock(self, job): - # type: (Job) -> None + def _determine_wallclock(self, job: Job): wallclock = self.configuration_facade.get_section_wallclock(job.section) if len(wallclock.strip()) == 0: if job.platform != "None": @@ -199,7 +186,6 @@ class JobListLoader(object): return wallclock def assign_packages_to_jobs(self): - # type: () -> None if self.joblist_helper.job_to_package: for job in self._jobs: job.package = self.joblist_helper.job_to_package.get(job.name, None) @@ -214,7 +200,6 @@ class JobListLoader(object): def _update_job_logs(self): - # type: () -> None """ Updates job out and err logs of the job list """ diff --git a/autosubmit_api/components/jobs/utils.py b/autosubmit_api/components/jobs/utils.py index 95ec42d..4a7f8f1 100644 --- a/autosubmit_api/components/jobs/utils.py +++ b/autosubmit_api/components/jobs/utils.py @@ -23,47 +23,40 @@ SUBMIT_STATUS = {Status.COMPLETED, Status.FAILED, Status.QUEUING, Status.RUNNING START_STATUS = {Status.COMPLETED, Status.FAILED, Status.RUNNING} FINISH_STATUS = {Status.COMPLETED, Status.FAILED} -def is_a_completed_retrial(fields): - # type: (List[str]) -> bool +def is_a_completed_retrial(fields: List[str]) -> bool: """ Identifies one line of _TOTAL_STATS file """ if len(fields) == 4: if fields[3] == 'COMPLETED': return True return False -def get_corrected_submit_time_by_status(status_code, submit_time): - # type: (int, str) -> str +def get_corrected_submit_time_by_status(status_code: int, submit_time: str) -> str: if status_code in SUBMIT_STATUS: return 
submit_time return None -def get_corrected_start_time_by_status(status_code, start_time): - # type: (int, str) -> str +def get_corrected_start_time_by_status(status_code: int, start_time: str) -> str: if status_code in START_STATUS: return start_time return None -def get_corrected_finish_time_by_status(status_code, finish_time): - # type: (int, str) -> str +def get_corrected_finish_time_by_status(status_code: int, finish_time: str) -> str: if status_code in FINISH_STATUS: return finish_time return None -def get_status_text_color(status_code): - # type: (int) -> str +def get_status_text_color(status_code: int) -> str: if status_code in [Status.RUNNING, Status.FAILED, Status.HELD]: return "#fff" return "#000" -def get_folder_checkmark(completed_count, jobs_in_folder_count): - # type: (int, int) -> str +def get_folder_checkmark(completed_count: int, jobs_in_folder_count: int) -> str: if completed_count == jobs_in_folder_count: return checkmark_tag return "" -def get_folder_completed_tag(completed_count, jobs_in_folder_count): - # type: (int, int) -> str +def get_folder_completed_tag(completed_count: int, jobs_in_folder_count: int) -> str: tag = "" if completed_count == jobs_in_folder_count: tag = "" @@ -71,34 +64,27 @@ def get_folder_completed_tag(completed_count, jobs_in_folder_count): tag = "" return "{0} {1} / {2} COMPLETED".format(tag, completed_count, jobs_in_folder_count) -def get_folder_running_tag(running_count): - # type: (int) -> str +def get_folder_running_tag(running_count: int) -> str: if running_count > 0: return " {0} RUNNING".format(running_count) return "" -def get_folder_queuing_tag(queuing_count): - # type: (int) -> str +def get_folder_queuing_tag(queuing_count: int) -> str: if queuing_count > 0: return " {0} QUEUING".format(queuing_count) return "" -def get_folder_failed_tag(failed_count): - # type: (int) -> str +def get_folder_failed_tag(failed_count: int) -> str: if failed_count > 0: return " {0} FAILED".format(failed_count) return "" -def 
get_folder_held_tag(held_count): - # type: (int) -> str +def get_folder_held_tag(held_count: int) -> str: if held_count > 0: return " {0} HELD".format(held_count) return "" - -def get_date_folder_tag(title, startdate_count): - # type: (str, int) -> str - +def get_date_folder_tag(title: str, startdate_count: int) -> str: # set the proper color if title == "COMPLETED": color = "yellow" @@ -109,8 +95,7 @@ def get_date_folder_tag(title, startdate_count): tag = "".format(color) return "{0} {1} / {2} {3} ".format(tag, startdate_count, startdate_count, title) -def get_folder_date_member_title(expid, formatted_date, member, date_member_jobs_count, counters): - # type: (str, str, str, int, Dict[int, int]) -> str +def get_folder_date_member_title(expid: str, formatted_date: str, member: str, date_member_jobs_count: int, counters: Dict[int, int]) -> str: return "{0}_{1}_{2} {3}{4}{5}{6}{7}{8}".format( expid, formatted_date, @@ -123,8 +108,7 @@ def get_folder_date_member_title(expid, formatted_date, member, date_member_jobs get_folder_checkmark(counters[Status.COMPLETED], date_member_jobs_count) ) -def get_folder_package_title(package_name, jobs_count, counters): - # type: (str, int, Dict[int, int]) -> str +def get_folder_package_title(package_name: str, jobs_count: int, counters: Dict[int, int]) -> str: return "Wrapper: {0} {1}{2}{3}{4}{5}{6}".format( package_name, get_folder_completed_tag(counters[Status.COMPLETED], jobs_count), @@ -238,6 +222,5 @@ def job_times_to_text(minutes_queue: int, minutes_running: int, status: str): return running_text -def generate_job_html_title(job_name, status_color, status_text): - # type: (str, str, str) -> str +def generate_job_html_title(job_name: str, status_color: str, status_text: str) -> str: return job_name + " #" + status_text + "" \ No newline at end of file diff --git a/autosubmit_api/components/representations/graph/edge.py b/autosubmit_api/components/representations/graph/edge.py index 6239a23..7e033ab 100644 --- 
a/autosubmit_api/components/representations/graph/edge.py +++ b/autosubmit_api/components/representations/graph/edge.py @@ -13,12 +13,10 @@ class Edge(object, metaclass=ABCMeta): self._is_in_wrapper = "" self._dashed = "" - def _get_build_identifier(self): - # type: () -> str + def _get_build_identifier(self) -> str: return "{0}-{1}".format(self._from, self._to) - def get_as_json(self): - # type: () -> Dict[str, str] + def get_as_json(self) -> Dict[str, str]: return { "id": self._id, "from": self._from, @@ -28,8 +26,7 @@ class Edge(object, metaclass=ABCMeta): } class RealEdge(Edge): - def __init__(self, from_node_name, to_node_name, in_wrapper_and_same_wrapper): - # type: (str, str, bool) -> None + def __init__(self, from_node_name: str, to_node_name: str, in_wrapper_and_same_wrapper: bool): super(RealEdge, self).__init__() self._from = from_node_name self._to = to_node_name @@ -38,8 +35,7 @@ class RealEdge(Edge): self._dashed = False class PackageInnerEdge(Edge): - def __init__(self, from_node_name, to_node_name): - # type: (str, str) -> None + def __init__(self, from_node_name: str, to_node_name: str): super(PackageInnerEdge, self).__init__() self._from = from_node_name self._to = to_node_name diff --git a/autosubmit_api/components/representations/graph/graph.py b/autosubmit_api/components/representations/graph/graph.py index 5e4c96c..cd2d479 100644 --- a/autosubmit_api/components/representations/graph/graph.py +++ b/autosubmit_api/components/representations/graph/graph.py @@ -2,17 +2,17 @@ from multiprocessing.sharedctypes import Value import networkx as nx -from ....performance import utils as PUtils +from autosubmit_api.performance import utils as PUtils # import common.utils as utils -from ....common.utils import Status, get_average_total_time +from autosubmit_api.common.utils import Status, get_average_total_time from networkx.linalg.laplacianmatrix import laplacian_matrix -from ...jobs.job_factory import Job -from ...jobs.joblist_loader import JobListLoader 
-from ....monitor.monitor import Monitor -from ....database.db_jobdata import ExperimentGraphDrawing +from autosubmit_api.components.jobs.job_factory import Job +from autosubmit_api.components.jobs.joblist_loader import JobListLoader +from autosubmit_api.monitor.monitor import Monitor +from autosubmit_api.database.db_jobdata import ExperimentGraphDrawing -from .edge import Edge, RealEdge -from typing import List, Dict, Tuple, Set, Any +from autosubmit_api.components.representations.graph.edge import Edge, RealEdge +from typing import List, Dict, Optional, Tuple, Set, Any from scipy.sparse import linalg GRAPHVIZ_MULTIPLIER = 90 @@ -31,8 +31,7 @@ class GroupedBy: class GraphRepresentation(object): """ Graph Representation of Experiment """ - def __init__(self, expid, job_list_loader, layout, grouped=GroupedBy.NO_GROUP): - # type: (str, JobListLoader, str, str) -> None + def __init__(self, expid: str, job_list_loader: JobListLoader, layout: str, grouped: str = GroupedBy.NO_GROUP): self.expid = expid self.layout = layout self.grouped_by = grouped @@ -40,15 +39,15 @@ class GraphRepresentation(object): self.joblist_helper = self.joblist_loader.joblist_helper self.jobs = self.joblist_loader.jobs self.job_dictionary = self.joblist_loader.job_dictionary - self.average_post_time = 0.0 # type: float - self.we_have_valid_graph_drawing = False # type: bool - self.we_have_valid_graphviz_drawing = False # type: bool - self.edges = [] # type: List[Edge] - self.package_edges = [] # type: List[Edge] - self.nodes = [] # type: List[Dict[str, Any]] - self.groups = {} # type: Dict[str, Dict[str, Any]] - self.max_children_count = 0 # type: int - self.max_parent_count = 0 # type: int + self.average_post_time: float = 0.0 + self.we_have_valid_graph_drawing: bool = False + self.we_have_valid_graphviz_drawing: bool = False + self.edges: List[Edge] = [] + self.package_edges: List[Edge] = [] + self.nodes: List[Dict[str, Any]] = [] + self.groups: Dict[str, Dict[str, Any]] = {} + 
self.max_children_count: int = 0 + self.max_parent_count: int = 0 @property def job_count(self): @@ -59,7 +58,6 @@ class GraphRepresentation(object): return len(self.edges) def perform_calculations(self): - # type: () -> None """ Calculate Graph Representation """ self.joblist_loader.validate_job_list_configuration() self.add_normal_edges() @@ -106,16 +104,14 @@ class GraphRepresentation(object): else: raise ValueError("You have provided an invalid grouping selection: {}".format(self.grouped_by)) - def _get_grouped_by_status_dict(self): - # type: () -> Dict[str, Dict[str, Any]] + def _get_grouped_by_status_dict(self) -> Dict[str, Dict[str, Any]]: groups = {} groups['WAITING'] = {"color": Monitor.color_status(Status.WAITING)} groups['COMPLETED'] = {"color": Monitor.color_status(Status.COMPLETED)} groups['SUSPENDED'] = {"color": Monitor.color_status(Status.SUSPENDED)} return groups - def _get_grouped_by_date_member_dict(self): - # type: () -> Dict[str, Dict[str, Any]] + def _get_grouped_by_date_member_dict(self) -> Dict[str, Dict[str, Any]]: if len(self.joblist_loader.dates) == 0 or len(self.joblist_loader.members) == 0: raise Exception("This experiment doesn't admit grouping by date and member because there are {} dates and {} members.".format( len(self.joblist_loader.dates), len(self.joblist_loader.members))) @@ -144,8 +140,7 @@ class GraphRepresentation(object): return groups - def _solve_group_collisions(self, group_coordinates): - # type: (List[Tuple[str, int, int]]) -> Dict[str, Tuple[int, int]] + def _solve_group_collisions(self, group_coordinates: List[Tuple[str, int, int]]) -> Dict[str, Tuple[int, int]]: group_coordinates.sort(key=lambda group_triple: group_triple[2], reverse=True) new_group_coordinates = dict() visited = set() @@ -163,8 +158,7 @@ class GraphRepresentation(object): new_group_coordinates[group_name] = (x_i_coordinate, y_i_coordinate) return new_group_coordinates - def _get_defined_group_color(self, status_counters): - # type: (Dict[int, int]) 
-> str + def _get_defined_group_color(self, status_counters: Dict[int, int]) -> str: if status_counters.get(Status.FAILED, 0) > 0: return Monitor.color_status(Status.FAILED) elif status_counters.get(Status.RUNNING, 0) > 0: @@ -203,9 +197,7 @@ class GraphRepresentation(object): self.we_have_valid_graphviz_drawing = False def update_jobs_level(self): - # type: () -> None - def update_level(parent_job): - # type: (Job) -> None + def update_level(parent_job: Job): stack.append(parent_job) while stack: current = stack.pop() @@ -280,26 +272,22 @@ class GraphRepresentation(object): "y": job.y_coordinate }) - def _calculate_max_children_parent(self, children_count, parent_count): - # type: (int, int) -> None + def _calculate_max_children_parent(self, children_count: int, parent_count: int): self.max_children_count = max(self.max_children_count, children_count) self.max_parent_count = max(self.max_parent_count, parent_count) - def _assign_coordinates_to_jobs(self, valid_coordinates): + def _assign_coordinates_to_jobs(self, valid_coordinates: Optional[Dict[str, Tuple[int, int]]]) -> bool: """ False if valid_coordinates is None OR empty""" - # type: (Dict[str, Tuple[int, int]] | None) -> bool if valid_coordinates and len(valid_coordinates) > 0: for job_name in self.job_dictionary: self.job_dictionary[job_name].x_coordinate, self.job_dictionary[job_name].y_coordinate = valid_coordinates[job_name] return True return False - def _get_graph_drawing_data(self): - # type: () -> Dict[str, Tuple[int, int]] | None + def _get_graph_drawing_data(self) -> Optional[Dict[str, Tuple[int, int]]]: return ExperimentGraphDrawing(self.expid).get_validated_data(self.jobs) - def _get_calculated_graph_drawing(self): - # type: () -> Dict[str, Tuple[int, int]] + def _get_calculated_graph_drawing(self) -> Dict[str, Tuple[int, int]]: coordinates = dict() graph = Monitor().create_tree_list(self.expid, self.jobs, None, dict(), False, self.job_dictionary) graph_viz_result = graph.create("dot", 
format="plain") @@ -309,8 +297,7 @@ class GraphRepresentation(object): coordinates[str(node_data[1].decode())] = (int(float(node_data[2])) * GRAPHVIZ_MULTIPLIER, int(float(node_data[3])) * -GRAPHVIZ_MULTIPLIER) return coordinates - def _get_calculated_graph_laplacian_drawing(self): - # type: () -> Dict[str, Tuple[int, int]] + def _get_calculated_graph_laplacian_drawing(self) -> Dict[str, Tuple[int, int]]: coordinates = dict() nx_graph = nx.Graph() for job_name in self.job_dictionary: @@ -327,8 +314,7 @@ class GraphRepresentation(object): coordinates[job_name] = (int(x_coords[i]), int(y_coords[i])) return coordinates - def _get_calculated_hierarchical_drawing(self): - # type: () -> Dict[str, Tuple[int, int]] + def _get_calculated_hierarchical_drawing(self) -> Dict[str, Tuple[int, int]]: coordinates = {} processed_packages = set() max_level = max(job.level for job in self.jobs) diff --git a/autosubmit_api/components/representations/graph/test.py b/autosubmit_api/components/representations/graph/test.py index 1625297..9fde3ea 100644 --- a/autosubmit_api/components/representations/graph/test.py +++ b/autosubmit_api/components/representations/graph/test.py @@ -21,14 +21,12 @@ class TestGraph(unittest.TestCase): def get_loader(self, expid): return JobListLoaderDirector(JobListLoaderBuilder(expid)).build_loaded_joblist_loader(TestUtils.get_mock_basic_config()) - def get_standard_case_with_no_calculations(self): - # type: () -> GraphRepresentation + def get_standard_case_with_no_calculations(self) -> GraphRepresentation: """ """ loader = JobListLoaderDirector(JobListLoaderBuilder(CASE_NO_WRAPPERS)).build_loaded_joblist_loader(TestUtils.get_mock_basic_config()) return GraphRepresentation(CASE_NO_WRAPPERS, loader, Layout.STANDARD) - def get_wrapper_case_with_no_calculations(self): - # type: () -> GraphRepresentation + def get_wrapper_case_with_no_calculations(self) -> GraphRepresentation: loader = 
JobListLoaderDirector(JobListLoaderBuilder(CASE_WITH_WRAPPERS)).build_loaded_joblist_loader(TestUtils.get_mock_basic_config()) return GraphRepresentation(CASE_WITH_WRAPPERS, loader, Layout.STANDARD) diff --git a/autosubmit_api/components/representations/tree/tree.py b/autosubmit_api/components/representations/tree/tree.py index 20d2f12..1e7d4c7 100644 --- a/autosubmit_api/components/representations/tree/tree.py +++ b/autosubmit_api/components/representations/tree/tree.py @@ -1,9 +1,9 @@ #!/usr/bin/env python -from ...jobs import utils as JUtils -from ....performance import utils as PUtils -from ...jobs.joblist_loader import JobListLoader -from ...jobs.job_factory import Job -from ....common.utils import Status, get_average_total_time, get_current_timestamp +from autosubmit_api.components.jobs import utils as JUtils +from autosubmit_api.performance import utils as PUtils +from autosubmit_api.components.jobs.joblist_loader import JobListLoader +from autosubmit_api.components.jobs.job_factory import Job +from autosubmit_api.common.utils import Status, get_average_total_time, get_current_timestamp from collections import deque, OrderedDict from typing import List, Dict, Tuple, Set, Any from operator import is_not @@ -12,24 +12,21 @@ from functools import partial DEFAULT_MEMBER = "DEFAULT" class TreeRepresentation(object): - def __init__(self, expid, job_list_loader): - # type: (str, JobListLoader) -> None - self.expid = expid # type: str - # self.jobs = [] # type: List[Job] - self.joblist_loader = job_list_loader - self._date_member_distribution = {} # type: Dict[Tuple[str, str], List[Job]] - self._no_date_no_member_jobs = [] # type: List[Job] - self._normal_status = {Status.COMPLETED, Status.WAITING, Status.READY, Status.SUSPENDED} # type: Set - self.result_tree = list() # type: List - self.result_header = dict() # type: Dict - self.average_post_time = 0.0 # type: float - self.nodes = [] # type: List[Dict] - self._distributed_dates = OrderedDict() # type: 
OrderedDict[str, None] - self._distributed_members = OrderedDict() # type: OrderedDict[str, None] + def __init__(self, expid: str, job_list_loader: JobListLoader): + self.expid: str = expid + self.joblist_loader: JobListLoader = job_list_loader + self._date_member_distribution: Dict[Tuple[str, str], List[Job]] = {} + self._no_date_no_member_jobs: List[Job] = [] + self._normal_status: Set[Status] = {Status.COMPLETED, Status.WAITING, Status.READY, Status.SUSPENDED} + self.result_tree: List = [] + self.result_header: Dict = {} + self.average_post_time: float = 0.0 + self.nodes: List[Dict] = [] + self._distributed_dates: OrderedDict[str, None] = OrderedDict() + self._distributed_members: OrderedDict[str, None] = OrderedDict() def perform_calculations(self): - # type: () -> None self._distribute_into_date_member_groups() self._generate_date_member_tree_folders() self._generate_no_date_no_member_tree_folder() @@ -38,8 +35,7 @@ class TreeRepresentation(object): self._calculate_average_post_time() self._generate_node_data() - def get_tree_structure(self): - # type: () -> Dict[str, Any] + def get_tree_structure(self) -> Dict[str, Any]: return { "tree": self.result_tree, "jobs": self.nodes, @@ -51,7 +47,6 @@ class TreeRepresentation(object): } def _distribute_into_date_member_groups(self): - # type: () -> None for job in self.joblist_loader.jobs: if job.date is not None and job.member is not None: self._date_member_distribution.setdefault((job.date, job.member), []).append(job) @@ -78,7 +73,6 @@ class TreeRepresentation(object): self._no_date_no_member_jobs.append(job) def _generate_date_member_tree_folders(self): - # type: () -> None for date in self._distributed_dates: folders_in_date = list() formatted_date = self.joblist_loader.dates_formatted_dict.get(date, None) diff --git a/autosubmit_api/config/IConfigStrategy.py b/autosubmit_api/config/IConfigStrategy.py index 9fcdb8e..b687867 100644 --- a/autosubmit_api/config/IConfigStrategy.py +++ 
b/autosubmit_api/config/IConfigStrategy.py @@ -508,8 +508,7 @@ class IConfigStrategy(ABC): """ - def get_chunk_size_unit(self): - # type: () -> str + def get_chunk_size_unit(self) -> str: """ Unit for the chunk length @@ -519,8 +518,7 @@ class IConfigStrategy(ABC): pass - def get_chunk_size(self, default=1): - # type: (int) -> int + def get_chunk_size(self, default: int = 1) -> int: """ Chunk Size as defined in the expdef file. @@ -805,6 +803,5 @@ class IConfigStrategy(ABC): pass @staticmethod - def get_parser(parser_factory, file_path): - # type: (ConfigParserFactory, str) -> ConfigParser + def get_parser(parser_factory: ConfigParserFactory, file_path: str) -> ConfigParser: pass \ No newline at end of file diff --git a/autosubmit_api/config/confConfigStrategy.py b/autosubmit_api/config/confConfigStrategy.py index 46687a4..25515b7 100644 --- a/autosubmit_api/config/confConfigStrategy.py +++ b/autosubmit_api/config/confConfigStrategy.py @@ -31,8 +31,8 @@ from pyparsing import nestedExpr from bscearth.utils.config_parser import ConfigParserFactory, ConfigParser from bscearth.utils.date import parse_date from bscearth.utils.log import Log -from ..config.basicConfig import APIBasicConfig -from ..config.IConfigStrategy import IConfigStrategy +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.config.IConfigStrategy import IConfigStrategy logger = logging.getLogger('gunicorn.error') @@ -44,39 +44,37 @@ class confConfigStrategy(IConfigStrategy): :type expid: str """ - def __init__(self, expid, basic_config, parser_factory, extension=".conf"): - # type: (str, APIBasicConfig, ConfigParserFactory, Extension) -> None - + def __init__(self, expid: str, basic_config: APIBasicConfig, parser_factory: ConfigParserFactory, extension: str=".conf"): self.expid = expid self.basic_config = basic_config self.parser_factory = parser_factory # By default check for .yml files first as it is the new standard for AS 4.0 - self._conf_parser = None # type: 
ConfigParser + self._conf_parser: ConfigParser = None self._conf_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "autosubmit_" + expid + extension) if os.path.exists(self._conf_parser_file) == False: return None - self._exp_parser = None # type: ConfigParser + self._exp_parser: ConfigParser = None self._exp_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "expdef_" + expid + extension) if os.path.exists(self._exp_parser_file) == False: return None - self._platforms_parser = None # type: ConfigParser + self._platforms_parser: ConfigParser = None self._platforms_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "platforms_" + expid + extension) if os.path.exists(self._platforms_parser_file) == False: return None - self._jobs_parser = None # type: ConfigParser + self._jobs_parser: ConfigParser = None self._jobs_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "jobs_" + expid + extension) if os.path.exists(self._jobs_parser_file) == False: return None - self._proj_parser = None # type: ConfigParser + self._proj_parser: ConfigParser = None self._proj_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "proj_" + expid + extension) if os.path.exists(self._proj_parser_file) == False: @@ -952,8 +950,7 @@ class confConfigStrategy(IConfigStrategy): return default return int(chunk_ini) - def get_chunk_size_unit(self): - # type: () -> str + def get_chunk_size_unit(self) -> str: """ Unit for the chunk length @@ -968,8 +965,7 @@ class confConfigStrategy(IConfigStrategy): # res = self.autosubmit_conf.get('experiment') return self._exp_parser.get('experiment', 'CHUNKSIZEUNIT').lower() - def get_chunk_size(self, default=1): - # type: (int) -> int + def get_chunk_size(self, default: int = 1) -> int: """ Chunk Size as defined in the expdef file. 
diff --git a/autosubmit_api/config/config_common.py b/autosubmit_api/config/config_common.py index 567fb84..22ae980 100644 --- a/autosubmit_api/config/config_common.py +++ b/autosubmit_api/config/config_common.py @@ -37,9 +37,7 @@ class AutosubmitConfigResolver(object): :configWrapper: IConfigStrategy -> handling strategy for the type of config files used """ - def __init__(self, expid, basic_config, parser_factory): - # type: (str, APIBasicConfig, ConfigParserFactory) -> None - + def __init__(self, expid: str, basic_config: APIBasicConfig, parser_factory: ConfigParserFactory): self.expid = expid self._configWrapper = None self.basic_config = basic_config @@ -528,8 +526,7 @@ class AutosubmitConfigResolver(object): """ return self._configWrapper.get_chunk_ini(default) - def get_chunk_size_unit(self): - # type: () -> str + def get_chunk_size_unit(self) -> str: """ Unit for the chunk length @@ -539,8 +536,7 @@ class AutosubmitConfigResolver(object): return self._configWrapper.get_chunk_size_unit() - def get_chunk_size(self, default=1): - # type: (int) -> int + def get_chunk_size(self, default: int = 1) -> int: """ Chunk Size as defined in the expdef file. 
@@ -817,8 +813,7 @@ class AutosubmitConfigResolver(object): return self._configWrapper.is_valid_git_repository() - def get_parser(self, parser_factory, file_path) -> ConfigParser: - # type: (ConfigParserFactory, str) -> ConfigParser + def get_parser(self, parser_factory: ConfigParserFactory, file_path: str) -> ConfigParser: """ Gets parser for given file diff --git a/autosubmit_api/database/db_common.py b/autosubmit_api/database/db_common.py index 6eec21a..69e4920 100644 --- a/autosubmit_api/database/db_common.py +++ b/autosubmit_api/database/db_common.py @@ -372,8 +372,7 @@ def get_current_running_exp(): return {'experiment': result} -def get_experiment_by_id(expid): - # type: (str) -> Dict[str, Any] +def get_experiment_by_id(expid: str) -> Dict[str, Any]: result = {'id': 0, 'name': expid, 'description': "NA", 'version': "NA"} if not check_db(): return result diff --git a/autosubmit_api/experiment/common_db_requests.py b/autosubmit_api/experiment/common_db_requests.py index e8aa22f..ef8b82e 100644 --- a/autosubmit_api/experiment/common_db_requests.py +++ b/autosubmit_api/experiment/common_db_requests.py @@ -16,8 +16,7 @@ DB_FILES_STATUS = os.path.join( # STATUS ARCHIVE # Might be removed soon -def create_connection(db_file): - # type: (str) -> sqlite3.Connection +def create_connection(db_file: str) -> sqlite3.Connection: """ Create a database connection to the SQLite database specified by db_file. :param db_file: database file name diff --git a/autosubmit_api/experiment/common_requests.py b/autosubmit_api/experiment/common_requests.py index 82be53f..e7f055b 100644 --- a/autosubmit_api/experiment/common_requests.py +++ b/autosubmit_api/experiment/common_requests.py @@ -571,8 +571,7 @@ def get_job_log(expid, logfile, nlines=150): 'logcontent': logcontent} -def get_experiment_pkl(expid): - # type: (str) -> Dict[str, Any] +def get_experiment_pkl(expid: str) -> Dict[str, Any]: """ Gets the current state of the pkl in a format proper for graph update. 
""" @@ -628,8 +627,7 @@ def get_experiment_pkl(expid): } -def get_experiment_tree_pkl(expid): - # type: (str) -> Dict[str, Any] +def get_experiment_tree_pkl(expid: str) -> Dict[str, Any]: """ Gets the current state of the pkl in a format for tree update """ @@ -916,8 +914,7 @@ def get_quick_view(expid): return {"error": error, "error_message": error_message, "view_data": view_data, "tree_view": list(quick_tree_view), "total": total_count, "completed": completed_count, "failed": failed_count, "running": running_count, "queuing": queuing_count} -def get_job_history(expid, job_name): - # type: (str, str) -> Dict[str, Any] +def get_job_history(expid: str, job_name: str) -> Dict[str, Any]: error = False error_message = "" path_to_job_logs = "" diff --git a/autosubmit_api/history/data_classes/experiment_run.py b/autosubmit_api/history/data_classes/experiment_run.py index ef3459f..350cb4d 100644 --- a/autosubmit_api/history/data_classes/experiment_run.py +++ b/autosubmit_api/history/data_classes/experiment_run.py @@ -55,15 +55,15 @@ class ExperimentRun(object): self.modified = get_current_datetime_if_none(modified) # Added on DB 16 self.start = start self.finish = finish - self.chunk_unit = chunk_unit # type: str - self.chunk_size = chunk_size # type: int - self.submitted = submitted # type: int - self.queuing = queuing # type: int - self.running = running # type: int - self.completed = completed # type: int - self.failed = failed # type: int - self.total = total # type: int - self.suspended = suspended # type: int + self.chunk_unit: str = chunk_unit + self.chunk_size: int = chunk_size + self.submitted: int = submitted + self.queuing: int = queuing + self.running: int = running + self.completed: int = completed + self.failed: int = failed + self.total: int = total + self.suspended: int = suspended self.metadata = metadata @property @@ -84,8 +84,7 @@ class ExperimentRun(object): return wrapper_type return None - def getSYPD(self, job_list): - # type: (List[JobData]) -> 
float + def getSYPD(self, job_list: List[JobData]) -> float: outlier_free_list = [] if job_list: performance_jobs = [SimJob.from_job_data_dc(job_data_dc) for job_data_dc in job_list] @@ -104,11 +103,8 @@ class ExperimentRun(object): return round((years_per_sim * number_SIM * seconds_per_day) / total_run_time, 2) return None - def getASYPD(self, job_sim_list, job_post_list, run_id_wrapper_code_to_job_dcs): - # type: (List[JobData], List[JobData], Dict[Tuple[int, int], List[JobData]]) -> float - + def getASYPD(self, job_sim_list: List[JobData], job_post_list: List[JobData], run_id_wrapper_code_to_job_dcs: Dict[Tuple[int, int], List[JobData]]) -> float: try: - SIM_no_outlier_list = [] if job_sim_list and len(job_sim_list) > 0: performance_jobs = [SimJob.from_job_data_dc(job_data_dc) for job_data_dc in job_sim_list] diff --git a/autosubmit_api/history/data_classes/job_data.py b/autosubmit_api/history/data_classes/job_data.py index 739e0a5..897db9a 100644 --- a/autosubmit_api/history/data_classes/job_data.py +++ b/autosubmit_api/history/data_classes/job_data.py @@ -18,9 +18,9 @@ import collections import time -from .. import utils as HUtils -from ..database_managers import database_models as Models -from ...common import utils as common_utils +from autosubmit_api.history import utils as HUtils +from autosubmit_api.history.database_managers import database_models as Models +from autosubmit_api.common import utils as common_utils from datetime import datetime, timedelta from json import dumps, loads from typing import List @@ -271,8 +271,7 @@ class JobData(object): return None @property - def running_time(self): - # type: () -> int + def running_time(self) -> int: """ Calculates and returns the running time of the job, in seconds. """ @@ -281,8 +280,7 @@ class JobData(object): # return 0 @property - def queuing_time(self): - # type: () -> int + def queuing_time(self) -> int: """ Calculates and returns the queuing time of the job, in seconds. 
""" @@ -290,8 +288,7 @@ class JobData(object): return HUtils.calculate_queue_time_in_seconds(self.submit, self.start) # return 0 - def queuing_time_considering_package(self, jobs_in_package): - # type: (List[JobData]) -> int + def queuing_time_considering_package(self, jobs_in_package: List["JobData"]) -> int: considered_jobs = [job for job in jobs_in_package if job.job_name != self.job_name and job.start < (self.start - 20)] if len(considered_jobs) > 0: considered_jobs.sort(key=lambda x: x.queuing_time, reverse=True) @@ -300,8 +297,7 @@ class JobData(object): return max(0, int(self.start - self.submit) - int(max_queue)) return self.queuing_time - def delta_queueing_time_considering_package(self, jobs_in_package): - # type: (List[JobData]) -> str + def delta_queueing_time_considering_package(self, jobs_in_package: List["JobData"]) -> str: return str(timedelta(seconds=self.queuing_time_considering_package(jobs_in_package))) def get_hdata(self): diff --git a/autosubmit_api/history/database_managers/database_manager.py b/autosubmit_api/history/database_managers/database_manager.py index c7cb2d9..dd94e23 100644 --- a/autosubmit_api/history/database_managers/database_manager.py +++ b/autosubmit_api/history/database_managers/database_manager.py @@ -18,6 +18,7 @@ import sqlite3 import os +from typing import Tuple, List from autosubmit_api.history import utils as HUtils from autosubmit_api.history.database_managers import database_models as Models from autosubmit_api.config.basicConfig import APIBasicConfig @@ -31,15 +32,13 @@ class DatabaseManager(metaclass=ABCMeta): """ Simple database manager. Needs expid. 
""" AS_TIMES_DB_NAME = "as_times.db" # default AS_TIMES location ECEARTH_DB_NAME = "ecearth.db" # default EC_EARTH_DB_NAME location - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig): self.expid = expid self.JOBDATA_DIR = basic_config.JOBDATA_DIR self.LOCAL_ROOT_DIR = basic_config.LOCAL_ROOT_DIR self.db_version = Models.DatabaseVersion.NO_DATABASE.value - def get_connection(self, path): - # type : (str) -> Sqlite3Connection + def get_connection(self, path: str) -> sqlite3.Connection: """ Create a database connection to the SQLite database specified by path. :param path: database file name @@ -49,14 +48,12 @@ class DatabaseManager(metaclass=ABCMeta): self._create_database_file(path) return sqlite3.connect(path) - def _create_database_file(self, path): - # type : (str) -> None + def _create_database_file(self, path: str): """ creates a database files with full permissions """ os.umask(0) os.open(path, os.O_WRONLY | os.O_CREAT, 0o776) - def execute_statement_on_dbfile(self, path, statement): - # type : (str, str) -> None + def execute_statement_on_dbfile(self, path: str, statement: str): """ Executes a statement on a database file specified by path. """ conn = self.get_connection(path) cursor = conn.cursor() @@ -64,8 +61,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.commit() conn.close() - def execute_statement_with_arguments_on_dbfile(self, path, statement, arguments): - # type : (str, str, Tuple) -> None + def execute_statement_with_arguments_on_dbfile(self, path: str, statement: str, arguments: Tuple): """ Executes an statement with arguments on a database file specified by path. 
""" conn = self.get_connection(path) cursor = conn.cursor() @@ -73,8 +69,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.commit() conn.close() - def execute_many_statement_with_arguments_on_dbfile(self, path, statement, arguments_list): - # type : (str, str, List[Tuple]) -> None + def execute_many_statement_with_arguments_on_dbfile(self, path: str, statement: str, arguments_list: List[Tuple]) -> None: """ Executes many statements from a list of arguments specified by a path. """ conn = self.get_connection(path) cursor = conn.cursor() @@ -82,8 +77,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.commit() conn.close() - def execute_many_statements_on_dbfile(self, path, statements): - # type : (str, List[str]) -> None + def execute_many_statements_on_dbfile(self, path: str, statements: List[str]) -> None: """ Updates the table schema using a **small** list of statements. No Exception raised. Should be used to execute a list of schema updates that might have been already applied. @@ -94,8 +88,7 @@ class DatabaseManager(metaclass=ABCMeta): except Exception as exp: pass - def get_from_statement(self, path, statement): - # type : (str, str) -> List[Tuple] + def get_from_statement(self, path: str, statement: str) -> List[Tuple]: """ Get the rows from a statement with no arguments """ conn = self.get_connection(path) conn.text_factory = str @@ -105,8 +98,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.close() return statement_rows - def get_from_statement_with_arguments(self, path, statement, arguments): - # type : (str, str, Tuple) -> List[Tuple] + def get_from_statement_with_arguments(self, path: str, statement: str, arguments: Tuple) -> List[Tuple]: """ Get the rows from a statement with arguments """ conn = self.get_connection(path) conn.text_factory = str @@ -116,8 +108,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.close() return statement_rows - def insert_statement(self, path, statement): - # type : (str, str) -> int + def insert_statement(self, 
path: str, statement: str) -> int: """ Insert statement into path """ conn = self.get_connection(path) conn.text_factory = str @@ -128,8 +119,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.close() return lastrow_id - def insert_statement_with_arguments(self, path, statement, arguments): - # type : (str, str, Tuple) -> int + def insert_statement_with_arguments(self, path: str, statement: str, arguments: Tuple) -> int: """ Insert statement with arguments into path """ conn = self.get_connection(path) conn.text_factory = str @@ -140,8 +130,7 @@ class DatabaseManager(metaclass=ABCMeta): conn.close() return lastrow_id - def get_built_select_statement(self, table_name, conditions=None): - # type : (str, namedtuple, str) -> str + def get_built_select_statement(self, table_name: str, conditions: str = None) -> str: """ Build and return a SELECT statement with the same fields as the model. Requires that the table is associated with a model (namedtuple). """ model = Models.get_correct_model_for_table_and_version(table_name, self.db_version) # Models.table_name_to_model[table_name] if conditions: diff --git a/autosubmit_api/history/database_managers/database_models.py b/autosubmit_api/history/database_managers/database_models.py index 33c49c5..a249dda 100644 --- a/autosubmit_api/history/database_managers/database_models.py +++ b/autosubmit_api/history/database_managers/database_models.py @@ -51,8 +51,7 @@ ExperimentStatusRow = collections.namedtuple('ExperimentStatusRow', ['exp_id', ' -def get_experiment_row_model(db_version): - # type: (int) -> collections.namedtuple +def get_experiment_row_model(db_version: int) -> collections.namedtuple: if db_version >= DatabaseVersion.EXPERIMENT_HEADER_PLATFORM_ADDED.value: return ExperimentRunRow elif db_version >= DatabaseVersion.EXPERIMENT_HEADER_SCHEMA_CHANGES.value: @@ -60,8 +59,7 @@ def get_experiment_row_model(db_version): else: return ExperimentRunRowBase -def get_job_data_row_model(db_version): - # type: (int) -> 
collections.namedtuple +def get_job_data_row_model(db_version: int) -> collections.namedtuple: if db_version >= DatabaseVersion.EXPERIMENT_HEADER_PLATFORM_ADDED.value: return JobDataRow elif db_version >= DatabaseVersion.EXPERIMENT_HEADER_SCHEMA_CHANGES.value: @@ -102,8 +100,7 @@ table_name_to_model = { "pragma_version" : PragmaVersion } -def get_correct_model_for_table_and_version(table_name, db_version=0): - # type: (str, int) -> collections.namedtuple +def get_correct_model_for_table_and_version(table_name: str, db_version: int = 0) -> collections.namedtuple: if table_name == "experiment_run": return get_experiment_row_model(db_version) elif table_name == "job_data": diff --git a/autosubmit_api/history/database_managers/experiment_history_db_manager.py b/autosubmit_api/history/database_managers/experiment_history_db_manager.py index 1f35a92..4888a17 100644 --- a/autosubmit_api/history/database_managers/experiment_history_db_manager.py +++ b/autosubmit_api/history/database_managers/experiment_history_db_manager.py @@ -25,7 +25,7 @@ from autosubmit_api.history.data_classes.job_data import JobData from autosubmit_api.history.data_classes.experiment_run import ExperimentRun from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api.history.database_managers.database_manager import DatabaseManager -from typing import List +from typing import List, Optional from collections import namedtuple DEFAULT_MAX_COUNTER = 0 @@ -33,8 +33,7 @@ DEFAULT_MAX_COUNTER = 0 class ExperimentHistoryDbManager(DatabaseManager): """ Manages actions directly on the database. """ - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig): """ Requires expid and jobdata_dir_path. 
""" super(ExperimentHistoryDbManager, self).__init__(expid, basic_config) self._set_schema_changes() @@ -135,7 +134,6 @@ class ExperimentHistoryDbManager(DatabaseManager): ''') def _set_schema_changes(self): - # type : () -> None """ Creates the list of schema changes""" self.version_schema_changes = [ "ALTER TABLE job_data ADD COLUMN nnodes INTEGER NOT NULL DEFAULT 0", @@ -196,14 +194,12 @@ class ExperimentHistoryDbManager(DatabaseManager): raise Exception("No Experiment Runs registered.") return self.experiment_run_row_model(*max_experiment_run[0]) - def get_experiment_run_by_id(self, run_id): - # type: (int) -> ExperimentRun | None + def get_experiment_run_by_id(self, run_id: int) -> Optional[ExperimentRun]: if run_id: return ExperimentRun.from_model(self._get_experiment_run_by_id(run_id)) return None - def _get_experiment_run_by_id(self, run_id): - # type: (int) -> namedtuple + def _get_experiment_run_by_id(self, run_id: int) -> namedtuple: statement = self.get_built_select_statement("experiment_run", "run_id=?") arguments = (run_id,) experiment_run = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) @@ -211,13 +207,11 @@ class ExperimentHistoryDbManager(DatabaseManager): raise Exception("Experiment run {0} for experiment {1} does not exists.".format(run_id, self.expid)) return self.experiment_run_row_model(*experiment_run[0]) - def get_experiment_runs_dcs(self): - # type: () -> List[ExperimentRun] + def get_experiment_runs_dcs(self) -> List[ExperimentRun]: experiment_run_rows = self._get_experiment_runs() return [ExperimentRun.from_model(row) for row in experiment_run_rows] - def _get_experiment_runs(self): - # type: () -> List[namedtuple] + def _get_experiment_runs(self) -> List[namedtuple]: statement = self.get_built_select_statement("experiment_run") experiment_runs = self.get_from_statement(self.historicaldb_file_path, statement) return [self.experiment_run_row_model(*row) for row in experiment_runs] @@ -229,8 +223,7 
@@ class ExperimentHistoryDbManager(DatabaseManager): return True return False - def get_job_data_dcs_all(self): - # type: () -> List[JobData] + def get_job_data_dcs_all(self) -> List[JobData]: """ Gets all content from job_data ordered by id (from table). """ return [JobData.from_model(row) for row in self.get_job_data_all()] @@ -279,8 +272,7 @@ class ExperimentHistoryDbManager(DatabaseManager): job_data_rows_last = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) return [self.job_data_row_model(*row) for row in job_data_rows_last] - def get_job_data_dc_COMPLETED_by_wrapper_run_id(self, package_code, run_id): - # type: (int, int) -> List[JobData] + def get_job_data_dc_COMPLETED_by_wrapper_run_id(self, package_code: int, run_id: int) -> List[JobData]: if not run_id or package_code <= Models.RowType.NORMAL: return [] job_data_rows = self._get_job_data_dc_COMPLETED_by_wrapper_run_id(package_code, run_id) @@ -288,8 +280,7 @@ class ExperimentHistoryDbManager(DatabaseManager): return [] return [JobData.from_model(row) for row in job_data_rows] - def _get_job_data_dc_COMPLETED_by_wrapper_run_id(self, package_code, run_id): - # type: (int, int) -> List[namedtuple] + def _get_job_data_dc_COMPLETED_by_wrapper_run_id(self, package_code: int, run_id: int) -> List[namedtuple]: statement = self.get_built_select_statement("job_data", "run_id=? and rowtype=? and status=? 
ORDER BY id") arguments = (run_id, package_code, "COMPLETED") job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) @@ -312,8 +303,7 @@ class ExperimentHistoryDbManager(DatabaseManager): else: return [] - def get_job_data_dcs_COMPLETED_by_section(self, section): - # type: (str) -> List[JobData] + def get_job_data_dcs_COMPLETED_by_section(self, section: str) -> List[JobData]: arguments = {"status": "COMPLETED", "section": section} job_data_rows = self._get_job_data_COMPLETD_by_section(section) return [JobData.from_model(row) for row in job_data_rows] @@ -343,21 +333,18 @@ class ExperimentHistoryDbManager(DatabaseManager): job_data_rows = self.get_from_statement(self.historicaldb_file_path, statement) return [self.job_data_row_model(*row) for row in job_data_rows] - def get_job_data_dcs_by_name(self, job_name): - # type: (str) -> List[JobData] + def get_job_data_dcs_by_name(self, job_name: str) -> List[JobData]: job_data_rows = self._get_job_data_by_name(job_name) return [JobData.from_model(row) for row in job_data_rows] - def _get_job_data_by_name(self, job_name): - # type: (str) -> List[namedtuple] + def _get_job_data_by_name(self, job_name: str) -> List[namedtuple]: """ Get List of Models.JobDataRow for job_name """ statement = self.get_built_select_statement("job_data", "job_name=? ORDER BY counter DESC") arguments = (job_name,) job_data_rows = self.get_from_statement_with_arguments(self.historicaldb_file_path, statement, arguments) return [self.job_data_row_model(*row) for row in job_data_rows] - def _insert_job_data(self, job_data): - # type : (JobData) -> int + def _insert_job_data(self, job_data: JobData) -> int: """ Insert data class JobData into job_data table. 
""" statement = ''' INSERT INTO job_data(counter, job_name, created, modified, submit, start, finish, status, rowtype, ncpus, @@ -384,8 +371,7 @@ class ExperimentHistoryDbManager(DatabaseManager): experiment_run.submitted, experiment_run.suspended, experiment_run.metadata) return self.insert_statement_with_arguments(self.historicaldb_file_path, statement, arguments) - def update_many_job_data_change_status(self, changes): - # type : (List[Tuple]) -> None + def update_many_job_data_change_status(self, changes: List[tuple]): """ Update many job_data rows in bulk. Requires a changes list of argument tuples. Only updates finish, modified, status, and rowstatus by id. @@ -455,8 +441,7 @@ class ExperimentHistoryDbManager(DatabaseManager): statement = "pragma user_version={v:d};".format(v=version) self.execute_statement_on_dbfile(self.historicaldb_file_path, statement) - def _get_pragma_version(self): - # type: () -> int + def _get_pragma_version(self) -> int: """ Gets current pragma version as int. 
""" statement = "pragma user_version;" pragma_result = self.get_from_statement(self.historicaldb_file_path, statement) diff --git a/autosubmit_api/history/experiment_history.py b/autosubmit_api/history/experiment_history.py index f9ae423..b2643c6 100644 --- a/autosubmit_api/history/experiment_history.py +++ b/autosubmit_api/history/experiment_history.py @@ -28,8 +28,7 @@ from typing import List, Dict, Tuple, Any SECONDS_WAIT_PLATFORM = 60 class ExperimentHistory(): - def __init__(self, expid, basic_config, experiment_history_db_manager, logger): - # type: (str, APIBasicConfig, ExperimentHistoryDbManager, Logging) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig, experiment_history_db_manager: ExperimentHistoryDbManager, logger: Logging) -> None: self.expid = expid self._log = logger self.basic_config = basic_config @@ -46,20 +45,19 @@ class ExperimentHistory(): return self.manager.is_header_ready_db_version() return False - def get_historic_job_data(self, job_name): - # type: (str) -> List[Dict[str, Any]] + def get_historic_job_data(self, job_name: str) -> List[Dict[str, Any]]: result = [] all_job_data_dcs = self.manager.get_job_data_dcs_by_name(job_name) post_job_data_dcs = self.manager.get_job_data_dcs_COMPLETED_by_section("POST") - run_id_to_POST_job_data_dcs = {} # type: Dict[int, List[JobData]] - run_id_wrapper_code_to_job_data_dcs = {} # type: Dict[Tuple[int, int], List[JobData]] + run_id_to_POST_job_data_dcs: Dict[int, List[JobData]] = {} + run_id_wrapper_code_to_job_data_dcs: Dict[Tuple[int, int], List[JobData]] = {} for job_data_dc in post_job_data_dcs: run_id_to_POST_job_data_dcs.setdefault(job_data_dc.run_id, []).append(job_data_dc) if (job_data_dc.run_id, job_data_dc.rowtype) not in run_id_wrapper_code_to_job_data_dcs: run_id_wrapper_code_to_job_data_dcs[(job_data_dc.run_id, job_data_dc.rowtype)] = self.manager.get_job_data_dc_COMPLETED_by_wrapper_run_id(job_data_dc.rowtype, job_data_dc.run_id) - run_id_to_experiment_run_involved = 
{} # type: Dict[int, ExperimentRun] + run_id_to_experiment_run_involved: Dict[int, ExperimentRun] = {} for job_data_dc in all_job_data_dcs: if job_data_dc.run_id not in run_id_to_experiment_run_involved: run_id_to_experiment_run_involved[job_data_dc.run_id] = self.manager.get_experiment_run_by_id(job_data_dc.run_id) diff --git a/autosubmit_api/history/internal_logging.py b/autosubmit_api/history/internal_logging.py index 4453fd3..464c78d 100644 --- a/autosubmit_api/history/internal_logging.py +++ b/autosubmit_api/history/internal_logging.py @@ -16,13 +16,12 @@ # You should have received a copy of the GNU General Public License # along with Autosubmit. If not, see . import os -from ..experiment import utils as HUtils +from autosubmit_api.experiment import utils as HUtils # from database_managers.database_manager import DEFAULT_LOCAL_ROOT_DIR, DEFAULT_HISTORICAL_LOGS_DIR -from ..config.basicConfig import APIBasicConfig +from autosubmit_api.config.basicConfig import APIBasicConfig class Logging(): - def __init__(self, expid, basic_config): - # type: (str, APIBasicConfig) -> None + def __init__(self, expid: str, basic_config: APIBasicConfig): self.expid = expid self.historiclog_dir_path = basic_config.HISTORICAL_LOG_DIR self._make_log_directory_if_not_exists() diff --git a/autosubmit_api/history/utils.py b/autosubmit_api/history/utils.py index 78b6dab..26e1101 100644 --- a/autosubmit_api/history/utils.py +++ b/autosubmit_api/history/utils.py @@ -21,6 +21,8 @@ import time import os from datetime import datetime from typing import Dict, Any, Tuple +from typing import Any, Union +from datetime import datetime DATETIME_FORMAT = '%Y-%m-%d-%H:%M:%S' SAFE_TIME_LIMIT = 300 @@ -30,48 +32,41 @@ def get_fields_as_comma_str(model): """ Get the fields of a namedtumple as a comma separated string. 
""" return ",".join(model._fields) -def calculate_queue_time_in_seconds(submit_time, start_time): - # type : (float, float) -> int +def calculate_queue_time_in_seconds(submit_time: float, start_time: float) -> int: """ Calculates queue time in seconds based on submit and start timestamps. """ if submit_time > 0 and start_time > 0 and (start_time - submit_time) > 0: return int(start_time - submit_time) return 0 -def calculate_run_time_in_seconds(start_time, finish_time): - # type : (float, float) -> int +def calculate_run_time_in_seconds(start_time: float, finish_time: float) -> int: """ Calculates run time in seconds based on start and finish timestamps. """ if finish_time > 0 and start_time > 0 and (finish_time - start_time) > 0: return int(finish_time - start_time) return 0 -def get_current_datetime(): - # type : () -> str +def get_current_datetime() -> str: """ Returns the current time in format '%Y-%m-%d-%H:%M:%S' """ return datetime.today().strftime(DATETIME_FORMAT) -def get_current_datetime_if_none(argument): - # type : (Any) -> Union[Any, str] +def get_current_datetime_if_none(argument: Any) -> Union[Any, str]: """ Returns the current time in format '%Y-%m-%d-%H:%M:%S' if the supplied argument is None, else return argument. 
""" if argument is None: return get_current_datetime() else: return argument -def create_file_with_full_permissions(path): - # type : (str) -> None +def create_file_with_full_permissions(path: str) -> None: """ creates a database files with full permissions """ os.umask(0) os.open(path, os.O_WRONLY | os.O_CREAT, 0o777) -def create_path_if_not_exists(path): - # type : (str) -> bool +def create_path_if_not_exists(path: str) -> bool: if not os.path.exists(path): os.makedirs(path) return True return False -def get_built_statement_from_kwargs(order_by="id", **conditions): - # type: (str, Dict[Any, Any]) -> Tuple[str, Tuple] +def get_built_statement_from_kwargs(order_by: str = "id", **conditions: Dict[Any, Any]) -> Tuple[str, Tuple]: columns_base = [key for key in conditions] columns = ["{} = ?".format(str(key)) for key in columns_base] values_base = tuple([conditions[key] for key in columns_base]) diff --git a/autosubmit_api/performance/performance_metrics.py b/autosubmit_api/performance/performance_metrics.py index a60caea..f7a55f8 100644 --- a/autosubmit_api/performance/performance_metrics.py +++ b/autosubmit_api/performance/performance_metrics.py @@ -1,5 +1,7 @@ #!/usr/bin/env python import traceback +from autosubmit_api.components.experiment.configuration_facade import AutosubmitConfigurationFacade +from autosubmit_api.components.experiment.pkl_organizer import PklOrganizer from autosubmit_api.logger import logger from autosubmit_api.common import utils as utils from autosubmit_api.components.jobs.joblist_helper import JobListHelper @@ -13,25 +15,25 @@ class PerformanceMetrics(object): self.expid = expid self.error = False self.error_message = "" - self.total_sim_run_time = 0 # type : int - self.total_sim_queue_time = 0 # type : int - self.SYPD = 0 # type: float - self.ASYPD = 0 # type: float - self.CHSY = 0 # type: float - self.JPSY = 0 # type: float - self.RSYPD = 0 # type: float - self.processing_elements = 1 + self.total_sim_run_time: int = 0 + 
self.total_sim_queue_time: int = 0 + self.SYPD: float = 0 + self.ASYPD: float = 0 + self.CHSY: float = 0 + self.JPSY: float = 0 + self.RSYPD: float = 0 + self.processing_elements: int = 1 self._considered: List[Dict] = [] self._not_considered: List[Dict] = [] - self._sim_processors = 1 # type : int - self.warnings = [] # type : List - self.post_jobs_total_time_average = 0 # type : int + self._sim_processors: int = 1 + self.warnings: List[str] = [] + self.post_jobs_total_time_average: int = 0 self.sim_jobs_valid: List[SimJob] = [] self.sim_jobs_invalid: List[SimJob] = [] try: - self.joblist_helper = joblist_helper # type: JobListHelper - self.configuration_facade = self.joblist_helper.configuration_facade # type : AutosubmitConfigurationFacade - self.pkl_organizer = self.joblist_helper.pkl_organizer # type : PklOrganizer + self.joblist_helper: JobListHelper = joblist_helper + self.configuration_facade: AutosubmitConfigurationFacade = self.joblist_helper.configuration_facade + self.pkl_organizer: PklOrganizer = self.joblist_helper.pkl_organizer self.pkl_organizer.prepare_jobs_for_performance_metrics() self._sim_processors = self.configuration_facade.sim_processors self.processing_elements = self.configuration_facade.sim_processing_elements @@ -157,8 +159,7 @@ class PerformanceMetrics(object): else: return [] - def _get_RSYPD_divisor(self): - # type: () -> float + def _get_RSYPD_divisor(self) -> float: support_list = self._get_RSYPD_support_list() divisor = 0 if len(support_list) > 0 and len(self.sim_jobs_valid): @@ -180,8 +181,7 @@ class PerformanceMetrics(object): "chunk": simjob.chunk } - def to_json(self): - # type: () -> Dict + def to_json(self) -> Dict: return {"SYPD": self.SYPD, "ASYPD": self.ASYPD, "RSYPD": self.RSYPD, diff --git a/autosubmit_api/performance/utils.py b/autosubmit_api/performance/utils.py index 0364d52..aec71af 100644 --- a/autosubmit_api/performance/utils.py +++ b/autosubmit_api/performance/utils.py @@ -1,8 +1,7 @@ #!/usr/bin/env pytthon from 
autosubmit_api.common.utils import Status, datechunk_to_year -def calculate_SYPD_perjob(chunk_unit, chunk_size, job_chunk, run_time, status): - # type: (str, int, int, int, int) -> float +def calculate_SYPD_perjob(chunk_unit: str, chunk_size: int, job_chunk: int, run_time: int, status: int) -> float: """ Generalization of SYPD at job level. """ @@ -13,8 +12,7 @@ def calculate_SYPD_perjob(chunk_unit, chunk_size, job_chunk, run_time, status): return None -def calculate_ASYPD_perjob(chunk_unit, chunk_size, job_chunk, queue_run_time, average_post, status): - # type: (str, int, int, int, float, int) -> float +def calculate_ASYPD_perjob(chunk_unit: str, chunk_size: int, job_chunk: int, queue_run_time: int, average_post: float, status: int) -> float: """ Generalization of ASYPD at job level """ @@ -24,4 +22,4 @@ def calculate_ASYPD_perjob(chunk_unit, chunk_size, job_chunk, queue_run_time, av divisor = queue_run_time + average_post if divisor > 0.0: return round((years_per_sim * 86400) / divisor, 2) - return None \ No newline at end of file + return None diff --git a/autosubmit_api/statistics/job_stat.py b/autosubmit_api/statistics/job_stat.py index 7481447..89709e3 100644 --- a/autosubmit_api/statistics/job_stat.py +++ b/autosubmit_api/statistics/job_stat.py @@ -3,14 +3,13 @@ from datetime import datetime, timedelta from .utils import timedelta2hours class JobStat(object): - def __init__(self, name, processors, wallclock, section, date, member, chunk): - # type: (str, int, float, str, str, str, str) -> None + def __init__(self, name: str, processors: int, wallclock: float, section: str, date: str, member: str, chunk: str): self._name = name self._processors = processors self._wallclock = wallclock - self.submit_time = None # type: datetime - self.start_time = None # type: datetime - self.finish_time = None # type: datetime + self.submit_time: datetime = None + self.start_time: datetime = None + self.finish_time: datetime = None self.completed_queue_time = timedelta() 
self.completed_run_time = timedelta() self.failed_queue_time = timedelta() diff --git a/autosubmit_api/statistics/statistics.py b/autosubmit_api/statistics/statistics.py index fe0d91f..712264e 100644 --- a/autosubmit_api/statistics/statistics.py +++ b/autosubmit_api/statistics/statistics.py @@ -1,12 +1,12 @@ #!/bin/env/python -from ..components.jobs import utils as JobUtils +from autosubmit_api.components.jobs import utils as JobUtils from datetime import datetime, timedelta -from ..config.basicConfig import APIBasicConfig -from ..components.jobs.job_factory import Job -from .job_stat import JobStat -from .stats_summary import StatsSummary -from ..components.jobs.job_support import JobSupport -from .utils import timedelta2hours +from autosubmit_api.config.basicConfig import APIBasicConfig +from autosubmit_api.components.jobs.job_factory import Job +from autosubmit_api.statistics.job_stat import JobStat +from autosubmit_api.statistics.stats_summary import StatsSummary +from autosubmit_api.components.jobs.job_support import JobSupport +from autosubmit_api.statistics.utils import timedelta2hours from typing import List, Union, Dict # from collections import namedtuple @@ -15,8 +15,7 @@ _FAILED_RETRIAL = 0 class Statistics(object): - def __init__(self, expid, jobs, start, end, queue_time_fix, basic_config): - # type: (str, List[Job], datetime, datetime, Dict[str, int], APIBasicConfig) -> None + def __init__(self, expid: str, jobs: List[Job], start: datetime, end: datetime, queue_time_fix: Dict[str, int], basic_config: APIBasicConfig): """ """ self.expid = expid @@ -25,26 +24,25 @@ class Statistics(object): self._end = end self._queue_time_fixes = queue_time_fix self.basic_config = basic_config - self._name_to_jobstat_dict = dict() # type: Dict[str, JobStat] - self.jobs_stat = [] # type: List[JobStat] + self._name_to_jobstat_dict: Dict[str, JobStat] = dict() + self.jobs_stat: List[JobStat] = [] # Old format - self.max_time = 0.0 # type: float - self.max_fail = 0 # type: 
int - self.start_times = [] # type: List[Union[datetime, None]] - self.end_times = [] # type: List[Union[datetime, None]] - self.queued = [] # type: List[timedelta] - self.run = [] # type: List[timedelta] - self.failed_jobs = [] # type: List[int] - self.fail_queued = [] # type: List[timedelta] - self.fail_run = [] # type: List[timedelta] - self.wallclocks = [] # type: List[float] - self.threshold = 0.0 # type: float - self.failed_jobs_dict = {} # type: Dict[str, int] + self.max_time: float = 0.0 + self.max_fail: int = 0 + self.start_times: List[Union[datetime, None]] = [] + self.end_times: List[Union[datetime, None]] = [] + self.queued: List[timedelta] = [] + self.run: List[timedelta] = [] + self.failed_jobs: List[int] = [] + self.fail_queued: List[timedelta] = [] + self.fail_run: List[timedelta] = [] + self.wallclocks: List[float] = [] + self.threshold: float = 0.0 + self.failed_jobs_dict: Dict[str, int] = {} self.summary = StatsSummary() self.totals = [" Description text \n", "Line 1"] - def calculate_statistics(self): - # type: () -> List[JobStat] + def calculate_statistics(self) -> List[JobStat]: for index, job in enumerate(self._jobs): retrials = JobSupport(self.expid, job, self.basic_config).get_last_retrials() for retrial in retrials: @@ -71,8 +69,7 @@ class Statistics(object): self.jobs_stat = sorted(list(self._name_to_jobstat_dict.values()), key=lambda x: (x.date if x.date else datetime.now(), x.member if x.member else "", x.section if x.section else "", x.chunk if x.chunk is not None else 0)) return self.jobs_stat - def calculate_summary(self): - # type: () -> StatsSummary + def calculate_summary(self) -> StatsSummary: stat_summary = StatsSummary() for job in self.jobs_stat: job_stat_dict = job.get_as_dict() @@ -108,7 +105,6 @@ class Statistics(object): } def make_old_format(self): - # type: () -> None """ Makes old format """ self.start_times = [job.start_time for job in self.jobs_stat] self.end_times = [job.finish_time for job in self.jobs_stat] @@ 
-126,8 +122,7 @@ class Statistics(object): max_fail_run = max(self.fail_run) self.max_time = max(max_queue, max_run, max_fail_queue, max_fail_run, self.threshold) - def build_failed_jobs_only_list(self): - # type: () -> Dict[str, int] + def build_failed_jobs_only_list(self) -> Dict[str, int]: for i, job in enumerate(self.jobs_stat): if self.failed_jobs[i] > 0: self.failed_jobs_dict[job._name] = self.failed_jobs[i] diff --git a/autosubmit_api/statistics/stats_summary.py b/autosubmit_api/statistics/stats_summary.py index 7fc9558..c05397b 100644 --- a/autosubmit_api/statistics/stats_summary.py +++ b/autosubmit_api/statistics/stats_summary.py @@ -13,7 +13,7 @@ class StatsSummary(object): self.real_consumption = 0.0 self.failed_real_consumption = 0.0 # CPU Consumption - self.expected_cpu_consumption = 0.0 # type: float + self.expected_cpu_consumption: float = 0.0 self.cpu_consumption = 0.0 self.failed_cpu_consumption = 0.0 self.total_queue_time = 0.0 diff --git a/autosubmit_api/statistics/utils.py b/autosubmit_api/statistics/utils.py index bf0a09b..d83ff10 100644 --- a/autosubmit_api/statistics/utils.py +++ b/autosubmit_api/statistics/utils.py @@ -3,6 +3,5 @@ from datetime import timedelta -def timedelta2hours(deltatime): - # type: (timedelta) -> float +def timedelta2hours(deltatime: timedelta) -> float: return deltatime.days * 24 + deltatime.seconds / 3600.0 diff --git a/autosubmit_api/workers/business/process_graph_drawings.py b/autosubmit_api/workers/business/process_graph_drawings.py index fc3517e..c536d42 100644 --- a/autosubmit_api/workers/business/process_graph_drawings.py +++ b/autosubmit_api/workers/business/process_graph_drawings.py @@ -12,7 +12,7 @@ from autosubmit_api.builders.joblist_loader_builder import ( JobListLoaderBuilder, JobListLoaderDirector, ) -from typing import List, Any +from typing import List, Any, Optional def process_active_graphs(): @@ -47,8 +47,7 @@ def process_active_graphs(): print(("Error while processing graph drawing: 
{}".format(exp))) -def _process_graph(expid, chunk_size): - # type: (str, int) -> List[Any] | None +def _process_graph(expid: str, chunk_size: int) -> Optional[List[Any]]: result = None experimentGraphDrawing = ExperimentGraphDrawing(expid) locked = experimentGraphDrawing.locked diff --git a/autosubmit_api/workers/populate_details/populate.py b/autosubmit_api/workers/populate_details/populate.py index 175b21b..760d9dd 100644 --- a/autosubmit_api/workers/populate_details/populate.py +++ b/autosubmit_api/workers/populate_details/populate.py @@ -108,7 +108,6 @@ class DetailsProcessor: conn.commit() def _clean_table(self): - # type: () -> None with self.main_db_engine.connect() as conn: with conn.execution_options(isolation_level="AUTOCOMMIT"): conn.execute(tables.details_table.delete()) -- GitLab From 209a9142b0f0ae9d56de7566c0ec625a19c60d30 Mon Sep 17 00:00:00 2001 From: ltenorio Date: Mon, 2 Sep 2024 14:09:33 +0200 Subject: [PATCH 2/2] fix import issues (F401 & F811) --- autosubmit_api/app.py | 1 - autosubmit_api/blueprints/v4.py | 1 - autosubmit_api/builders/experiment_builder.py | 1 - autosubmit_api/components/experiment/pkl_organizer.py | 2 -- autosubmit_api/components/representations/graph/edge.py | 4 ++-- autosubmit_api/components/representations/graph/graph.py | 4 +--- autosubmit_api/components/representations/graph/test.py | 3 +-- autosubmit_api/components/representations/tree/test.py | 3 --- autosubmit_api/config/IConfigStrategy.py | 9 --------- autosubmit_api/config/confConfigStrategy.py | 4 ---- autosubmit_api/history/utils.py | 5 +---- tests/conftest.py | 2 -- 12 files changed, 5 insertions(+), 34 deletions(-) diff --git a/autosubmit_api/app.py b/autosubmit_api/app.py index b191135..2d46cdb 100644 --- a/autosubmit_api/app.py +++ b/autosubmit_api/app.py @@ -10,7 +10,6 @@ from autosubmit_api.database import prepare_db from autosubmit_api.experiment import common_requests as CommonRequests from autosubmit_api.logger import get_app_logger from 
autosubmit_api.config.basicConfig import APIBasicConfig -import sys from autosubmit_api.config import ( PROTECTION_LEVEL, RUN_BACKGROUND_TASKS_ON_START, diff --git a/autosubmit_api/blueprints/v4.py b/autosubmit_api/blueprints/v4.py index 46cfb35..f4092e5 100644 --- a/autosubmit_api/blueprints/v4.py +++ b/autosubmit_api/blueprints/v4.py @@ -1,5 +1,4 @@ from flask import Blueprint -from autosubmit_api.views import v3 as v3_views from autosubmit_api.views import v4 as v4_views diff --git a/autosubmit_api/builders/experiment_builder.py b/autosubmit_api/builders/experiment_builder.py index 4aab284..c21e53d 100644 --- a/autosubmit_api/builders/experiment_builder.py +++ b/autosubmit_api/builders/experiment_builder.py @@ -7,7 +7,6 @@ from autosubmit_api.builders.configuration_facade_builder import ( from autosubmit_api.database import tables from autosubmit_api.database.common import ( create_autosubmit_db_engine, - create_main_db_conn, ) from autosubmit_api.database.models import ExperimentModel diff --git a/autosubmit_api/components/experiment/pkl_organizer.py b/autosubmit_api/components/experiment/pkl_organizer.py index d12a7b1..d640861 100644 --- a/autosubmit_api/components/experiment/pkl_organizer.py +++ b/autosubmit_api/components/experiment/pkl_organizer.py @@ -1,8 +1,6 @@ #!/usr/bin/env python import os -import pickle -from networkx import DiGraph from autosubmit_api.components.jobs import job_factory as factory from autosubmit_api.common.utils import JobSection, PklJob, PklJob14, Status from autosubmit_api.components.experiment.configuration_facade import AutosubmitConfigurationFacade diff --git a/autosubmit_api/components/representations/graph/edge.py b/autosubmit_api/components/representations/graph/edge.py index 7e033ab..e17e71e 100644 --- a/autosubmit_api/components/representations/graph/edge.py +++ b/autosubmit_api/components/representations/graph/edge.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -from abc import ABCMeta, abstractmethod -from typing import List, 
Dict +from abc import ABCMeta +from typing import Dict class Edge(object, metaclass=ABCMeta): """ Abstract Edge """ diff --git a/autosubmit_api/components/representations/graph/graph.py b/autosubmit_api/components/representations/graph/graph.py index cd2d479..c05b7ce 100644 --- a/autosubmit_api/components/representations/graph/graph.py +++ b/autosubmit_api/components/representations/graph/graph.py @@ -1,18 +1,16 @@ #!/usr/bin/env python -from multiprocessing.sharedctypes import Value import networkx as nx from autosubmit_api.performance import utils as PUtils # import common.utils as utils from autosubmit_api.common.utils import Status, get_average_total_time -from networkx.linalg.laplacianmatrix import laplacian_matrix from autosubmit_api.components.jobs.job_factory import Job from autosubmit_api.components.jobs.joblist_loader import JobListLoader from autosubmit_api.monitor.monitor import Monitor from autosubmit_api.database.db_jobdata import ExperimentGraphDrawing from autosubmit_api.components.representations.graph.edge import Edge, RealEdge -from typing import List, Dict, Optional, Tuple, Set, Any +from typing import List, Dict, Optional, Tuple, Any from scipy.sparse import linalg GRAPHVIZ_MULTIPLIER = 90 diff --git a/autosubmit_api/components/representations/graph/test.py b/autosubmit_api/components/representations/graph/test.py index 9fde3ea..c89557f 100644 --- a/autosubmit_api/components/representations/graph/test.py +++ b/autosubmit_api/components/representations/graph/test.py @@ -5,9 +5,8 @@ import math import autosubmit_api.common.utils_for_testing as TestUtils from autosubmit_api.common.utils import Status -from autosubmit_api.components.representations.graph.graph import GraphRepresentation, GroupedBy, Layout +from autosubmit_api.components.representations.graph.graph import GraphRepresentation, Layout from autosubmit_api.builders.joblist_loader_builder import JobListLoaderBuilder, JobListLoaderDirector -from collections import Counter CASE_NO_WRAPPERS 
= "a3tb" # Job count = 55 CASE_WITH_WRAPPERS = "a28v" diff --git a/autosubmit_api/components/representations/tree/test.py b/autosubmit_api/components/representations/tree/test.py index bd01740..2f1f24d 100644 --- a/autosubmit_api/components/representations/tree/test.py +++ b/autosubmit_api/components/representations/tree/test.py @@ -2,7 +2,6 @@ import unittest import common.utils_for_testing as UtilsForTesting -from mock import Mock from components.representations.tree.tree import TreeRepresentation from components.jobs.joblist_loader import JobListLoader from components.experiment.pkl_organizer import PklOrganizer @@ -11,9 +10,7 @@ from components.experiment.configuration_facade import AutosubmitConfigurationFa from bscearth.utils.config_parser import ConfigParserFactory from config.config_common import AutosubmitConfigResolver -from common.utils import Status from components.jobs.joblist_helper import JobListHelper -from config.basicConfig import APIBasicConfig class TestTreeRepresentation(unittest.TestCase): def setUp(self): diff --git a/autosubmit_api/config/IConfigStrategy.py b/autosubmit_api/config/IConfigStrategy.py index b687867..1a381cf 100644 --- a/autosubmit_api/config/IConfigStrategy.py +++ b/autosubmit_api/config/IConfigStrategy.py @@ -19,18 +19,9 @@ from configparser import ConfigParser as PyConfigParser from autosubmitconfigparser.config.configcommon import AutosubmitConfig as Autosubmit4Config -import os import re -import subprocess -import json -import logging -import locale -from pyparsing import nestedExpr from bscearth.utils.config_parser import ConfigParserFactory, ConfigParser -from bscearth.utils.date import parse_date -from bscearth.utils.log import Log -from ..config.basicConfig import APIBasicConfig from abc import ABC, abstractmethod diff --git a/autosubmit_api/config/confConfigStrategy.py b/autosubmit_api/config/confConfigStrategy.py index 25515b7..fb8bcd6 100644 --- a/autosubmit_api/config/confConfigStrategy.py +++ 
b/autosubmit_api/config/confConfigStrategy.py @@ -81,10 +81,6 @@ class confConfigStrategy(IConfigStrategy): return None - @property - def jobs_parser(self): - return self._jobs_parser - @property def jobs_parser(self): return self._jobs_parser diff --git a/autosubmit_api/history/utils.py b/autosubmit_api/history/utils.py index 26e1101..402050e 100644 --- a/autosubmit_api/history/utils.py +++ b/autosubmit_api/history/utils.py @@ -17,12 +17,9 @@ # You should have received a copy of the GNU General Public License # along with Autosubmit. If not, see . -import time import os from datetime import datetime -from typing import Dict, Any, Tuple -from typing import Any, Union -from datetime import datetime +from typing import Dict, Any, Tuple, Union DATETIME_FORMAT = '%Y-%m-%d-%H:%M:%S' SAFE_TIME_LIMIT = 300 diff --git a/tests/conftest.py b/tests/conftest.py index be699e0..7423b36 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,11 +4,9 @@ import os from flask import Flask import pytest -from autosubmitconfigparser.config.basicconfig import BasicConfig from autosubmit_api.app import create_app from autosubmit_api.config.basicConfig import APIBasicConfig from autosubmit_api import config -from tests.custom_utils import custom_return_value FAKE_EXP_DIR = "./tests/experiments/" -- GitLab