From ab51bde849defdd3c18df02dff882717027f488a Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:17:02 +0200 Subject: [PATCH 1/9] add ruff dependency --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index e012e0ffd..a370f5e39 100644 --- a/setup.py +++ b/setup.py @@ -71,6 +71,7 @@ tests_require = [ 'pytest==8.2.*', 'pytest-cov', 'pytest-mock', + 'ruff==0.6.2', # 'testcontainers' # TODO: pending Postgres MR ] -- GitLab From 2b94a19098c43138436b9b7b7dcb8a40edf5057a Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:03:49 +0200 Subject: [PATCH 2/9] fix E713 not-in-test --- autosubmit/job/job_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 22245c23a..f6bb62488 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -1945,7 +1945,7 @@ class JobList(object): jobs_to_check = unparsed_jobs.split("&") select_jobs_by_name = jobs_to_check[0] unparsed_jobs = jobs_to_check[1] - if not ";" in unparsed_jobs: + if ";" not in unparsed_jobs: if '[' in unparsed_jobs: select_all_jobs_by_section = unparsed_jobs filter_jobs_by_section = "" -- GitLab From cf705b9b70fea7b46631145865cba6b857557e2e Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:04:15 +0200 Subject: [PATCH 3/9] fix E731 lambda-assignment --- autosubmit/job/job_grouping.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/autosubmit/job/job_grouping.py b/autosubmit/job/job_grouping.py index 63a064719..b3072dcc7 100644 --- a/autosubmit/job/job_grouping.py +++ b/autosubmit/job/job_grouping.py @@ -89,7 +89,8 @@ class JobGrouping(object): out = nestedExpr('[', ']').parseString(text).asList() - depth = lambda L: isinstance(L, list) and max(list(map(depth, L))) + 1 + def depth(L): + return isinstance(L, list) and max(list(map(depth, L))) + 1 if self.group_by == 'date': if depth(out) == 2: -- GitLab From 1b88b91a8e26f1c27bc81a7b4b88671b2d9e2b85 Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:07:57 +0200 Subject: [PATCH 4/9] fix E712 true-false-comparison --- autosubmit/history/experiment_history.py | 2 +- autosubmit/platforms/slurmplatform.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/autosubmit/history/experiment_history.py b/autosubmit/history/experiment_history.py index 5fd081600..8ee86b2f3 100644 --- a/autosubmit/history/experiment_history.py +++ b/autosubmit/history/experiment_history.py @@ -230,7 +230,7 @@ class ExperimentHistory: should_create_new_run = self.should_we_create_a_new_run(job_list, len(update_these_changes), current_experiment_run_dc, chunk_unit, chunk_size, create) - if len(update_these_changes) > 0 and should_create_new_run == False: + if len(update_these_changes) > 0 and should_create_new_run is False: self.manager.update_many_job_data_change_status(update_these_changes) if should_create_new_run: return self.create_new_experiment_run(chunk_unit, chunk_size, current_config, job_list) diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py index c48b98e9c..3f250a8c5 100644 --- a/autosubmit/platforms/slurmplatform.py +++ b/autosubmit/platforms/slurmplatform.py @@ -99,7 +99,7 @@ class SlurmPlatform(ParamikoPlatform): """ try: - valid_packages_to_submit = [ package for package in valid_packages_to_submit if package.x11 != True] + valid_packages_to_submit = [ package for package in valid_packages_to_submit if package.x11 is not True] 
if len(valid_packages_to_submit) > 0: duplicated_jobs_already_checked = False platform = valid_packages_to_submit[0].jobs[0].platform -- GitLab From 1d6a82df9eb3a06c8d97f51044cd2fc75a71006a Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:13:42 +0200 Subject: [PATCH 5/9] fix F403 undefined-local-with-import-star --- test/regression/tests_runner.py | 11 ++++++++++- test/regression/tests_utils.py | 2 +- test/unit/test_chunk_date_lib.py | 17 ++++++++++++++++- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/test/regression/tests_runner.py b/test/regression/tests_runner.py index 54d38624e..907140000 100644 --- a/test/regression/tests_runner.py +++ b/test/regression/tests_runner.py @@ -1,6 +1,15 @@ from .tests_log import Log from .tests_utils import check_cmd, next_experiment_id, copy_experiment_conf_files, create_database, clean_database -from .tests_commands import * +from .tests_commands import ( + generate_experiment_cmd, + create_experiment_cmd, + check_experiment_cmd, + monitor_experiment_cmd, + refresh_experiment_cmd, + run_experiment_cmd, + stats_experiment_cmd, + recovery_experiment_cmd, +) from threading import Thread from time import sleep import argparse diff --git a/test/regression/tests_utils.py b/test/regression/tests_utils.py index 82e881248..196fae730 100644 --- a/test/regression/tests_utils.py +++ b/test/regression/tests_utils.py @@ -1,4 +1,4 @@ -from .tests_commands import * +from .tests_commands import create_database_cmd import os import subprocess import string diff --git a/test/unit/test_chunk_date_lib.py b/test/unit/test_chunk_date_lib.py index af5101e0b..da3587907 100644 --- a/test/unit/test_chunk_date_lib.py +++ b/test/unit/test_chunk_date_lib.py @@ -1,5 +1,20 @@ from unittest import TestCase -from bscearth.utils.date import * +from bscearth.utils.date import ( + add_time, + add_years, + add_months, + add_days, + add_hours, + subs_dates, + sub_days, + chunk_start_date, + chunk_end_date, + previous_day, + parse_date, + date2str, + sum_str_hours, + split_str_hours, +) from datetime import datetime -- GitLab From 40730192ac3e6e1ec236ac489dd3984c57a463de Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:23:03 +0200 Subject: [PATCH 6/9] fix F901 raise-not-implemented --- .../platforms/wrappers/wrapper_factory.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/autosubmit/platforms/wrappers/wrapper_factory.py b/autosubmit/platforms/wrappers/wrapper_factory.py index 1f47996a9..613dcdaa1 100644 --- a/autosubmit/platforms/wrappers/wrapper_factory.py +++ b/autosubmit/platforms/wrappers/wrapper_factory.py @@ -72,16 +72,16 @@ class WrapperFactory(object): return wrapper_cmd def vertical_wrapper(self, **kwargs): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def horizontal_wrapper(self, **kwargs): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def hybrid_wrapper_horizontal_vertical(self, **kwargs): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def hybrid_wrapper_vertical_horizontal(self, **kwargs): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def header_directives(self, **kwargs): pass @@ -124,21 +124,21 @@ class WrapperFactory(object): def reservation_directive(self, reservation): return '#' def dependency_directive(self, dependency): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def 
queue_directive(self, queue): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def processors_directive(self, processors): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def nodes_directive(self, nodes): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def tasks_directive(self, tasks): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def partition_directive(self, partition): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def exclusive_directive(self, exclusive): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) def threads_directive(self, threads): - raise NotImplemented(self.exception) + raise NotImplementedError(self.exception) class SlurmWrapperFactory(WrapperFactory): -- GitLab From d10820fc892f658cc0c87914fd8003bc4f82af9f Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:35:23 +0200 Subject: [PATCH 7/9] fix E721 type-comparison --- autosubmit/job/job_dict.py | 68 ++++++++++++++++---------------- autosubmit/job/job_list.py | 6 +-- autosubmit/platforms/platform.py | 2 +- autosubmit/provenance/rocrate.py | 2 +- 4 files changed, 39 insertions(+), 39 deletions(-) diff --git a/autosubmit/job/job_dict.py b/autosubmit/job/job_dict.py index 397de45ee..d72af775d 100644 --- a/autosubmit/job/job_dict.py +++ b/autosubmit/job/job_dict.py @@ -321,47 +321,47 @@ class DicJobs: elif "all" in filters_to['DATES_TO'].lower(): for date in jobs.keys(): if jobs.get(date, None): - if type(jobs.get(date, None)) == list: + if type(jobs.get(date, None)) is list: for aux_job in jobs[date]: final_jobs_list.append(aux_job) - elif type(jobs.get(date, None)) == Job: + elif type(jobs.get(date, None)) is Job: final_jobs_list.append(jobs[date]) - elif type(jobs.get(date, None)) == dict: + elif type(jobs.get(date, None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[date]) else: for date in filters_to.get('DATES_TO', "").split(","): if jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None): - if type(jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None)) == list: + if type(jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None)) is list: for aux_job in jobs[datetime.datetime.strptime(date, "%Y%m%d")]: final_jobs_list.append(aux_job) - elif type(jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None)) == Job: + elif type(jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None)) is Job: final_jobs_list.append(jobs[datetime.datetime.strptime(date, "%Y%m%d")]) - elif type(jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None)) == dict: + elif type(jobs.get(datetime.datetime.strptime(date, "%Y%m%d"), None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[ datetime.datetime.strptime(date, "%Y%m%d")]) else: if job.running == "once": for key in jobs.keys(): - if type(jobs.get(key, None)) == list: # TODO + if type(jobs.get(key, None)) is list: # TODO for aux_job in jobs[key]: final_jobs_list.append(aux_job) - elif type(jobs.get(key, None)) == Job: # TODO + elif type(jobs.get(key, None)) is Job: # TODO final_jobs_list.append(jobs[key]) - elif type(jobs.get(key, None)) == dict: + elif type(jobs.get(key, None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[key]) elif jobs.get(job.date, None): - if type(jobs.get(natural_date, None)) == list: # TODO + if type(jobs.get(natural_date, None)) is list: # TODO for aux_job in jobs[natural_date]: 
final_jobs_list.append(aux_job) - elif type(jobs.get(natural_date, None)) == Job: # TODO + elif type(jobs.get(natural_date, None)) is Job: # TODO final_jobs_list.append(jobs[natural_date]) - elif type(jobs.get(natural_date, None)) == dict: + elif type(jobs.get(natural_date, None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[natural_date]) else: jobs_aux = {} jobs = jobs_aux if len(jobs) > 0: - if type(jobs) == list: # TODO check the other todo, maybe this is not neccesary, https://earth.bsc.es/gitlab/es/autosubmit/-/merge_requests/387#note_243751 + if type(jobs) is list: # TODO check the other todo, maybe this is not neccesary, https://earth.bsc.es/gitlab/es/autosubmit/-/merge_requests/387#note_243751 final_jobs_list.extend(jobs) jobs = {} else: @@ -374,48 +374,48 @@ class DicJobs: elif "all" in filters_to['MEMBERS_TO'].lower(): for member in jobs.keys(): if jobs.get(member.upper(), None): - if type(jobs.get(member.upper(), None)) == list: + if type(jobs.get(member.upper(), None)) is list: for aux_job in jobs[member.upper()]: final_jobs_list.append(aux_job) - elif type(jobs.get(member.upper(), None)) == Job: + elif type(jobs.get(member.upper(), None)) is Job: final_jobs_list.append(jobs[member.upper()]) - elif type(jobs.get(member.upper(), None)) == dict: + elif type(jobs.get(member.upper(), None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[member.upper()]) else: for member in filters_to.get('MEMBERS_TO', "").split(","): if jobs.get(member.upper(), None): - if type(jobs.get(member.upper(), None)) == list: + if type(jobs.get(member.upper(), None)) is list: for aux_job in jobs[member.upper()]: final_jobs_list.append(aux_job) - elif type(jobs.get(member.upper(), None)) == Job: + elif type(jobs.get(member.upper(), None)) is Job: final_jobs_list.append(jobs[member.upper()]) - elif type(jobs.get(member.upper(), None)) == dict: + elif type(jobs.get(member.upper(), None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[member.upper()]) else: if job.running == "once" or not job.member: for key in jobs.keys(): - if type(jobs.get(key, None)) == list: + if type(jobs.get(key, None)) is list: for aux_job in jobs[key.upper()]: final_jobs_list.append(aux_job) - elif type(jobs.get(key.upper(), None)) == Job: + elif type(jobs.get(key.upper(), None)) is Job: final_jobs_list.append(jobs[key]) - elif type(jobs.get(key.upper(), None)) == dict: + elif type(jobs.get(key.upper(), None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[key.upper()]) elif jobs.get(job.member.upper(), None): - if type(jobs.get(natural_member.upper(), None)) == list: + if type(jobs.get(natural_member.upper(), None)) is list: for aux_job in jobs[natural_member.upper()]: final_jobs_list.append(aux_job) - elif type(jobs.get(natural_member.upper(), None)) == Job: + elif type(jobs.get(natural_member.upper(), None)) is Job: final_jobs_list.append(jobs[natural_member.upper()]) - elif type(jobs.get(natural_member.upper(), None)) == dict: + elif type(jobs.get(natural_member.upper(), None)) is dict: jobs_aux = self.update_jobs_filtered(jobs_aux, jobs[natural_member.upper()]) else: jobs_aux = {} jobs = jobs_aux if len(jobs) > 0: - if type(jobs) == list: + if type(jobs) is list: final_jobs_list.extend(jobs) else: if filters_to.get('CHUNKS_TO', None): @@ -423,30 +423,30 @@ class DicJobs: pass elif "all" in filters_to['CHUNKS_TO'].lower(): for chunk in jobs.keys(): - if type(jobs.get(chunk, None)) == list: + if type(jobs.get(chunk, None)) is list: for aux_job in jobs[chunk]: 
final_jobs_list.append(aux_job) - elif type(jobs.get(chunk, None)) == Job: + elif type(jobs.get(chunk, None)) is Job: final_jobs_list.append(jobs[chunk]) else: for chunk in filters_to.get('CHUNKS_TO', "").split(","): chunk = int(chunk) - if type(jobs.get(chunk, None)) == list: + if type(jobs.get(chunk, None)) is list: for aux_job in jobs[chunk]: final_jobs_list.append(aux_job) - elif type(jobs.get(chunk, None)) == Job: + elif type(jobs.get(chunk, None)) is Job: final_jobs_list.append(jobs[chunk]) else: if job.running == "once" or not job.chunk: for chunk in jobs.keys(): - if type(jobs.get(chunk, None)) == list: + if type(jobs.get(chunk, None)) is list: final_jobs_list += [aux_job for aux_job in jobs[chunk]] - elif type(jobs.get(chunk, None)) == Job: + elif type(jobs.get(chunk, None)) is Job: final_jobs_list.append(jobs[chunk]) elif jobs.get(job.chunk, None): - if type(jobs.get(natural_chunk, None)) == list: + if type(jobs.get(natural_chunk, None)) is list: final_jobs_list += [aux_job for aux_job in jobs[natural_chunk]] - elif type(jobs.get(natural_chunk, None)) == Job: + elif type(jobs.get(natural_chunk, None)) is Job: final_jobs_list.append(jobs[natural_chunk]) if len(final_jobs_list) > 0: diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index f6bb62488..936f795e7 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -659,7 +659,7 @@ class JobList(object): """ filters = [] if level_to_check == "DATES_FROM": - if type(value_to_check) != str: + if type(value_to_check) is not str: value_to_check = date2str(value_to_check, "%Y%m%d") # need to convert in some cases try: values_list = [date2str(date_, "%Y%m%d") for date_ in self._date_list] # need to convert in some cases @@ -714,8 +714,8 @@ class JobList(object): # Will enter chunks_from, and obtain [{DATES_TO: "20020201", MEMBERS_TO: "fc2", CHUNKS_TO: "ALL", SPLITS_TO: "2"] if "CHUNKS_FROM" in filter: filters_to_apply_c = self._check_chunks({"CHUNKS_FROM": (filter.pop("CHUNKS_FROM"))}, current_job) - if len(filters_to_apply_c) > 0 and (type(filters_to_apply_c) != list or ( - type(filters_to_apply_c) == list and len(filters_to_apply_c[0]) > 0)): + if len(filters_to_apply_c) > 0 and (type(filters_to_apply_c) is not list or ( + type(filters_to_apply_c) is list and len(filters_to_apply_c[0]) > 0)): filters_to_apply[i].update(filters_to_apply_c) # IGNORED if "SPLITS_FROM" in filter: diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index 80a2d4971..561f9014e 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -98,7 +98,7 @@ class Platform(object): if not self.two_factor_auth: self.pw = None elif auth_password is not None and self.two_factor_auth: - if type(auth_password) == list: + if type(auth_password) is list: self.pw = auth_password[0] else: self.pw = auth_password diff --git a/autosubmit/provenance/rocrate.py b/autosubmit/provenance/rocrate.py index de77b3e5b..cbfac2da5 100644 --- a/autosubmit/provenance/rocrate.py +++ b/autosubmit/provenance/rocrate.py @@ -496,7 +496,7 @@ def create_rocrate_archive( f"Could not locate a type in RO-Crate for parameter {param_name} type {python_type}", 7014) # The formal parameters are added to the workflow (main entity). 
additional_type = PARAMETER_TYPES_MAP[python_type] - if type(additional_type) != str: + if type(additional_type) is not str: additional_type = PARAMETER_TYPES_MAP[python_type](additional_type) formal_parameter = _create_formal_parameter( crate, -- GitLab From c7014801ef7bd596baf9c11e275e4bed1065d65e Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:41:36 +0200 Subject: [PATCH 8/9] fix E722 bare-except --- autosubmit/autosubmit.py | 26 +++++++++---------- autosubmit/git/autosubmit_git.py | 2 +- autosubmit/history/experiment_history.py | 2 +- autosubmit/job/job.py | 4 +-- autosubmit/job/job_list.py | 8 +++--- autosubmit/migrate/migrate.py | 2 +- autosubmit/platforms/ecplatform.py | 6 ++--- autosubmit/platforms/paramiko_platform.py | 8 +++--- autosubmit/platforms/platform.py | 4 +-- autosubmit/platforms/slurmplatform.py | 2 +- log/fd_show.py | 2 +- log/log.py | 4 +-- test/regression/local_check_details.py | 2 +- .../regression/local_check_details_wrapper.py | 2 +- 14 files changed, 37 insertions(+), 37 deletions(-) diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 6bc39f862..09205063e 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -989,7 +989,7 @@ class Autosubmit: admin_user = "eadmin" # to be improved in #944 try: eadmin = current_user_id == pwd.getpwnam(admin_user).pw_uid - except: + except Exception: Log.info(f"Autosubmit admin user: {admin_user} is not set") current_owner_id = Path(BasicConfig.LOCAL_ROOT_DIR, expid).stat().st_uid try: @@ -1346,7 +1346,7 @@ class Autosubmit: except OSError as e: try: Autosubmit._delete_expid(exp_id, True) - except: + except Exception: pass raise AutosubmitCritical("Error while creating the experiment structure: {0}".format(str(e)), 7011) @@ -1362,7 +1362,7 @@ class Autosubmit: except Exception as e: try: Autosubmit._delete_expid(exp_id, True) - except: + except Exception: pass raise AutosubmitCritical("Error while creating the experiment configuration: {0}".format(str(e)), 7011) # Change template values by default values specified from the commandline @@ -1371,7 +1371,7 @@ class Autosubmit: except Exception as e: try: Autosubmit._delete_expid(exp_id, True) - except: + except Exception: pass raise AutosubmitCritical("Error while setting the default values: {0}".format(str(e)), 7011) @@ -2379,7 +2379,7 @@ class Autosubmit: # Database is locked, may be related to my local db todo 4.1.1 try: exp_history.finish_current_experiment_run() - except: + except Exception: Log.warning("Database is locked") except (portalocker.AlreadyLocked, portalocker.LockException) as e: message = "We have detected that there is another Autosubmit instance using the experiment\n. 
Stop other Autosubmit instances that are using the experiment or delete autosubmit.lock file located on tmp folder" @@ -3243,7 +3243,7 @@ class Autosubmit: job_parameters = job.update_parameters(as_conf, {}) for key, value in job_parameters.items(): jobs_parameters["JOBS"+"."+job.section+"."+key] = value - except: + except Exception: pass if len(jobs_parameters) > 0: del as_conf.experiment_data["JOBS"] @@ -3337,7 +3337,7 @@ class Autosubmit: try: if f.is_dir() and f.owner() == get_from_user: experiments_ids.append(f.name) - except: + except Exception: pass # if it reachs there it means that f.owner() doesn't exist anymore( owner is an id) so we just skip it and continue else: experiments_ids = experiments_ids.split(' ') @@ -4126,7 +4126,7 @@ class Autosubmit: os.popen(bash_command).read() exp_history.initialize_database() - except: + except Exception: Log.warning("It was not possible to restore the jobs_data.db file... , a new blank db will be created") result = os.popen("rm {0}".format(database_path)).read() @@ -4508,7 +4508,7 @@ class Autosubmit: job_list = JobList(expid, BasicConfig, YAMLParserFactory(),Autosubmit._get_job_list_persistence(expid, as_conf), as_conf) try: prev_job_list_logs = Autosubmit.load_logs_from_previous_run(expid, as_conf) - except: + except Exception: prev_job_list_logs = None date_format = '' if as_conf.get_chunk_size_unit() == 'hour': @@ -4545,7 +4545,7 @@ class Autosubmit: os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "pkl"), "job_packages_" + expid) packages_persistence.reset_table() packages_persistence.reset_table(True) - except: + except Exception: pass groups_dict = dict() @@ -6017,7 +6017,7 @@ class Autosubmit: current_status = current_status.upper().split(" ") try: current_status = [Status.KEY_TO_VALUE[x.strip()] for x in current_status] - except: + except Exception: raise AutosubmitCritical("Invalid status -fs. 
All values must match one of {0}".format(Status.VALUE_TO_KEY.keys()), 7011) @@ -6050,12 +6050,12 @@ class Autosubmit: if force: try: os.kill(int(process_id_), signal.SIGKILL) # don't wait for logs - except: + except Exception: continue else: try: os.kill(int(process_id_), signal.SIGINT) # wait for logs - except: + except Exception: continue valid_expids.append(expid) except Exception as e: diff --git a/autosubmit/git/autosubmit_git.py b/autosubmit/git/autosubmit_git.py index f824c87f8..048af72d1 100644 --- a/autosubmit/git/autosubmit_git.py +++ b/autosubmit/git/autosubmit_git.py @@ -212,7 +212,7 @@ class AutosubmitGit: for number in git_version.split("."): version_int += number git_version = int(version_int) - except: + except Exception: git_version = 2251 if git_remote_project_path == '': command_0 = "cd {0} ; {1}".format(project_path, command_0) diff --git a/autosubmit/history/experiment_history.py b/autosubmit/history/experiment_history.py index 8ee86b2f3..fbf4403cb 100644 --- a/autosubmit/history/experiment_history.py +++ b/autosubmit/history/experiment_history.py @@ -223,7 +223,7 @@ class ExperimentHistory: try: current_experiment_run_dc = self.manager.get_experiment_run_dc_with_max_id() update_these_changes = self._get_built_list_of_changes(job_list) - except: + except Exception: current_experiment_run_dc = 0 update_these_changes = [] # ("no runs") diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 0f5085f1f..bf102db05 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -2138,7 +2138,7 @@ class Job(object): filename = os.path.basename(os.path.splitext(additional_file)[0]) full_path = os.path.join(self._tmp_path,filename ) + "_" + self.name[5:] open(full_path, 'wb').write(additional_template_content.encode(lang)) - except: + except Exception: pass for key, value in parameters.items(): # parameters[key] can have '\\' characters that are interpreted as escape characters @@ -2755,7 +2755,7 @@ class WrapperJob(Job): try: self._platform.send_command( self._platform.cancel_cmd + " " + str(self.id)) - except: + except Exception: Log.info(f'Job with {self.id} was finished before canceling it') for job in self.job_list: diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 936f795e7..47df44aa2 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -224,7 +224,7 @@ class JobList(object): self.graph = nx.DiGraph() except AutosubmitCritical: raise - except: + except Exception: self.graph = nx.DiGraph() self._dic_jobs = DicJobs(date_list, member_list, chunk_list, date_format, default_retrials, as_conf) self._dic_jobs.graph = self.graph @@ -663,7 +663,7 @@ class JobList(object): value_to_check = date2str(value_to_check, "%Y%m%d") # need to convert in some cases try: values_list = [date2str(date_, "%Y%m%d") for date_ in self._date_list] # need to convert in some cases - except: + except Exception: values_list = self._date_list elif level_to_check == "MEMBERS_FROM": values_list = self._member_list # Str list @@ -2492,7 +2492,7 @@ class JobList(object): try: Log.status("{0:<35}{1:<15}{2:<15}{3:<20}{4:<15}", job.name, job_id, Status( ).VALUE_TO_KEY[job.status], platform_name, queue) - except: + except Exception: Log.debug("Couldn't print job status for job {0}".format(job.name)) for job in failed_job_list: if len(job.queue) < 1: @@ -3142,7 +3142,7 @@ class JobList(object): results.append(self._recursion_print(root, 0, visited, nocolor=True)) else: results.append("Cannot find root.") - except: + except Exception: return f'Job List 
object' return "\n".join(results) diff --git a/autosubmit/migrate/migrate.py b/autosubmit/migrate/migrate.py index 26c95134f..b5d3a51cf 100644 --- a/autosubmit/migrate/migrate.py +++ b/autosubmit/migrate/migrate.py @@ -294,7 +294,7 @@ class Migrate: command = f"cd {p.remote_log_dir} ; find {p.root_dir} -type l -lname '/*' -printf 'var=\"$(realpath -s --relative-to=\"%p\" \"$(readlink \"%p\")\")\" && var=${{var:3}} && ln -sf $var \"%p\" \\n' > convertLink.sh" try: p.check_absolute_file_exists(p.temp_dir) - except: + except Exception: exit_with_errors = True Log.printlog(f'{p.temp_dir} does not exist on platform [{p.name}]', 7014) platforms_with_issues.append(p.name) diff --git a/autosubmit/platforms/ecplatform.py b/autosubmit/platforms/ecplatform.py index 713cb17eb..dd5954a6f 100644 --- a/autosubmit/platforms/ecplatform.py +++ b/autosubmit/platforms/ecplatform.py @@ -167,7 +167,7 @@ class EcPlatform(ParamikoPlatform): self.connected = True else: self.connected = False - except: + except Exception: self.connected = False if not self.log_retrieval_process_active and ( as_conf is None or str(as_conf.platforms_data.get(self.name, {}).get('DISABLE_RECOVERY_THREADS', @@ -191,7 +191,7 @@ class EcPlatform(ParamikoPlatform): self.connected = True else: self.connected = False - except: + except Exception: self.connected = False def test_connection(self,as_conf): @@ -218,7 +218,7 @@ class EcPlatform(ParamikoPlatform): else: self.connected = False return "Invalid certificate" - except: + except Exception: self.connected = False return "Invalid certificate" diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index 288a4f00b..bf9af5ce9 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -135,7 +135,7 @@ class ParamikoPlatform(Platform): try: transport = self._ssh.get_transport() transport.send_ignore() - except: + except Exception: message = "Timeout connection" return message @@ -770,9 +770,9 @@ class ParamikoPlatform(Platform): try: if self.cancel_cmd is not None: job.platform.send_command(self.cancel_cmd + " " + str(job.id)) - except: + except Exception: pass - except: + except Exception: job_status = Status.FAILED if job_status in self.job_status['COMPLETED']: job_status = Status.COMPLETED @@ -1432,7 +1432,7 @@ class ParamikoPlatform(Platform): return True else: return False - except: + except Exception: return False class ParamikoPlatformException(Exception): """ diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index 561f9014e..18d0dfaf5 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -859,7 +859,7 @@ class Platform(object): job_names_processed.add(f'{job.name}_{job.fail_count}') else: job_names_processed.add(f'{job.name}') - except: + except Exception: pass except queue.Empty: pass @@ -868,5 +868,5 @@ class Platform(object): except Exception as e: try: self.restore_connection(None) - except: + except Exception: pass diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py index 3f250a8c5..3a84e0283 100644 --- a/autosubmit/platforms/slurmplatform.py +++ b/autosubmit/platforms/slurmplatform.py @@ -120,7 +120,7 @@ class SlurmPlatform(ParamikoPlatform): #cancel bad submitted job if jobid is encountered for id_ in jobid: self.send_command(self.cancel_job(id_)) - except: + except Exception: pass jobs_id = None self.connected = False diff --git a/log/fd_show.py b/log/fd_show.py index 
f6aa9027a..334e22e64 100644 --- a/log/fd_show.py +++ b/log/fd_show.py @@ -16,7 +16,7 @@ def fd_table_status(): for fd in range(1024): try: s = os.fstat(fd) - except: + except Exception: continue for fd_type, func in _fd_types: if func(s.st_mode): diff --git a/log/log.py b/log/log.py index cf8363383..8b6a94646 100644 --- a/log/log.py +++ b/log/log.py @@ -242,7 +242,7 @@ class Log: status_file_handler.addFilter(custom_filter) Log.log.addHandler(status_file_handler) os.chmod(file_path, 509) - except: # retry again + except Exception: # retry again pass @staticmethod @@ -272,7 +272,7 @@ class Log: status_file_handler.setFormatter(LogFormatter(False)) status_file_handler.addFilter(custom_filter) Log.log.addHandler(status_file_handler) - except: # retry again + except Exception: # retry again pass @staticmethod def set_console_level(level): diff --git a/test/regression/local_check_details.py b/test/regression/local_check_details.py index ad7578063..ce35a3bcb 100644 --- a/test/regression/local_check_details.py +++ b/test/regression/local_check_details.py @@ -48,7 +48,7 @@ def perform_test(expids): # put it in a single file with open(f"{FILE_NAME}", "a") as myfile: myfile.write(output) - except: + except Exception: to_exclude.append(expid) # print to_exclude in format ["a001","a002"] print(to_exclude) diff --git a/test/regression/local_check_details_wrapper.py b/test/regression/local_check_details_wrapper.py index 7165889ea..592bfc0f6 100644 --- a/test/regression/local_check_details_wrapper.py +++ b/test/regression/local_check_details_wrapper.py @@ -34,7 +34,7 @@ def perform_test(expids): # put it in a single file with open(f"{VERSION}_multi_test.txt", "a") as myfile: myfile.write(output) - except: + except Exception: raise Exception(f"Error in {expid}") # print to_exclude in format ["a001","a002"] -- GitLab From 6a4b6f43ce3f2a29f16bd4c90647dac8fef7fbed Mon Sep 17 00:00:00 2001 From: Luiggi Tenorio Date: Fri, 11 Oct 2024 15:46:10 +0200 Subject: [PATCH 9/9] fix E402 module-import-not-at-top-of-file --- autosubmit/autosubmit.py | 3 ++- autosubmit/database/db_common.py | 4 +++- autosubmit/job/job_packages.py | 4 +++- test/unit/test_dic_jobs.py | 2 +- test/unit/test_job_grouping.py | 3 ++- test/unit/test_wrappers.py | 2 +- 6 files changed, 12 insertions(+), 6 deletions(-) diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 09205063e..e3f2745de 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -58,7 +58,6 @@ from .platforms.paramiko_submitter import ParamikoSubmitter from .platforms.platform import Platform from .migrate.migrate import Migrate -dialog = None from time import sleep import argparse import subprocess @@ -90,6 +89,8 @@ from autosubmit.helpers.utils import proccess_id, terminate_child_process, check from contextlib import suppress +dialog = None + """ Main module for autosubmit. 
Only contains an interface class to all functionality implemented on autosubmit """ diff --git a/autosubmit/database/db_common.py b/autosubmit/database/db_common.py index 81692d954..ede152ae9 100644 --- a/autosubmit/database/db_common.py +++ b/autosubmit/database/db_common.py @@ -24,9 +24,11 @@ import os import sqlite3 import multiprocessing from log.log import Log, AutosubmitCritical -Log.get_logger("Autosubmit") from autosubmitconfigparser.config.basicconfig import BasicConfig +Log.get_logger("Autosubmit") + + CURRENT_DATABASE_VERSION = 1 TIMEOUT = 10 diff --git a/autosubmit/job/job_packages.py b/autosubmit/job/job_packages.py index 313f9cfa3..0e5d32eb4 100644 --- a/autosubmit/job/job_packages.py +++ b/autosubmit/job/job_packages.py @@ -27,7 +27,6 @@ from datetime import timedelta from autosubmit.job.job_common import Status from log.log import Log, AutosubmitCritical -Log.get_logger("Autosubmit") from autosubmit.job.job import Job from bscearth.utils.date import sum_str_hours from threading import Thread, Lock @@ -37,6 +36,9 @@ import tarfile import datetime import re import locale + +Log.get_logger("Autosubmit") + lock = Lock() def threaded(fn): def wrapper(*args, **kwargs): diff --git a/test/unit/test_dic_jobs.py b/test/unit/test_dic_jobs.py index f57318fbe..ec0c20e3c 100644 --- a/test/unit/test_dic_jobs.py +++ b/test/unit/test_dic_jobs.py @@ -7,6 +7,7 @@ from mock import Mock import math import shutil import tempfile +import inspect from autosubmit.job.job import Job from autosubmitconfigparser.config.yamlparser import YAMLParserFactory @@ -604,7 +605,6 @@ class TestDicJobs(TestCase): -import inspect class FakeBasicConfig: def __init__(self): pass diff --git a/test/unit/test_job_grouping.py b/test/unit/test_job_grouping.py index 01b53761a..237255410 100644 --- a/test/unit/test_job_grouping.py +++ b/test/unit/test_job_grouping.py @@ -1,5 +1,6 @@ import shutil import tempfile +import inspect from unittest import TestCase from mock import Mock @@ -992,7 +993,7 @@ class TestJobGrouping(TestCase): job.split = split return job -import inspect + class FakeBasicConfig: def __init__(self): pass diff --git a/test/unit/test_wrappers.py b/test/unit/test_wrappers.py index c47b884bd..998b65e96 100644 --- a/test/unit/test_wrappers.py +++ b/test/unit/test_wrappers.py @@ -3,6 +3,7 @@ from operator import attrgetter import shutil import tempfile +import inspect from unittest import TestCase from mock import MagicMock @@ -1939,7 +1940,6 @@ class TestWrappers(TestCase): return job -import inspect class FakeBasicConfig: def __init__(self): pass -- GitLab