From 1322ac346129601afb227a19618ec156138ee5a6 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Tue, 20 Dec 2016 12:00:44 +0100 Subject: [PATCH 01/60] CHUNKINI configuration parameter was added --- autosubmit/autosubmit.py | 20 +++++++++++--------- autosubmit/config/config_common.py | 9 +++++++++ autosubmit/config/files/expdef.conf | 18 ++++++++++-------- autosubmit/job/job_list.py | 28 ++++++++++++++++------------ test/unit/test_dic_jobs.py | 2 +- test/unit/test_job_list.py | 2 +- 6 files changed, 48 insertions(+), 31 deletions(-) diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 6bdecc01a..d97121137 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -706,12 +706,12 @@ class Autosubmit: save = False for platform in platforms_to_test: for job_package in job_list.get_ready_packages(platform): - try: - job_package.submit(as_conf, job_list.parameters) - save = True - except Exception: - Log.error("{0} submission failed", platform.name) - continue + try: + job_package.submit(as_conf, job_list.parameters) + save = True + except Exception: + Log.error("{0} submission failed", platform.name) + continue return save @staticmethod @@ -1641,6 +1641,7 @@ class Autosubmit: Log.error('There are repeated start dates!') return False num_chunks = as_conf.get_num_chunks() + chunk_ini = as_conf.get_chunk_ini() member_list = as_conf.get_member_list() if len(member_list) != len(set(member_list)): Log.error('There are repeated member names!') @@ -1659,7 +1660,7 @@ class Autosubmit: date_format = 'H' if date.minute > 1: date_format = 'M' - job_list.generate(date_list, member_list, num_chunks, parameters, date_format, as_conf.get_retrials(), + job_list.generate(date_list, member_list, num_chunks, chunk_ini, parameters, date_format, as_conf.get_retrials(), as_conf.get_default_job_type()) if rerun == "true": chunk_list = Autosubmit._create_json(as_conf.get_chunk_list()) @@ -2224,8 +2225,9 @@ class Autosubmit: date_format = 'H' if date.minute > 1: 
date_format = 'M' - job_list.generate(date_list, as_conf.get_member_list(), as_conf.get_num_chunks(), as_conf.load_parameters(), - date_format, as_conf.get_retrials(), as_conf.get_default_job_type(), False) + job_list.generate(date_list, as_conf.get_member_list(), as_conf.get_num_chunks(), as_conf.get_chunk_ini(), + as_conf.load_parameters(), date_format, as_conf.get_retrials(), + as_conf.get_default_job_type(), False) return job_list @staticmethod diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 3b6405389..f1ff163e0 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -643,6 +643,15 @@ class AutosubmitConfig: """ return int(self._exp_parser.get('experiment', 'NUMCHUNKS')) + def get_chunk_ini(self): + """ + Returns the first chunk from where the experiment will start + + :return: initial chunk + :rtype: int + """ + return int(self.get_option(self._exp_parser, 'experiment', 'CHUNKINI', 1)) + def get_chunk_size_unit(self): """ Unit for the chunk length diff --git a/autosubmit/config/files/expdef.conf b/autosubmit/config/files/expdef.conf index b8afe6584..cb6612491 100644 --- a/autosubmit/config/files/expdef.conf +++ b/autosubmit/config/files/expdef.conf @@ -26,16 +26,11 @@ CHUNKSIZEUNIT = month CHUNKSIZE = # Total number of chunks in experiment. NUMERIC = 30, 15, 10 NUMCHUNKS = +# Initial chunk of the experiment. Optional. DEFAULT = 1 +CHUNKINI = # Calendar used. LIST: standard, noleap CALENDAR = standard -[rerun] -# Is a rerun or not? [Default: Do set FALSE]. BOOLEAN = TRUE, FALSE -RERUN = FALSE -# If RERUN = TRUE then supply the list of chunks to rerun -# LIST = [ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ] -CHUNKLIST = - [project] # Select project type. 
STRING = git, svn, local, none # If PROJECT_TYPE is set to none, Autosubmit self-contained dummy templates will be used @@ -71,4 +66,11 @@ FILE_PROJECT_CONF = # Where is JOBS CONFIGURATION file location relative to project root path FILE_JOBS_CONF = # Default job scripts type in the project. type = STRING, default = bash, supported = 'bash', 'python' or 'r' -JOB_SCRIPTS_TYPE = \ No newline at end of file +JOB_SCRIPTS_TYPE = + +[rerun] +# Is a rerun or not? [Default: Do set FALSE]. BOOLEAN = TRUE, FALSE +RERUN = FALSE +# If RERUN = TRUE then supply the list of chunks to rerun +# LIST = [ 19601101 [ fc0 [1 2 3 4] fc1 [1] ] 19651101 [ fc0 [16-30] ] ] +CHUNKLIST = \ No newline at end of file diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 48515eac7..6073e70b3 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -89,8 +89,8 @@ class JobList: def graph(self, value): self._graph = value - def generate(self, date_list, member_list, num_chunks, parameters, date_format, default_retrials, default_job_type, - new=True): + def generate(self, date_list, member_list, num_chunks, chunk_ini, parameters, date_format, default_retrials, + default_job_type, new=True): """ Creates all jobs needed for the current workflow @@ -102,6 +102,8 @@ class JobList: :type member_list: list :param num_chunks: number of chunks to run :type num_chunks: int + :param chunk_ini: the experiment will start by the given chunk + :type chunk_ini: int :param parameters: parameters for the jobs :type parameters: dict :param date_format: option to format dates @@ -115,7 +117,7 @@ class JobList: self._date_list = date_list self._member_list = member_list - chunk_list = range(1, num_chunks + 1) + chunk_list = range(chunk_ini, num_chunks + 1) self._chunk_list = chunk_list jobs_parser = self._get_jobs_parser() @@ -169,7 +171,8 @@ class JobList: return dependencies @staticmethod - def _manage_job_dependencies(dic_jobs, job, date_list, member_list, chunk_list, 
dependencies_keys, dependencies, graph): + def _manage_job_dependencies(dic_jobs, job, date_list, member_list, chunk_list, dependencies_keys, dependencies, + graph): for key in dependencies_keys: dependency = dependencies[key] skip, (chunk, member, date) = JobList._calculate_dependency_metadata(job.chunk, chunk_list, @@ -670,6 +673,7 @@ class JobList: jobs_parser = self._get_jobs_parser() Log.info("Adding dependencies...") + dependencies = dict() for job_section in jobs_parser.sections(): Log.debug("Reading rerun dependencies for {0} jobs".format(job_section)) @@ -816,9 +820,9 @@ class DicJobs: """ - def __init__(self, joblist, parser, date_list, member_list, chunk_list, date_format, default_retrials): + def __init__(self, jobs_list, parser, date_list, member_list, chunk_list, date_format, default_retrials): self._date_list = date_list - self._joblist = joblist + self._jobs_list = jobs_list self._member_list = member_list self._chunk_list = chunk_list self._parser = parser @@ -863,7 +867,7 @@ class DicJobs: :type priority: int """ self._dic[section] = self.build_job(section, priority, None, None, None, default_job_type, jobs_data) - self._joblist.graph.add_node(self._dic[section].name) + self._jobs_list.graph.add_node(self._dic[section].name) def _create_jobs_startdate(self, section, priority, frequency, default_job_type, jobs_data=dict()): """ @@ -884,7 +888,7 @@ class DicJobs: if count % frequency == 0 or count == len(self._date_list): self._dic[section][date] = self.build_job(section, priority, date, None, None, default_job_type, jobs_data) - self._joblist.graph.add_node(self._dic[section][date].name) + self._jobs_list.graph.add_node(self._dic[section][date].name) def _create_jobs_member(self, section, priority, frequency, default_job_type, jobs_data=dict()): """ @@ -907,7 +911,7 @@ class DicJobs: if count % frequency == 0 or count == len(self._member_list): self._dic[section][date][member] = self.build_job(section, priority, date, member, None, 
default_job_type, jobs_data) - self._joblist.graph.add_node(self._dic[section][date][member].name) + self._jobs_list.graph.add_node(self._dic[section][date][member].name) ''' Maybe a good choice could be split this function or ascend the @@ -959,7 +963,7 @@ class DicJobs: else: self._dic[section][date][member][chunk] = self.build_job(section, priority, date, member, chunk, default_job_type, jobs_data) - self._joblist.graph.add_node(self._dic[section][date][member][chunk].name) + self._jobs_list.graph.add_node(self._dic[section][date][member][chunk].name) def get_jobs(self, section, date=None, member=None, chunk=None): """ @@ -1026,7 +1030,7 @@ class DicJobs: return jobs def build_job(self, section, priority, date, member, chunk, default_job_type, jobs_data=dict()): - name = self._joblist.expid + name = self._jobs_list.expid if date is not None: name += "_" + date2str(date, self._date_format) if member is not None: @@ -1077,7 +1081,7 @@ class DicJobs: if job.retrials == -1: job.retrials = None job.notify_on = [x.upper() for x in self.get_option(section, "NOTIFY_ON", '').split(' ')] - self._joblist.get_job_list().append(job) + self._jobs_list.get_job_list().append(job) return job def get_option(self, section, option, default): diff --git a/test/unit/test_dic_jobs.py b/test/unit/test_dic_jobs.py index 7321a273d..32e14269a 100644 --- a/test/unit/test_dic_jobs.py +++ b/test/unit/test_dic_jobs.py @@ -298,7 +298,7 @@ class TestDicJobs(TestCase): 'True', processors, threads, tasks, memory, wallclock, notify_on]) job_list_mock = Mock() job_list_mock.append = Mock() - self.dictionary._joblist.get_job_list = Mock(return_value=job_list_mock) + self.dictionary._jobs_list.get_job_list = Mock(return_value=job_list_mock) # act created_job = self.dictionary.build_job(section, priority, date, member, chunk, dict()) diff --git a/test/unit/test_job_list.py b/test/unit/test_job_list.py index 209734ae4..24f869c7a 100644 --- a/test/unit/test_job_list.py +++ b/test/unit/test_job_list.py 
@@ -218,7 +218,7 @@ class TestJobList(TestCase): graph_mock = Mock() job_list.graph = graph_mock # act - job_list.generate(date_list, member_list, num_chunks, parameters, 'H', 9999, Type.BASH) + job_list.generate(date_list, member_list, num_chunks, 1, parameters, 'H', 9999, Type.BASH) # assert self.assertEquals(job_list.parameters, parameters) -- GitLab From 1c6cac83484c11ca71699cc76c97040c927ad3fd Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 13:03:46 +0100 Subject: [PATCH 02/60] ACCOUNT & PARTITION directives were added --- autosubmit/config/files/platforms.conf | 4 +++ autosubmit/job/job.py | 2 ++ autosubmit/platforms/paramiko_platform.py | 4 +++ autosubmit/platforms/paramiko_submitter.py | 2 ++ autosubmit/platforms/platform.py | 2 ++ autosubmit/platforms/slurmplatform.py | 36 +++++++++++++++++++++- 6 files changed, 49 insertions(+), 1 deletion(-) diff --git a/autosubmit/config/files/platforms.conf b/autosubmit/config/files/platforms.conf index 84531137e..5194bb0ee 100644 --- a/autosubmit/config/files/platforms.conf +++ b/autosubmit/config/files/platforms.conf @@ -16,6 +16,10 @@ # ADD_PROJECT_TO_HOST = False ## User for the machine scheduler. Required # USER = +## Account for the machine scheduler. Optional +# ACCOUNT = +## Partition for the machine scheduler. Optional +# PARTITION = ## Path to the scratch directory for the machine. Required. # SCRATCH_DIR = /scratch ## If true, Autosubmit test command can use this queue as a main queue. 
Defaults to False diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 5181d240d..ab82fe141 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -577,6 +577,8 @@ class Job(object): parameters['CURRENT_HOST'] = job_platform.host parameters['CURRENT_QUEUE'] = self.queue parameters['CURRENT_USER'] = job_platform.user + parameters['CURRENT_ACCOUNT'] = job_platform.account + parameters['CURRENT_PARTITION'] = job_platform.partition parameters['CURRENT_PROJ'] = job_platform.project parameters['CURRENT_BUDG'] = job_platform.budget parameters['CURRENT_RESERVATION'] = job_platform.reservation diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index 6f7f86cb9..a5287bc9b 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -397,6 +397,10 @@ class ParamikoPlatform(Platform): header = header.replace('%SCRATCH_FREE_SPACE_DIRECTIVE%', self.header.get_scratch_free_space(job)) if hasattr(self.header, 'get_exclusivity'): header = header.replace('%EXCLUSIVITY_DIRECTIVE%', self.header.get_exclusivity(job)) + if hasattr(self.header, 'get_account_directive'): + header = header.replace('%ACCOUNT_DIRECTIVE%', self.header.get_account_directive(job)) + if hasattr(self.header, 'get_partition_directive'): + header = header.replace('%PARTITION_DIRECTIVE%', self.header.get_partition_directive(job)) return header def check_remote_log_dir(self): diff --git a/autosubmit/platforms/paramiko_submitter.py b/autosubmit/platforms/paramiko_submitter.py index 080b2a34c..8c1c645b8 100644 --- a/autosubmit/platforms/paramiko_submitter.py +++ b/autosubmit/platforms/paramiko_submitter.py @@ -118,6 +118,8 @@ class ParamikoSubmitter(Submitter): remote_platform.reservation = AutosubmitConfig.get_option(parser, section, 'RESERVATION', '') remote_platform.exclusivity = AutosubmitConfig.get_option(parser, section, 'EXCLUSIVITY', '').lower() remote_platform.user = 
AutosubmitConfig.get_option(parser, section, 'USER', None) + remote_platform.account = AutosubmitConfig.get_option(parser, section, 'ACCOUNT', '') + remote_platform.partition = AutosubmitConfig.get_option(parser, section, 'PARTITION', '') remote_platform.scratch = AutosubmitConfig.get_option(parser, section, 'SCRATCH_DIR', None) remote_platform._default_queue = AutosubmitConfig.get_option(parser, section, 'QUEUE', None) remote_platform._serial_queue = AutosubmitConfig.get_option(parser, section, 'SERIAL_QUEUE', None) diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index ea3290cc9..e24b82e84 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -29,6 +29,8 @@ class Platform(object): self.scratch_free_space = None self.host = '' self.user = '' + self.account = '' + self.partition = '' self.project = '' self.budget = '' self.reservation = '' diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py index 4bf4a03f8..f68e69e10 100644 --- a/autosubmit/platforms/slurmplatform.py +++ b/autosubmit/platforms/slurmplatform.py @@ -105,7 +105,7 @@ class SlurmHeader: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str @@ -116,12 +116,44 @@ class SlurmHeader: else: return "SBATCH --qos {0}".format(job.parameters['CURRENT_QUEUE']) + # noinspection PyMethodMayBeStatic,PyUnusedLocal + def get_account_directive(self, job): + """ + Returns account directive for the specified job + + :param job: job to create account directive for + :type job: Job + :return: account directive + :rtype: str + """ + # There is no account, so directive is empty + if job.parameters['CURRENT_ACCOUNT'] != '': + return "SBATCH -A {0}".format(job.parameters['CURRENT_ACCOUNT']) + return "" + + # noinspection PyMethodMayBeStatic,PyUnusedLocal + def 
get_partition_directive(self, job): + """ + Returns partition directive for the specified job + + :param job: job to create partition directive for + :type job: Job + :return: partition directive + :rtype: str + """ + # There is no account, so directive is empty + if job.parameters['CURRENT_PARTITION'] != '': + return "SBATCH -p {0}".format(job.parameters['CURRENT_PARTITION']) + return "" + SERIAL = textwrap.dedent("""\ ############################################################################### # %TASKTYPE% %EXPID% EXPERIMENT ############################################################################### # #%QUEUE_DIRECTIVE% + #%ACCOUNT_DIRECTIVE% + #%PARTITION_DIRECTIVE% #SBATCH -n %NUMPROC% #SBATCH -t %WALLCLOCK%:00 #SBATCH -J %JOBNAME% @@ -137,6 +169,8 @@ class SlurmHeader: ############################################################################### # #%QUEUE_DIRECTIVE% + #%ACCOUNT_DIRECTIVE% + #%PARTITION_DIRECTIVE% #SBATCH -n %NUMPROC% #SBATCH -t %WALLCLOCK%:00 #SBATCH -J %JOBNAME% -- GitLab From f28c3a804824b0cd3c6793d5e80ad257dd6d29ee Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 14:13:58 +0100 Subject: [PATCH 03/60] MEMORY directive was added for Slurm & MEMORY_PER_TASK directive was added for Slurm & EC --- autosubmit/config/config_common.py | 12 +++++++- autosubmit/config/files/jobs.conf | 4 ++- autosubmit/job/job.py | 7 +++-- autosubmit/job/job_list.py | 1 + autosubmit/platforms/ecplatform.py | 16 ++++++++++ autosubmit/platforms/paramiko_platform.py | 4 +++ autosubmit/platforms/slurmplatform.py | 36 ++++++++++++++++++++++- 7 files changed, 74 insertions(+), 6 deletions(-) diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index f1ff163e0..34ae7d5dc 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -183,7 +183,17 @@ class AutosubmitConfig: :return: memory needed :rtype: str """ - return int(AutosubmitConfig.get_option(self.jobs_parser, section, 'MEMORY', 0)) 
+ return str(AutosubmitConfig.get_option(self.jobs_parser, section, 'MEMORY', '')) + + def get_memory_per_task(self, section): + """ + Gets memory per task needed for the given job type + :param section: job type + :type section: str + :return: memory per task needed + :rtype: str + """ + return str(AutosubmitConfig.get_option(self.jobs_parser, section, 'MEMORY_PER_TASK', '')) def check_conf_files(self): """ diff --git a/autosubmit/config/files/jobs.conf b/autosubmit/config/files/jobs.conf index 6aa9c93d0..8caccb5cb 100644 --- a/autosubmit/config/files/jobs.conf +++ b/autosubmit/config/files/jobs.conf @@ -35,8 +35,10 @@ # THREADS = 1 ## Tasks number (number of processes per node) to be submitted to the HPC. If not specified, defaults to empty. # TASKS = 16 -## Memory requirements for the job in MB. If not specified, defaults to empty. +## Memory requirements for the job in MB. Optional. If not specified, then not defined for the scheduler. # MEMORY = 4096 +## Memory per task requirements for the job in MB. Optional. If not specified, then not defined for the scheduler. +# MEMORY_PER_TASK = 1024 ## Scratch free space requirements for the job in percentage (%). If not specified, it won't be defined on the template. # SCRATCH_FREE_SPACE = 10 ## Number of retrials if a job fails. 
If not specified, defaults to the value given on experiment's autosubmit.conf diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index ab82fe141..d8908e5d1 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -59,11 +59,11 @@ class Job(object): self.tasks = None self.threads = None self.processors = None - self.memory = None + self.memory = '' + self.memory_per_task = '' self.chunk = None self.member = None self.date = None - self.memory = None self.name = name self._long_name = None self.long_name = name @@ -559,6 +559,7 @@ class Job(object): if self.tasks == 0: self.tasks = job_platform.processors_per_node self.memory = as_conf.get_memory(self.section) + self.memory_per_task = as_conf.get_memory_per_task(self.section) self.wallclock = as_conf.get_wallclock(self.section) self.scratch_free_space = as_conf.get_scratch_free_space(self.section) if self.scratch_free_space == 0: @@ -566,11 +567,11 @@ class Job(object): parameters['NUMPROC'] = self.processors parameters['MEMORY'] = self.memory + parameters['MEMORY_PER_TASK'] = self.memory_per_task parameters['NUMTHREADS'] = self.threads parameters['NUMTASK'] = self.tasks parameters['WALLCLOCK'] = self.wallclock parameters['TASKTYPE'] = self.section - parameters['MEMORY'] = self.memory parameters['SCRATCH_FREE_SPACE'] = self.scratch_free_space parameters['CURRENT_ARCH'] = job_platform.name diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 6073e70b3..e0d4aa89d 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -1076,6 +1076,7 @@ class DicJobs: job.threads = self.get_option(section, "THREADS", '') job.tasks = self.get_option(section, "TASKS", '') job.memory = self.get_option(section, "MEMORY", '') + job.memory_per_task = self.get_option(section, "MEMORY_PER_TASK", '') job.wallclock = self.get_option(section, "WALLCLOCK", '') job.retrials = int(self.get_option(section, 'RETRIALS', -1)) if job.retrials == -1: diff --git a/autosubmit/platforms/ecplatform.py 
b/autosubmit/platforms/ecplatform.py index e8d96d289..8e8ea6829 100644 --- a/autosubmit/platforms/ecplatform.py +++ b/autosubmit/platforms/ecplatform.py @@ -254,6 +254,21 @@ class EcCcaHeader: else: return '#PBS -l EC_threads_per_task={0}'.format(job.threads) + # noinspection PyMethodMayBeStatic,PyUnusedLocal + def get_memory_per_task_directive(self, job): + """ + Returns memory per task directive for the specified job + + :param job: job to create memory per task directive for + :type job: Job + :return: memory per task directive + :rtype: str + """ + # There is no memory per task, so directive is empty + if job.parameters['MEMORY_PER_TASK'] != '': + return "#PBS -l EC_memory_per_task={0}mb".format(job.parameters['MEMORY_PER_TASK']) + return "" + SERIAL = textwrap.dedent("""\ ############################################################################### # %TASKTYPE% %EXPID% EXPERIMENT @@ -282,6 +297,7 @@ class EcCcaHeader: #PBS -l EC_total_tasks=%NUMPROC% %THREADS_PER_TASK_DIRECTIVE% %TASKS_PER_NODE_DIRECTIVE% + %MEMORY_PER_TASK_DIRECTIVE% #PBS -l walltime=%WALLCLOCK%:00 #PBS -l EC_billing_account=%CURRENT_BUDG% # diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index a5287bc9b..efbf0f37b 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -401,6 +401,10 @@ class ParamikoPlatform(Platform): header = header.replace('%ACCOUNT_DIRECTIVE%', self.header.get_account_directive(job)) if hasattr(self.header, 'get_partition_directive'): header = header.replace('%PARTITION_DIRECTIVE%', self.header.get_partition_directive(job)) + if hasattr(self.header, 'get_memory_directive'): + header = header.replace('%MEMORY_DIRECTIVE%', self.header.get_memory_directive(job)) + if hasattr(self.header, 'get_memory_per_task_directive'): + header = header.replace('%MEMORY_PER_TASK_DIRECTIVE%', self.header.get_memory_per_task_directive(job)) return header def check_remote_log_dir(self): 
diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py index f68e69e10..4d3d2a2bf 100644 --- a/autosubmit/platforms/slurmplatform.py +++ b/autosubmit/platforms/slurmplatform.py @@ -141,11 +141,41 @@ class SlurmHeader: :return: partition directive :rtype: str """ - # There is no account, so directive is empty + # There is no partition, so directive is empty if job.parameters['CURRENT_PARTITION'] != '': return "SBATCH -p {0}".format(job.parameters['CURRENT_PARTITION']) return "" + # noinspection PyMethodMayBeStatic,PyUnusedLocal + def get_memory_directive(self, job): + """ + Returns memory directive for the specified job + + :param job: job to create memory directive for + :type job: Job + :return: memory directive + :rtype: str + """ + # There is no memory, so directive is empty + if job.parameters['MEMORY'] != '': + return "SBATCH --mem {0}".format(job.parameters['MEMORY']) + return "" + + # noinspection PyMethodMayBeStatic,PyUnusedLocal + def get_memory_per_task_directive(self, job): + """ + Returns memory per task directive for the specified job + + :param job: job to create memory per task directive for + :type job: Job + :return: memory per task directive + :rtype: str + """ + # There is no memory per task, so directive is empty + if job.parameters['MEMORY_PER_TASK'] != '': + return "SBATCH --mem-per-cpu {0}".format(job.parameters['MEMORY_PER_TASK']) + return "" + SERIAL = textwrap.dedent("""\ ############################################################################### # %TASKTYPE% %EXPID% EXPERIMENT @@ -154,6 +184,8 @@ class SlurmHeader: #%QUEUE_DIRECTIVE% #%ACCOUNT_DIRECTIVE% #%PARTITION_DIRECTIVE% + #%MEMORY_DIRECTIVE% + #%MEMORY_PER_TASK_DIRECTIVE% #SBATCH -n %NUMPROC% #SBATCH -t %WALLCLOCK%:00 #SBATCH -J %JOBNAME% @@ -171,6 +203,8 @@ class SlurmHeader: #%QUEUE_DIRECTIVE% #%ACCOUNT_DIRECTIVE% #%PARTITION_DIRECTIVE% + #%MEMORY_DIRECTIVE% + #%MEMORY_PER_TASK_DIRECTIVE% #SBATCH -n %NUMPROC% #SBATCH -t %WALLCLOCK%:00 
#SBATCH -J %JOBNAME% -- GitLab From 0338c6d6554a4b09dc78d8b72dd57d6f950bffac Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 15:56:54 +0100 Subject: [PATCH 04/60] Some tests were fixed --- test/unit/test_autosubmit_ config.py | 4 ++-- test/unit/test_dic_jobs.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/test/unit/test_autosubmit_ config.py b/test/unit/test_autosubmit_ config.py index 30f75bad2..b6be7aeec 100644 --- a/test/unit/test_autosubmit_ config.py +++ b/test/unit/test_autosubmit_ config.py @@ -165,12 +165,12 @@ class TestAutosubmitConfig(TestCase): def test_get_memory(self): # arrange - expected_value = 99999 + expected_value = '99999' config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_memory(self.section) # assert - self._assert_get_option(parser_mock, 'MEMORY', expected_value, returned_value, int) + self._assert_get_option(parser_mock, 'MEMORY', expected_value, returned_value, str) def test_check_exists_case_true(self): # arrange diff --git a/test/unit/test_dic_jobs.py b/test/unit/test_dic_jobs.py index 32e14269a..162f66b99 100644 --- a/test/unit/test_dic_jobs.py +++ b/test/unit/test_dic_jobs.py @@ -289,13 +289,14 @@ class TestDicJobs(TestCase): processors = 111 threads = 222 tasks = 333 - memory = 444 + memory = memory_per_task = 444 wallclock = 555 notify_on = 'COMPLETED FAILED' self.parser_mock.has_option = Mock(side_effect=[True, True, True, True, True, True, True, True, True, True, - True, True, True, False, True]) + True, True, True, True, False, True]) self.parser_mock.get = Mock(side_effect=[frequency, 'True', 'True', 'bash', platform_name, filename, queue, - 'True', processors, threads, tasks, memory, wallclock, notify_on]) + 'True', processors, threads, tasks, memory, memory_per_task, + wallclock, notify_on]) job_list_mock = Mock() job_list_mock.append = Mock() self.dictionary._jobs_list.get_job_list = Mock(return_value=job_list_mock) -- GitLab From 
9b617d9aa6dcc6d12fa2e0f1b0daced5f8726781 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 16:27:17 +0100 Subject: [PATCH 05/60] HYPERTHREADING directive was added --- autosubmit/job/job.py | 1 + autosubmit/platforms/ecplatform.py | 16 ++++++++++++++++ autosubmit/platforms/paramiko_platform.py | 2 ++ autosubmit/platforms/paramiko_submitter.py | 2 ++ autosubmit/platforms/platform.py | 1 + 5 files changed, 22 insertions(+) diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index d8908e5d1..ba468de60 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -584,6 +584,7 @@ class Job(object): parameters['CURRENT_BUDG'] = job_platform.budget parameters['CURRENT_RESERVATION'] = job_platform.reservation parameters['CURRENT_EXCLUSIVITY'] = job_platform.exclusivity + parameters['CURRENT_HYPERTHREADING'] = job_platform.hyperthreading parameters['CURRENT_TYPE'] = job_platform.type parameters['CURRENT_SCRATCH_DIR'] = job_platform.scratch parameters['CURRENT_ROOTDIR'] = job_platform.root_dir diff --git a/autosubmit/platforms/ecplatform.py b/autosubmit/platforms/ecplatform.py index 8e8ea6829..5dd8ca472 100644 --- a/autosubmit/platforms/ecplatform.py +++ b/autosubmit/platforms/ecplatform.py @@ -269,6 +269,21 @@ class EcCcaHeader: return "#PBS -l EC_memory_per_task={0}mb".format(job.parameters['MEMORY_PER_TASK']) return "" + # noinspection PyMethodMayBeStatic,PyUnusedLocal + def get_hyperthreading_directive(self, job): + """ + Returns hyperthreading directive for the specified job + + :param job: job to create hyperthreading directive for + :type job: Job + :return: hyperthreading per task directive + :rtype: str + """ + # There is no memory per task, so directive is empty + if job.parameters['CURRENT_HYPERTHREADING'] == 'true': + return "#PBS -l EC_hyperthreads=2" + return "#PBS -l EC_hyperthreads=1" + SERIAL = textwrap.dedent("""\ ############################################################################### # %TASKTYPE% %EXPID% EXPERIMENT @@ -298,6 
+313,7 @@ class EcCcaHeader: %THREADS_PER_TASK_DIRECTIVE% %TASKS_PER_NODE_DIRECTIVE% %MEMORY_PER_TASK_DIRECTIVE% + %HYPERTHREADING_DIRECTIVE% #PBS -l walltime=%WALLCLOCK%:00 #PBS -l EC_billing_account=%CURRENT_BUDG% # diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index efbf0f37b..a3630a36e 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -405,6 +405,8 @@ class ParamikoPlatform(Platform): header = header.replace('%MEMORY_DIRECTIVE%', self.header.get_memory_directive(job)) if hasattr(self.header, 'get_memory_per_task_directive'): header = header.replace('%MEMORY_PER_TASK_DIRECTIVE%', self.header.get_memory_per_task_directive(job)) + if hasattr(self.header, 'get_hyperthreading_directive'): + header = header.replace('%HYPERTHREADING_DIRECTIVE%', self.header.get_hyperthreading_directive(job)) return header def check_remote_log_dir(self): diff --git a/autosubmit/platforms/paramiko_submitter.py b/autosubmit/platforms/paramiko_submitter.py index 8c1c645b8..0e7a37594 100644 --- a/autosubmit/platforms/paramiko_submitter.py +++ b/autosubmit/platforms/paramiko_submitter.py @@ -113,6 +113,8 @@ class ParamikoSubmitter(Submitter): asconf.get_max_waiting_jobs())) remote_platform.total_jobs = int(AutosubmitConfig.get_option(parser, section, 'TOTAL_JOBS', asconf.get_total_jobs())) + remote_platform.hyperthreading = AutosubmitConfig.get_option(parser, section, 'HYPERTHREADING', + 'false').lower() remote_platform.project = AutosubmitConfig.get_option(parser, section, 'PROJECT', None) remote_platform.budget = AutosubmitConfig.get_option(parser, section, 'BUDGET', remote_platform.project) remote_platform.reservation = AutosubmitConfig.get_option(parser, section, 'RESERVATION', '') diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index e24b82e84..8b95ad4c2 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -41,6 
+41,7 @@ class Platform(object): self.service = None self.scheduler = None self.directory = None + self.hyperthreading = 'false' self._allow_arrays = False @property -- GitLab From abe899c6007855c15ea8b7097a3e893b32a6f824 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 17:03:23 +0100 Subject: [PATCH 06/60] PROCESSORS variable was refactored to string --- autosubmit/config/config_common.py | 2 +- autosubmit/job/job.py | 14 +++++++------- autosubmit/job/job_list.py | 2 +- autosubmit/job/job_package.py | 2 +- autosubmit/monitor/monitor.py | 15 ++++++++++++--- autosubmit/platforms/paramiko_platform.py | 6 +++--- test/unit/test_autosubmit_ config.py | 4 ++-- test/unit/test_dic_jobs.py | 2 +- test/unit/test_job.py | 4 ++-- 9 files changed, 30 insertions(+), 21 deletions(-) diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 34ae7d5dc..022ded75c 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -143,7 +143,7 @@ class AutosubmitConfig: :return: wallclock time :rtype: str """ - return int(AutosubmitConfig.get_option(self.jobs_parser, section, 'PROCESSORS', 1)) + return str(AutosubmitConfig.get_option(self.jobs_parser, section, 'PROCESSORS', 1)) def get_threads(self, section): """ diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index ba468de60..4abc34d71 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -58,7 +58,7 @@ class Job(object): self.wallclock = None self.tasks = None self.threads = None - self.processors = None + self.processors = '1' self.memory = '' self.memory_per_task = '' self.chunk = None @@ -132,10 +132,10 @@ class Job(object): :return HPCPlatform object for the job to use :rtype: HPCPlatform """ - if self.processors > 1: - return self._platform - else: + if str(self.processors) == '1': return self._platform.serial_platform + else: + return self._platform @platform.setter def platform(self, value): @@ -157,10 +157,10 @@ class Job(object): 
""" if self._queue is not None: return self._queue - if self.processors > 1: - return self._platform.queue - else: + if str(self.processors) == '1': return self._platform.serial_platform.serial_queue + else: + return self._platform.queue @queue.setter def queue(self, value): diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index e0d4aa89d..26a8d2a8a 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -1072,7 +1072,7 @@ class DicJobs: else: job.check = False - job.processors = self.get_option(section, "PROCESSORS", 1) + job.processors = str(self.get_option(section, "PROCESSORS", 1)) job.threads = self.get_option(section, "THREADS", '') job.tasks = self.get_option(section, "TASKS", '') job.memory = self.get_option(section, "MEMORY", '') diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index 58c7d84a9..d177e44ac 100644 --- a/autosubmit/job/job_package.py +++ b/autosubmit/job/job_package.py @@ -125,7 +125,7 @@ class JobPackageArray(JobPackageBase): self._common_script = None self._array_size_id = "[1-" + str(len(jobs)) + "]" self._wallclock = '00:00' - self._num_processors = 1 + self._num_processors = '1' for job in jobs: if job.wallclock > self._wallclock: self._wallclock = job.wallclock diff --git a/autosubmit/monitor/monitor.py b/autosubmit/monitor/monitor.py index 7a3a0b663..27ad74aea 100644 --- a/autosubmit/monitor/monitor.py +++ b/autosubmit/monitor/monitor.py @@ -258,7 +258,11 @@ class Monitor: else: hours = 0 threshold = max(threshold, hours) - expected_cpu_consumption += hours * int(job.processors) + if ':' in job.processors: + processors = reduce(lambda x, y: int(x) + int(y), job.processors.split(':')) + else: + processors = job.processors + expected_cpu_consumption += hours * int(processors) expected_real_consumption += hours # These are constants, so they need to be CAPS. 
Suppress PyCharm warning # noinspection PyPep8Naming @@ -299,6 +303,11 @@ class Monitor: start_times = job.check_retrials_start_time() end_times = job.check_retrials_end_time() + if ':' in job.processors: + processors = reduce(lambda x, y: int(x) + int(y), job.processors.split(':')) + else: + processors = job.processors + for j, t in enumerate(submit_times): if j >= len(end_times): @@ -307,13 +316,13 @@ class Monitor: elif j == (len(submit_times) - 1) and job.status == Status.COMPLETED: queued[i] += start_times[j] - submit_times[j] run[i] += end_times[j] - start_times[j] - cpu_consumption += run[i] * int(job.processors) + cpu_consumption += run[i] * int(processors) real_consumption += run[i] else: failed_jobs[i] += 1 fail_queued[i] += start_times[j] - submit_times[j] fail_run[i] += end_times[j] - start_times[j] - cpu_consumption += fail_run[i] * int(job.processors) + cpu_consumption += fail_run[i] * int(processors) real_consumption += fail_run[i] total_jobs_run += len(start_times) total_jobs_failed += failed_jobs[i] diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index a3630a36e..ad6267efd 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -375,10 +375,10 @@ class ParamikoPlatform(Platform): :return: header to use :rtype: str """ - if job.processors > 1: - header = self.header.PARALLEL - else: + if str(job.processors) == '1': header = self.header.SERIAL + else: + header = self.header.PARALLEL str_datetime = date2str(datetime.datetime.now(), 'S') out_filename = "{0}.{1}.out".format(job.name, str_datetime) diff --git a/test/unit/test_autosubmit_ config.py b/test/unit/test_autosubmit_ config.py index b6be7aeec..0396c0478 100644 --- a/test/unit/test_autosubmit_ config.py +++ b/test/unit/test_autosubmit_ config.py @@ -138,12 +138,12 @@ class TestAutosubmitConfig(TestCase): def test_get_processors(self): # arrange - expected_value = 99999 + expected_value = '99999' 
config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_processors(self.section) # assert - self._assert_get_option(parser_mock, 'PROCESSORS', expected_value, returned_value, int) + self._assert_get_option(parser_mock, 'PROCESSORS', expected_value, returned_value, str) def test_get_threads(self): # arrange diff --git a/test/unit/test_dic_jobs.py b/test/unit/test_dic_jobs.py index 162f66b99..c4bf116dd 100644 --- a/test/unit/test_dic_jobs.py +++ b/test/unit/test_dic_jobs.py @@ -286,7 +286,7 @@ class TestDicJobs(TestCase): platform_name = 'fake-platform' filename = 'fake-fike' queue = 'fake-queue' - processors = 111 + processors = '111' threads = 222 tasks = 333 memory = memory_per_task = 444 diff --git a/test/unit/test_job.py b/test/unit/test_job.py index 25c0aa6f4..9757692c0 100644 --- a/test/unit/test_job.py +++ b/test/unit/test_job.py @@ -43,7 +43,7 @@ class TestJob(TestCase): platform.serial_platform = 'serial-platform' self.job._platform = platform - self.job.processors = 1 + self.job.processors = '1' returned_platform = self.job.platform @@ -90,7 +90,7 @@ class TestJob(TestCase): dummy_platform.queue = parallel_queue self.job.platform = dummy_platform - self.job.processors = 1 + self.job.processors = '1' self.assertIsNone(self.job._queue) -- GitLab From 0c241247950badf78b7f82840abf9eaea87b10b4 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 18:08:33 +0100 Subject: [PATCH 07/60] Regression tests were improved --- test/regression/default_conf/platforms.conf | 29 +++++++++++++++---- .../test_ecmwf_with_paramiko/conf/jobs.conf | 9 ++++++ .../conf/jobs.conf | 13 --------- .../src/TEST_NOLEAP.py | 5 ---- .../conf/jobs.conf | 13 --------- .../src/TEST_NOLEAP.py | 5 ---- .../conf/autosubmit.conf | 0 .../conf/expdef.conf | 4 +-- .../test_mistral_with_paramiko/conf/jobs.conf | 19 ++++++++++++ .../conf/proj.conf | 0 .../src/TEST_NOLEAP.sh | 6 ++++ .../conf/autosubmit.conf | 0 .../conf/expdef.conf | 4 +-- 
.../test_mistral_with_saga/conf/jobs.conf | 19 ++++++++++++ .../conf/proj.conf | 0 .../test_mistral_with_saga/src/TEST_NOLEAP.sh | 6 ++++ 16 files changed, 87 insertions(+), 45 deletions(-) delete mode 100644 test/regression/test_ecmwf_with_paramiko_python/conf/jobs.conf delete mode 100644 test/regression/test_ecmwf_with_paramiko_python/src/TEST_NOLEAP.py delete mode 100644 test/regression/test_ecmwf_with_saga_python/conf/jobs.conf delete mode 100644 test/regression/test_ecmwf_with_saga_python/src/TEST_NOLEAP.py rename test/regression/{test_ecmwf_with_paramiko_python => test_mistral_with_paramiko}/conf/autosubmit.conf (100%) rename test/regression/{test_ecmwf_with_paramiko_python => test_mistral_with_paramiko}/conf/expdef.conf (98%) create mode 100644 test/regression/test_mistral_with_paramiko/conf/jobs.conf rename test/regression/{test_ecmwf_with_paramiko_python => test_mistral_with_paramiko}/conf/proj.conf (100%) create mode 100644 test/regression/test_mistral_with_paramiko/src/TEST_NOLEAP.sh rename test/regression/{test_ecmwf_with_saga_python => test_mistral_with_saga}/conf/autosubmit.conf (100%) rename test/regression/{test_ecmwf_with_saga_python => test_mistral_with_saga}/conf/expdef.conf (98%) create mode 100644 test/regression/test_mistral_with_saga/conf/jobs.conf rename test/regression/{test_ecmwf_with_saga_python => test_mistral_with_saga}/conf/proj.conf (100%) create mode 100644 test/regression/test_mistral_with_saga/src/TEST_NOLEAP.sh diff --git a/test/regression/default_conf/platforms.conf b/test/regression/default_conf/platforms.conf index 7b2d75d9c..8a3a3c058 100644 --- a/test/regression/default_conf/platforms.conf +++ b/test/regression/default_conf/platforms.conf @@ -1,18 +1,28 @@ [ecmwf-cca] -# Queue type. 
Options: ps, SGE, LSF, SLURM, PBS, eceaccess TYPE = ecaccess VERSION = pbs HOST = cca PROJECT = spesiccf ADD_PROJECT_TO_HOST = false -USER = c3j +USER = c3d +SCRATCH_DIR = /scratch/ms +TEST_SUITE = True +PROCESSORS_PER_NODE = 24 + +[ecmwf-cca-hyperthreading] +TYPE = ecaccess +VERSION = pbs +HOST = cca +PROJECT = spesiccf +ADD_PROJECT_TO_HOST = false +USER = c3d +HYPERTHREADING = true SCRATCH_DIR = /scratch/ms TEST_SUITE = True PROCESSORS_PER_NODE = 24 [moore] -# Queue type. Options: ps, SGE, LSF, SLURM, PBS, eceaccess -TYPE = SLURM +TYPE = slurm HOST = moore PROJECT = Earth USER = jlope2 @@ -21,7 +31,6 @@ TEST_SUITE = False QUEUE = serial [marenostrum3] -# Queue type. Options: ps, SGE, LSF, SLURM, PBS, eceaccess TYPE = LSF VERSION = mn HOST = mn-bsc32 @@ -32,6 +41,16 @@ SCRATCH_DIR = /gpfs/scratch TEST_SUITE = True PROCESSORS_PER_NODE = 16 +[mistral] +TYPE = slurm +HOST = mistral.dkrz.de +PROJECT = mh0469 +QUEUE = compute +ADD_PROJECT_TO_HOST = false +USER = m300339 +SCRATCH_DIR = /mnt/lustre01/work +TEST_SUITE = True + [SEDEMA] # Queue type. 
Options: ps, SGE, LSF, SLURM, PBS, eceaccess TYPE = lsf diff --git a/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf b/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf index 58f8dadfd..34febdbf5 100644 --- a/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf +++ b/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf @@ -18,4 +18,13 @@ RUNNING = chunk WALLCLOCK = 00:10 TASKS = 24 PROCESSORS = 48 +DEPENDENCIES = REMOTE_PARALLEL_SETUP-1 + +[GEO_REMOTE] +FILE = TEST_NOLEAP.sh +RUNNING = chunk +WALLCLOCK = 00:10 +TASKS = 24 +PROCESSORS = 24:72:1:1 +MEMORY_PER_TASK = 2000 DEPENDENCIES = REMOTE_PARALLEL_SETUP-1 \ No newline at end of file diff --git a/test/regression/test_ecmwf_with_paramiko_python/conf/jobs.conf b/test/regression/test_ecmwf_with_paramiko_python/conf/jobs.conf deleted file mode 100644 index 5859cc76e..000000000 --- a/test/regression/test_ecmwf_with_paramiko_python/conf/jobs.conf +++ /dev/null @@ -1,13 +0,0 @@ -[LOCAL_SETUP] -FILE = TEST_NOLEAP.py -PLATFORM = LOCAL -RUNNING = chunk -WALLCLOCK = 00:10 -DEPENDENCIES = LOCAL_SETUP-1 - -[REMOTE_SETUP] -FILE = TEST_NOLEAP.py -RUNNING = chunk -WALLCLOCK = 00:10 -TASKS = 24 -DEPENDENCIES = REMOTE_SETUP-1 diff --git a/test/regression/test_ecmwf_with_paramiko_python/src/TEST_NOLEAP.py b/test/regression/test_ecmwf_with_paramiko_python/src/TEST_NOLEAP.py deleted file mode 100644 index 6428017b6..000000000 --- a/test/regression/test_ecmwf_with_paramiko_python/src/TEST_NOLEAP.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -print "%Chunk_END_DATE%" -print "%CHUNK%" -print "%PREV%" -print "%NUMMEMBERS%" diff --git a/test/regression/test_ecmwf_with_saga_python/conf/jobs.conf b/test/regression/test_ecmwf_with_saga_python/conf/jobs.conf deleted file mode 100644 index 5859cc76e..000000000 --- a/test/regression/test_ecmwf_with_saga_python/conf/jobs.conf +++ /dev/null @@ -1,13 +0,0 @@ -[LOCAL_SETUP] -FILE = TEST_NOLEAP.py -PLATFORM = LOCAL -RUNNING = chunk -WALLCLOCK = 00:10 -DEPENDENCIES = 
LOCAL_SETUP-1 - -[REMOTE_SETUP] -FILE = TEST_NOLEAP.py -RUNNING = chunk -WALLCLOCK = 00:10 -TASKS = 24 -DEPENDENCIES = REMOTE_SETUP-1 diff --git a/test/regression/test_ecmwf_with_saga_python/src/TEST_NOLEAP.py b/test/regression/test_ecmwf_with_saga_python/src/TEST_NOLEAP.py deleted file mode 100644 index 6428017b6..000000000 --- a/test/regression/test_ecmwf_with_saga_python/src/TEST_NOLEAP.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -print "%Chunk_END_DATE%" -print "%CHUNK%" -print "%PREV%" -print "%NUMMEMBERS%" diff --git a/test/regression/test_ecmwf_with_paramiko_python/conf/autosubmit.conf b/test/regression/test_mistral_with_paramiko/conf/autosubmit.conf similarity index 100% rename from test/regression/test_ecmwf_with_paramiko_python/conf/autosubmit.conf rename to test/regression/test_mistral_with_paramiko/conf/autosubmit.conf diff --git a/test/regression/test_ecmwf_with_paramiko_python/conf/expdef.conf b/test/regression/test_mistral_with_paramiko/conf/expdef.conf similarity index 98% rename from test/regression/test_ecmwf_with_paramiko_python/conf/expdef.conf rename to test/regression/test_mistral_with_paramiko/conf/expdef.conf index 32c7e6235..c05362da1 100644 --- a/test/regression/test_ecmwf_with_paramiko_python/conf/expdef.conf +++ b/test/regression/test_mistral_with_paramiko/conf/expdef.conf @@ -4,7 +4,7 @@ EXPID = EXPID-HERE # HPC name. # No need to change -HPCARCH = ecmwf-cca +HPCARCH = mistral [experiment] # Supply the list of start dates. 
Available formats: YYYYMMDD YYYYMMDDhh YYYYMMDDhhmm @@ -70,4 +70,4 @@ PROJECT_PATH = PROJECT-PATH-HERE FILE_PROJECT_CONF = # Where is JOBS CONFIGURATION file location relative to project root path FILE_JOBS_CONF = -JOB_SCRIPTS_TYPE = python +JOB_SCRIPTS_TYPE = bash diff --git a/test/regression/test_mistral_with_paramiko/conf/jobs.conf b/test/regression/test_mistral_with_paramiko/conf/jobs.conf new file mode 100644 index 000000000..e66d93ec6 --- /dev/null +++ b/test/regression/test_mistral_with_paramiko/conf/jobs.conf @@ -0,0 +1,19 @@ +[LOCAL_SETUP] +FILE = TEST_NOLEAP.sh +PLATFORM = LOCAL +RUNNING = chunk +WALLCLOCK = 00:10 +DEPENDENCIES = LOCAL_SETUP-1 + +[REMOTE_SETUP] +FILE = TEST_NOLEAP.sh +RUNNING = chunk +WALLCLOCK = 00:10 +DEPENDENCIES = REMOTE_SETUP-1 + +[REMOTE_PARALLEL_SETUP] +FILE = TEST_NOLEAP.sh +RUNNING = chunk +WALLCLOCK = 00:10 +PROCESSORS = 24 +DEPENDENCIES = REMOTE_PARALLEL_SETUP-1 \ No newline at end of file diff --git a/test/regression/test_ecmwf_with_paramiko_python/conf/proj.conf b/test/regression/test_mistral_with_paramiko/conf/proj.conf similarity index 100% rename from test/regression/test_ecmwf_with_paramiko_python/conf/proj.conf rename to test/regression/test_mistral_with_paramiko/conf/proj.conf diff --git a/test/regression/test_mistral_with_paramiko/src/TEST_NOLEAP.sh b/test/regression/test_mistral_with_paramiko/src/TEST_NOLEAP.sh new file mode 100644 index 000000000..e9a10ba64 --- /dev/null +++ b/test/regression/test_mistral_with_paramiko/src/TEST_NOLEAP.sh @@ -0,0 +1,6 @@ +#!/bin/sh +set -xvue +TEST=%Chunk_END_DATE% +TEST2=%CHUNK% +TEST3=%PREV% +TEST4=%NUMMEMBERS% diff --git a/test/regression/test_ecmwf_with_saga_python/conf/autosubmit.conf b/test/regression/test_mistral_with_saga/conf/autosubmit.conf similarity index 100% rename from test/regression/test_ecmwf_with_saga_python/conf/autosubmit.conf rename to test/regression/test_mistral_with_saga/conf/autosubmit.conf diff --git 
a/test/regression/test_ecmwf_with_saga_python/conf/expdef.conf b/test/regression/test_mistral_with_saga/conf/expdef.conf similarity index 98% rename from test/regression/test_ecmwf_with_saga_python/conf/expdef.conf rename to test/regression/test_mistral_with_saga/conf/expdef.conf index 32c7e6235..c8a943b25 100644 --- a/test/regression/test_ecmwf_with_saga_python/conf/expdef.conf +++ b/test/regression/test_mistral_with_saga/conf/expdef.conf @@ -4,7 +4,7 @@ EXPID = EXPID-HERE # HPC name. # No need to change -HPCARCH = ecmwf-cca +HPCARCH = moore [experiment] # Supply the list of start dates. Available formats: YYYYMMDD YYYYMMDDhh YYYYMMDDhhmm @@ -70,4 +70,4 @@ PROJECT_PATH = PROJECT-PATH-HERE FILE_PROJECT_CONF = # Where is JOBS CONFIGURATION file location relative to project root path FILE_JOBS_CONF = -JOB_SCRIPTS_TYPE = python +JOB_SCRIPTS_TYPE = bash diff --git a/test/regression/test_mistral_with_saga/conf/jobs.conf b/test/regression/test_mistral_with_saga/conf/jobs.conf new file mode 100644 index 000000000..e66d93ec6 --- /dev/null +++ b/test/regression/test_mistral_with_saga/conf/jobs.conf @@ -0,0 +1,19 @@ +[LOCAL_SETUP] +FILE = TEST_NOLEAP.sh +PLATFORM = LOCAL +RUNNING = chunk +WALLCLOCK = 00:10 +DEPENDENCIES = LOCAL_SETUP-1 + +[REMOTE_SETUP] +FILE = TEST_NOLEAP.sh +RUNNING = chunk +WALLCLOCK = 00:10 +DEPENDENCIES = REMOTE_SETUP-1 + +[REMOTE_PARALLEL_SETUP] +FILE = TEST_NOLEAP.sh +RUNNING = chunk +WALLCLOCK = 00:10 +PROCESSORS = 24 +DEPENDENCIES = REMOTE_PARALLEL_SETUP-1 \ No newline at end of file diff --git a/test/regression/test_ecmwf_with_saga_python/conf/proj.conf b/test/regression/test_mistral_with_saga/conf/proj.conf similarity index 100% rename from test/regression/test_ecmwf_with_saga_python/conf/proj.conf rename to test/regression/test_mistral_with_saga/conf/proj.conf diff --git a/test/regression/test_mistral_with_saga/src/TEST_NOLEAP.sh b/test/regression/test_mistral_with_saga/src/TEST_NOLEAP.sh new file mode 100644 index 000000000..e9a10ba64 --- 
/dev/null +++ b/test/regression/test_mistral_with_saga/src/TEST_NOLEAP.sh @@ -0,0 +1,6 @@ +#!/bin/sh +set -xvue +TEST=%Chunk_END_DATE% +TEST2=%CHUNK% +TEST3=%PREV% +TEST4=%NUMMEMBERS% -- GitLab From 8d0fdeb1c1b7b01c8dd77f484332502ad7e01da4 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Thu, 22 Dec 2016 18:14:37 +0100 Subject: [PATCH 08/60] A typo was fixed --- autosubmit/config/files/platforms.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autosubmit/config/files/platforms.conf b/autosubmit/config/files/platforms.conf index 5194bb0ee..da68dac3d 100644 --- a/autosubmit/config/files/platforms.conf +++ b/autosubmit/config/files/platforms.conf @@ -27,7 +27,7 @@ ## If given, Autosubmit will add jobs to the given queue. Required for some platforms. # QUEUE = ## Optional. If given, Autosubmit will submit the serial jobs with the exclusivity directive. -# QUEUE = +# EXCLUSIVITY = ## Optional. If specified, autosubmit will run jobs with only one processor in the specified platform. # SERIAL_PLATFORM = SERIAL_PLATFORM_NAME ## Optional. If specified, autosubmit will run jobs with only one processor in the specified queue. 
-- GitLab From cbc9d317a0de04a3b6c039433b443b2b892ba566 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Fri, 23 Dec 2016 08:59:24 +0100 Subject: [PATCH 09/60] Queue directive was refactored --- autosubmit/config/files/platforms.conf | 4 ---- autosubmit/job/job.py | 2 -- autosubmit/platforms/ecplatform.py | 4 ++-- autosubmit/platforms/locplatform.py | 2 +- autosubmit/platforms/lsfplatform.py | 2 +- autosubmit/platforms/paramiko_platform.py | 2 -- autosubmit/platforms/paramiko_submitter.py | 2 -- autosubmit/platforms/pbsplatform.py | 6 +++--- autosubmit/platforms/platform.py | 2 -- autosubmit/platforms/psplatform.py | 2 +- autosubmit/platforms/sgeplatform.py | 2 +- autosubmit/platforms/slurmplatform.py | 25 ++++------------------ 12 files changed, 13 insertions(+), 42 deletions(-) diff --git a/autosubmit/config/files/platforms.conf b/autosubmit/config/files/platforms.conf index da68dac3d..b03cb2ce0 100644 --- a/autosubmit/config/files/platforms.conf +++ b/autosubmit/config/files/platforms.conf @@ -16,10 +16,6 @@ # ADD_PROJECT_TO_HOST = False ## User for the machine scheduler. Required # USER = -## Account for the machine scheduler. Optional -# ACCOUNT = -## Partition for the machine scheduler. Optional -# PARTITION = ## Path to the scratch directory for the machine. Required. # SCRATCH_DIR = /scratch ## If true, Autosubmit test command can use this queue as a main queue. 
Defaults to False diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 4abc34d71..eeb75b71f 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -578,8 +578,6 @@ class Job(object): parameters['CURRENT_HOST'] = job_platform.host parameters['CURRENT_QUEUE'] = self.queue parameters['CURRENT_USER'] = job_platform.user - parameters['CURRENT_ACCOUNT'] = job_platform.account - parameters['CURRENT_PARTITION'] = job_platform.partition parameters['CURRENT_PROJ'] = job_platform.project parameters['CURRENT_BUDG'] = job_platform.budget parameters['CURRENT_RESERVATION'] = job_platform.reservation diff --git a/autosubmit/platforms/ecplatform.py b/autosubmit/platforms/ecplatform.py index 5dd8ca472..47875e142 100644 --- a/autosubmit/platforms/ecplatform.py +++ b/autosubmit/platforms/ecplatform.py @@ -173,7 +173,7 @@ class EcHeader: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str @@ -232,7 +232,7 @@ class EcCcaHeader: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str diff --git a/autosubmit/platforms/locplatform.py b/autosubmit/platforms/locplatform.py index b4bca2412..6feda7827 100644 --- a/autosubmit/platforms/locplatform.py +++ b/autosubmit/platforms/locplatform.py @@ -160,7 +160,7 @@ class LocalHeader: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str diff --git a/autosubmit/platforms/lsfplatform.py b/autosubmit/platforms/lsfplatform.py index a3751cd92..9dec0f5f3 100644 --- a/autosubmit/platforms/lsfplatform.py +++ b/autosubmit/platforms/lsfplatform.py @@ -94,7 +94,7 @@ class LsfHeader: """ Returns queue directive 
for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index ad6267efd..c34a2fda6 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -399,8 +399,6 @@ class ParamikoPlatform(Platform): header = header.replace('%EXCLUSIVITY_DIRECTIVE%', self.header.get_exclusivity(job)) if hasattr(self.header, 'get_account_directive'): header = header.replace('%ACCOUNT_DIRECTIVE%', self.header.get_account_directive(job)) - if hasattr(self.header, 'get_partition_directive'): - header = header.replace('%PARTITION_DIRECTIVE%', self.header.get_partition_directive(job)) if hasattr(self.header, 'get_memory_directive'): header = header.replace('%MEMORY_DIRECTIVE%', self.header.get_memory_directive(job)) if hasattr(self.header, 'get_memory_per_task_directive'): diff --git a/autosubmit/platforms/paramiko_submitter.py b/autosubmit/platforms/paramiko_submitter.py index 0e7a37594..c1dd37312 100644 --- a/autosubmit/platforms/paramiko_submitter.py +++ b/autosubmit/platforms/paramiko_submitter.py @@ -120,8 +120,6 @@ class ParamikoSubmitter(Submitter): remote_platform.reservation = AutosubmitConfig.get_option(parser, section, 'RESERVATION', '') remote_platform.exclusivity = AutosubmitConfig.get_option(parser, section, 'EXCLUSIVITY', '').lower() remote_platform.user = AutosubmitConfig.get_option(parser, section, 'USER', None) - remote_platform.account = AutosubmitConfig.get_option(parser, section, 'ACCOUNT', '') - remote_platform.partition = AutosubmitConfig.get_option(parser, section, 'PARTITION', '') remote_platform.scratch = AutosubmitConfig.get_option(parser, section, 'SCRATCH_DIR', None) remote_platform._default_queue = AutosubmitConfig.get_option(parser, section, 'QUEUE', None) remote_platform._serial_queue = 
AutosubmitConfig.get_option(parser, section, 'SERIAL_QUEUE', None) diff --git a/autosubmit/platforms/pbsplatform.py b/autosubmit/platforms/pbsplatform.py index d94d3d5aa..a515b512f 100644 --- a/autosubmit/platforms/pbsplatform.py +++ b/autosubmit/platforms/pbsplatform.py @@ -108,7 +108,7 @@ class Pbs12Header: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str @@ -151,7 +151,7 @@ class Pbs10Header: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str @@ -195,7 +195,7 @@ class Pbs11Header: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index 8b95ad4c2..ca23d2cb2 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -29,8 +29,6 @@ class Platform(object): self.scratch_free_space = None self.host = '' self.user = '' - self.account = '' - self.partition = '' self.project = '' self.budget = '' self.reservation = '' diff --git a/autosubmit/platforms/psplatform.py b/autosubmit/platforms/psplatform.py index 8ba0f1b29..1771c0ef0 100644 --- a/autosubmit/platforms/psplatform.py +++ b/autosubmit/platforms/psplatform.py @@ -88,7 +88,7 @@ class PsHeader: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str diff --git a/autosubmit/platforms/sgeplatform.py b/autosubmit/platforms/sgeplatform.py index 73aa130b8..b7b1aeb8d 100644 --- a/autosubmit/platforms/sgeplatform.py +++ 
b/autosubmit/platforms/sgeplatform.py @@ -93,7 +93,7 @@ class SgeHeader: """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py index 4d3d2a2bf..048796867 100644 --- a/autosubmit/platforms/slurmplatform.py +++ b/autosubmit/platforms/slurmplatform.py @@ -37,7 +37,7 @@ class SlurmPlatform(ParamikoPlatform): """ Returns queue directive for the specified job - :param job: job to create queue directibve for + :param job: job to create queue directive for :type job: Job :return: queue directive :rtype: str @@ -114,7 +114,7 @@ class SlurmHeader: if job.parameters['CURRENT_QUEUE'] == '': return "" else: - return "SBATCH --qos {0}".format(job.parameters['CURRENT_QUEUE']) + return "SBATCH -p {0}".format(job.parameters['CURRENT_QUEUE']) # noinspection PyMethodMayBeStatic,PyUnusedLocal def get_account_directive(self, job): @@ -127,23 +127,8 @@ class SlurmHeader: :rtype: str """ # There is no account, so directive is empty - if job.parameters['CURRENT_ACCOUNT'] != '': - return "SBATCH -A {0}".format(job.parameters['CURRENT_ACCOUNT']) - return "" - - # noinspection PyMethodMayBeStatic,PyUnusedLocal - def get_partition_directive(self, job): - """ - Returns partition directive for the specified job - - :param job: job to create partition directive for - :type job: Job - :return: partition directive - :rtype: str - """ - # There is no partition, so directive is empty - if job.parameters['CURRENT_PARTITION'] != '': - return "SBATCH -p {0}".format(job.parameters['CURRENT_PARTITION']) + if job.parameters['CURRENT_PROJ'] != '': + return "SBATCH -A {0}".format(job.parameters['CURRENT_PROJ']) return "" # noinspection PyMethodMayBeStatic,PyUnusedLocal @@ -183,7 +168,6 @@ class SlurmHeader: # #%QUEUE_DIRECTIVE% #%ACCOUNT_DIRECTIVE% - #%PARTITION_DIRECTIVE% 
#%MEMORY_DIRECTIVE% #%MEMORY_PER_TASK_DIRECTIVE% #SBATCH -n %NUMPROC% @@ -202,7 +186,6 @@ class SlurmHeader: # #%QUEUE_DIRECTIVE% #%ACCOUNT_DIRECTIVE% - #%PARTITION_DIRECTIVE% #%MEMORY_DIRECTIVE% #%MEMORY_PER_TASK_DIRECTIVE% #SBATCH -n %NUMPROC% -- GitLab From f20524f823a81722886ffb1af217f02157f04d9c Mon Sep 17 00:00:00 2001 From: jlope2 Date: Fri, 23 Dec 2016 10:59:24 +0100 Subject: [PATCH 10/60] Some changes in regression tests were done --- test/regression/README | 10 ++++ .../test_ecmwf_with_paramiko/conf/jobs.conf | 1 + test/regression/tests.conf | 47 ++++++++++--------- 3 files changed, 37 insertions(+), 21 deletions(-) diff --git a/test/regression/README b/test/regression/README index a7e26a023..62059634b 100644 --- a/test/regression/README +++ b/test/regression/README @@ -69,3 +69,13 @@ python tests_runner.py --only "test1 test2 test3" As you can see on the example above, the list should be passed between quotes (") and the items should be separated by a whitespace. Just to remind, the name of the tests should be the same as the section on the 'tests.conf' file. + + +##################### +How to add a new test +##################### + +1) Create a folder with the test files (configuration and sources) following the same structure +as the other test folders. + +2) Add the new test configuration in the `tests.conf` file following the same INI style. 
diff --git a/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf b/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf index 34febdbf5..a3154b323 100644 --- a/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf +++ b/test/regression/test_ecmwf_with_paramiko/conf/jobs.conf @@ -17,6 +17,7 @@ FILE = TEST_NOLEAP.sh RUNNING = chunk WALLCLOCK = 00:10 TASKS = 24 +PLATFORM = ecmwf-cca-hyperthreading PROCESSORS = 48 DEPENDENCIES = REMOTE_PARALLEL_SETUP-1 diff --git a/test/regression/tests.conf b/test/regression/tests.conf index 1ec683308..abc69541b 100644 --- a/test/regression/tests.conf +++ b/test/regression/tests.conf @@ -1,6 +1,6 @@ -########################## -## Tests on Marenostrum ## -########################## +#Tests on Marenostrum +###################### + [test_mn_with_paramiko] HPCARCH = marenostrum3 DESCRIPTION = "Simple experiment on MN3 with paramiko" @@ -22,9 +22,10 @@ DESCRIPTION = "Simple experiment on MN3 with saga and python" SRC_PATH = test_mn_with_saga_python -########################## -##### Tests on ECMWF ##### -########################## + +#Tests on ECMWF +################ + [test_ecmwf_with_paramiko] HPCARCH = ecmwf-cca DESCRIPTION = "Simple experiment on ECMWF with paramiko" @@ -35,21 +36,11 @@ HPCARCH = ecmwf-cca DESCRIPTION = "Simple experiment on ECMWF with saga" SRC_PATH = test_ecmwf_with_saga -[test_ecmwf_with_paramiko_python] -HPCARCH = ecmwf-cca -DESCRIPTION = "Simple experiment on ECMWF with paramiko and python" -SRC_PATH = test_ecmwf_with_paramiko_python - -[test_ecmwf_with_saga_python] -HPCARCH = ecmwf-cca -DESCRIPTION = "Simple experiment on ECMWF with saga and python" -SRC_PATH = test_ecmwf_with_saga_python +#Tests on moore +################ -########################## -##### Tests on moore ##### -########################## [test_moore_with_paramiko] HPCARCH = moore DESCRIPTION = "Simple experiment on moore with paramiko" @@ -82,9 +73,9 @@ SRC_PATH = test_large_experiment_on_moore_with_saga -########################## 
-##### Tests on SEDEMA #### -########################## +#Tests on SEDEMA +################# + [test_sedema_with_paramiko] HPCARCH = SEDEMA DESCRIPTION = "Simple experiment on SEDEMA with paramiko" @@ -105,3 +96,17 @@ HPCARCH = SEDEMA DESCRIPTION = "Simple experiment on SEDEMA with saga and python" SRC_PATH = test_sedema_with_saga_python + + +#Tests on mistral +################## + +[test_mistral_with_paramiko] +HPCARCH = mistral +DESCRIPTION = "Simple experiment on mistral with paramiko" +SRC_PATH = test_mistral_with_paramiko + +[test_mistral_with_saga] +HPCARCH = mistral +DESCRIPTION = "Simple experiment on mistral with saga" +SRC_PATH = test_mistral_with_saga \ No newline at end of file -- GitLab From 6e5c98adb96f0d03ec1ff40c1d0e81f0baa480be Mon Sep 17 00:00:00 2001 From: jlope2 Date: Fri, 23 Dec 2016 11:01:46 +0100 Subject: [PATCH 11/60] A minor bug was fixed --- autosubmit/platforms/slurmplatform.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py index 048796867..7db745702 100644 --- a/autosubmit/platforms/slurmplatform.py +++ b/autosubmit/platforms/slurmplatform.py @@ -173,8 +173,8 @@ class SlurmHeader: #SBATCH -n %NUMPROC% #SBATCH -t %WALLCLOCK%:00 #SBATCH -J %JOBNAME% - #SBATCH -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.out - #SBATCH -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.err + #SBATCH -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%OUT_LOG_DIRECTIVE% + #SBATCH -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%ERR_LOG_DIRECTIVE% # ############################################################################### """) @@ -191,8 +191,8 @@ class SlurmHeader: #SBATCH -n %NUMPROC% #SBATCH -t %WALLCLOCK%:00 #SBATCH -J %JOBNAME% - #SBATCH -o 
%CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.out - #SBATCH -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.err + #SBATCH -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%OUT_LOG_DIRECTIVE% + #SBATCH -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%ERR_LOG_DIRECTIVE% # ############################################################################### """) \ No newline at end of file -- GitLab From 1a7fb8d8a15b50460edf669aa92f1f23ba54e502 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Wed, 4 Jan 2017 15:28:28 +0100 Subject: [PATCH 12/60] Minor bug with SAGA platforms was fixed --- autosubmit/platforms/saga_platform.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/autosubmit/platforms/saga_platform.py b/autosubmit/platforms/saga_platform.py index eeec8f6ec..0975a22a1 100644 --- a/autosubmit/platforms/saga_platform.py +++ b/autosubmit/platforms/saga_platform.py @@ -46,7 +46,8 @@ class SagaPlatform(Platform): raise Exception("Could't send file {0} to {1}:{2}".format(os.path.join(self.tmp_path, filename), self.host, self.get_files_path())) # noinspection PyTypeChecker - out = saga.filesystem.File("file://{0}".format(os.path.join(self.tmp_path, filename))) + out = saga.filesystem.File("file://{0}".format(os.path.join(self.tmp_path, filename)), + session=self.service.session) if self.type == 'local': out.copy("file://{0}".format(os.path.join(self.tmp_path, 'LOG_' + self.expid, filename)), saga.filesystem.CREATE_PARENTS) @@ -144,7 +145,8 @@ class SagaPlatform(Platform): 'LOG_' + self.expid))) else: # noinspection PyTypeChecker - self.directory = saga.filesystem.Directory("sftp://{0}{1}".format(self.host, self.get_files_path())) + self.directory = saga.filesystem.Directory("sftp://{0}{1}".format(self.host, self.get_files_path()), + session=self.session) except: return False @@ -184,7 +186,8 @@ class 
SagaPlatform(Platform): else: # noinspection PyTypeChecker out = saga.filesystem.File("sftp://{0}{1}".format(self.host, os.path.join(self.get_files_path(), - filename))) + filename)), + session=self.service.session) out.remove() out.close() return True -- GitLab From 069bfb29a0e4d33d3b18920b63075f413401faa2 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Fri, 13 Jan 2017 18:14:50 +0100 Subject: [PATCH 13/60] Paramiko send file action was refactored in order to send with same permissions --- autosubmit/platforms/paramiko_platform.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index c34a2fda6..95d2b84cc 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -91,6 +91,8 @@ class ParamikoPlatform(Platform): try: ftp = self._ssh.open_sftp() ftp.put(os.path.join(self.tmp_path, filename), os.path.join(self.get_files_path(), filename)) + ftp.chmod(os.path.join(self.get_files_path(), filename), + os.stat(os.path.join(self.tmp_path, filename)).st_mode) ftp.close() return True except BaseException as e: -- GitLab From 22bc1eb016fe2ba3ae4ab91ea55a8f53a46ac134 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Fri, 13 Jan 2017 18:15:45 +0100 Subject: [PATCH 14/60] First version of the threads-based Jobs Wrapper was uploaded --- autosubmit/job/job_list.py | 5 +-- autosubmit/job/job_package.py | 60 +++++++++++++++++++++++++++++ autosubmit/platforms/lsfplatform.py | 50 ++++++++++++++++++++++++ 3 files changed, 112 insertions(+), 3 deletions(-) diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 26a8d2a8a..d58daaf3b 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -32,8 +32,7 @@ from shutil import move from autosubmit.job.job_common import Status, Type from autosubmit.job.job import Job -from autosubmit.job.job_package import JobPackageSimple -from autosubmit.job.job_package import JobPackageArray 
+from autosubmit.job.job_package import JobPackageSimple, JobPackageArray, JobPackageThread from autosubmit.config.log import Log from autosubmit.date.chunk_date_lib import date2str, parse_date @@ -775,7 +774,7 @@ class JobList: packages_to_submit = list() if platform.allow_arrays: for section_list in jobs_to_submit_by_section.values(): - packages_to_submit.append(JobPackageArray(section_list)) + packages_to_submit.append(JobPackageThread(section_list)) return packages_to_submit for job in jobs_to_submit: packages_to_submit.append(JobPackageSimple([job])) diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index d177e44ac..503032eae 100644 --- a/autosubmit/job/job_package.py +++ b/autosubmit/job/job_package.py @@ -177,3 +177,63 @@ class JobPackageArray(JobPackageBase): self.jobs[i - 1].id = str(package_id) + '[{0}]'.format(i) self.jobs[i - 1].status = Status.SUBMITTED self.jobs[i - 1].write_submit_time() + + +class JobPackageThread(JobPackageBase): + """ + Class to manage the package of jobs to be submitted by autosubmit + """ + FILE_PREFIX = 'ASThread' + + def __init__(self, jobs): + self._job_scripts = {} + self._common_script = None + self._wallclock = '00:00' + self._num_processors = '0' + for job in jobs: + if job.wallclock > self._wallclock: + self._wallclock = job.wallclock + self._num_processors = str(int(self._num_processors) + int(job.processors)) + super(JobPackageThread, self).__init__(jobs) + + def _create_scripts(self, configuration): + timestamp = str(int(time.time())) + filename = self.FILE_PREFIX + "_{0}_{1}_{2}".format(timestamp, self._num_processors, len(self.jobs)) + for i in range(1, len(self.jobs) + 1): + self._job_scripts[self.jobs[i - 1].name] = self.jobs[i - 1].create_script(configuration) + self.jobs[i - 1].remote_logs = ( + self._job_scripts[self.jobs[i - 1].name] + ".{0}.out".format(i - 1), + self._job_scripts[self.jobs[i - 1].name] + ".{0}.err".format(i - 1) + ) + self._common_script = 
self._create_common_script(filename) + + def _create_common_script(self, filename): + + script_content = self.platform.header.thread_header(filename, self._wallclock, + self._num_processors, len(self.jobs), + self._job_scripts.values()) + filename += '.cmd' + open(os.path.join(self._tmp_path, filename), 'w').write(script_content) + os.chmod(os.path.join(self._tmp_path, filename), 0o775) + return filename + + def _send_files(self): + for job in self.jobs: + self.platform.send_file(self._job_scripts[job.name]) + self.platform.send_file(self._common_script) + + def _do_submission(self): + for job in self.jobs: + self.platform.remove_stat_file(job.name) + self.platform.remove_completed_file(job.name) + + package_id = self.platform.submit_job(None, self._common_script) + + if package_id is None: + raise Exception('Submission failed') + + for i in range(1, len(self.jobs) + 1): + Log.info("{0} submitted", self.jobs[i - 1].name) + self.jobs[i - 1].id = str(package_id) + self.jobs[i - 1].status = Status.SUBMITTED + self.jobs[i - 1].write_submit_time() diff --git a/autosubmit/platforms/lsfplatform.py b/autosubmit/platforms/lsfplatform.py index 9dec0f5f3..cda8819f9 100644 --- a/autosubmit/platforms/lsfplatform.py +++ b/autosubmit/platforms/lsfplatform.py @@ -146,6 +146,56 @@ class LsfHeader: ./$SCRIPT """.format(filename, array_id, wallclock, num_processors)) + @classmethod + def thread_header(cls, filename, wallclock, num_processors, num_jobs, job_scripts): + return textwrap.dedent("""\ + #!/usr/bin/env python + ############################################################################### + # {0} + ############################################################################### + # + #BSUB -J {0} + #BSUB -o {0}.out + #BSUB -e {0}.err + #BSUB -W {1} + #BSUB -n {2} + # + ############################################################################### + + import os + import sys + from threading import Thread + from commands import getstatusoutput + + class JobThread(Thread): + 
def __init__ (self, template, id_run): + Thread.__init__(self) + self.template = template + self.id_run = id_run + + def run(self): + out = str(self.template) + "." + str(self.id_run) + ".out" + err = str(self.template) + "." + str(self.id_run) + ".err" + command = str(self.template) + " " + str(self.id_run) + " " + os.getcwd() + (self.status) = getstatusoutput(command + " > " + out + " 2> " + err) + + # Splitting the original hosts file + os.system("cat {5} | split -a 2 -d -l {3} - mlist-{6}-") + + pid_list = [] + scripts = {4} + + for i in range(len(scripts)): + current = JobThread(scripts[i], i) + pid_list.append(current) + current.start() + + for pid in pid_list: + pid.join() + print "Status from ", pid.template,"is", pid.status + """.format(filename, wallclock, num_processors, (int(num_processors) / num_jobs), str(job_scripts), + "${LSB_DJOB_HOSTFILE}", "${LSB_JOBID}")) + SERIAL = textwrap.dedent("""\ ############################################################################### # %TASKTYPE% %EXPID% EXPERIMENT -- GitLab From 0b3f81dbb732da2a6103360f626f8954676c1d14 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Mon, 16 Jan 2017 11:07:17 +0100 Subject: [PATCH 15/60] A typo was fixed --- autosubmit/job/job_package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index 503032eae..86aa38626 100644 --- a/autosubmit/job/job_package.py +++ b/autosubmit/job/job_package.py @@ -193,7 +193,7 @@ class JobPackageThread(JobPackageBase): for job in jobs: if job.wallclock > self._wallclock: self._wallclock = job.wallclock - self._num_processors = str(int(self._num_processors) + int(job.processors)) + self._num_processors = str(int(self._num_processors) + int(job.processors)) super(JobPackageThread, self).__init__(jobs) def _create_scripts(self, configuration): -- GitLab From 89333caaa0a1e86b7375a9991092ba9f15f71472 Mon Sep 17 00:00:00 2001 From: jlope2 Date: Tue, 17 Jan 2017 16:28:19 +0100 
Subject: [PATCH 16/60] Minor was fixed in get_file action for Paramiko platforms --- autosubmit/platforms/paramiko_platform.py | 3 +++ autosubmit/platforms/platform.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index 95d2b84cc..2aee83e0f 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -128,6 +128,9 @@ class ParamikoPlatform(Platform): ftp.close() return True except BaseException: + # ftp.get creates a local file anyway + if os.path.exists(local_path): + os.remove(local_path) if must_exist: raise Exception('File {0} does not exists'.format(filename)) return False diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index ca23d2cb2..747d2db93 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -217,7 +217,7 @@ class Platform(object): :param job_name: name of job to check :type job_name: str - :return: True if succesful, False otherwise + :return: True if successful, False otherwise :rtype: bool """ filename = job_name + '_STAT' @@ -232,7 +232,7 @@ class Platform(object): :param job_name: name of job to check :type job_name: str - :return: True if succesful, False otherwise + :return: True if successful, False otherwise :rtype: bool """ filename = job_name + '_COMPLETED' -- GitLab From dba39d4ff8afe206eb4cd2b2e5519f5c67f9b02a Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Mon, 23 Jan 2017 18:02:24 +0100 Subject: [PATCH 17/60] Minor changes were done in check_script process --- autosubmit/job/job.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index eeb75b71f..635924782 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -699,9 +699,6 @@ class Job(object): if not out: Log.warning("The following set of variables to be substituted in template script is not part of " 
"parameters set: {0}", str(set(variables) - set(parameters))) - else: - self.create_script(as_conf) - return out def write_submit_time(self): -- GitLab From 5d7656a1d8f368829fe74ae3de8703b9b406e4c8 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Wed, 25 Jan 2017 11:55:53 +0100 Subject: [PATCH 18/60] A bug in CHUNKINI parsing process was fixed. Fixes #234 --- autosubmit/config/config_common.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 022ded75c..65d74c45c 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -653,14 +653,18 @@ class AutosubmitConfig: """ return int(self._exp_parser.get('experiment', 'NUMCHUNKS')) - def get_chunk_ini(self): + def get_chunk_ini(self, default=1): """ Returns the first chunk from where the experiment will start + :param default: :return: initial chunk :rtype: int """ - return int(self.get_option(self._exp_parser, 'experiment', 'CHUNKINI', 1)) + chunk_ini = self.get_option(self._exp_parser, 'experiment', 'CHUNKINI', default) + if chunk_ini == '': + return default + return int(chunk_ini) def get_chunk_size_unit(self): """ -- GitLab From 779c856f677fb94a7e9ffdc23c08497f1c7a18d5 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Wed, 25 Jan 2017 12:04:18 +0100 Subject: [PATCH 19/60] Some unit tests were fixed --- test/unit/test_job.py | 4 ---- test/unit/test_saga_platform.py | 6 ++++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/test/unit/test_job.py b/test/unit/test_job.py index 9757692c0..9bfe2bf8e 100644 --- a/test/unit/test_job.py +++ b/test/unit/test_job.py @@ -225,16 +225,12 @@ class TestJob(TestCase): config = Mock(spec=AutosubmitConfig) config.get_project_dir = Mock(return_value='/project/dir') - create_script_mock = Mock() - self.job.create_script = create_script_mock - # act checked = self.job.check_script(config, self.job.parameters) # assert 
update_parameters_mock.assert_called_with(config, self.job.parameters) update_content_mock.assert_called_with(config) - create_script_mock.assert_called_with(config) self.assertTrue(checked) def test_exists_completed_file_then_sets_status_to_completed(self): diff --git a/test/unit/test_saga_platform.py b/test/unit/test_saga_platform.py index 3d03a203b..2a576ad7c 100644 --- a/test/unit/test_saga_platform.py +++ b/test/unit/test_saga_platform.py @@ -26,6 +26,8 @@ class TestSagaPlatform(TestCase): def setUp(self): self.experiment_id = 'random-id' self.platform = SagaPlatform(self.experiment_id, 'test', FakeBasicConfig) + self.platform.service = Mock() + self.platform.service.session = Mock() def test_check_status_returns_completed_if_job_id_not_exists(self): # arrange @@ -153,8 +155,8 @@ class TestSagaPlatform(TestCase): self.assertTrue(deleted) sys.modules['saga'].filesystem.File.assert_called_once_with( - "sftp://{0}{1}".format(self.platform.host, os.path.join(self.platform.get_files_path(), - 'file/path'))) + "sftp://{0}{1}".format(self.platform.host, os.path.join(self.platform.get_files_path(),'file/path')), + session=self.platform.service.session) out_mock.remove.assert_called_once_with() out_mock.close.assert_called_once_with() -- GitLab From 9f36e584673ccb625eae8f676dece5d18eb27029 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Thu, 26 Jan 2017 17:28:19 +0100 Subject: [PATCH 20/60] First implementation of the es/auto-ecearth/#67 was developed --- autosubmit/autosubmit.py | 70 +++++++++++++++------------- autosubmit/config/config_common.py | 32 +++++++------ autosubmit/job/job.py | 15 +++--- autosubmit/job/job_exceptions.py | 29 ++++++++++++ autosubmit/job/job_list.py | 11 ++--- autosubmit/job/job_package.py | 5 ++ test/unit/test_autosubmit_ config.py | 5 +- test/unit/test_job.py | 10 ---- 8 files changed, 106 insertions(+), 71 deletions(-) create mode 100644 autosubmit/job/job_exceptions.py diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 
d97121137..2d4c014ec 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -80,7 +80,7 @@ from notifications.mail_notifier import MailNotifier from notifications.notifier import Notifier from platforms.saga_submitter import SagaSubmitter from platforms.paramiko_submitter import ParamikoSubmitter - +from autosubmit.job.job_exceptions import WrongTemplateException # noinspection PyUnusedLocal def signal_handler(signal_received, frame): @@ -563,20 +563,20 @@ class Autosubmit: :rtype: bool """ if expid is None: - Log.critical("Missing expid.") + Log.critical("Missing experiment id") BasicConfig.read() exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid) tmp_path = os.path.join(exp_path, BasicConfig.LOCAL_TMP_DIR) if not os.path.exists(exp_path): - Log.critical("The directory %s is needed and does not exist." % exp_path) + Log.critical("The directory %s is needed and does not exist" % exp_path) Log.warning("Does an experiment with the given id exist?") return 1 # checking if there is a lock file to avoid multiple running on the same expid try: with portalocker.Lock(os.path.join(tmp_path, 'autosubmit.lock'), timeout=1): - Log.info("Preparing .lock file to avoid multiple instances with same expid.") + Log.info("Preparing .lock file to avoid multiple instances with same experiment id") Log.set_file(os.path.join(tmp_path, 'run.log')) os.system('clear') @@ -610,7 +610,7 @@ class Autosubmit: job_list = Autosubmit.load_job_list(expid, as_conf) Log.debug("Starting from job list restored from {0} files", pkl_dir) - Log.debug("Length of joblist: {0}", len(job_list)) + Log.debug("Length of the jobs list: {0}", len(job_list)) Autosubmit._load_parameters(as_conf, job_list, submitter.platforms) @@ -682,7 +682,7 @@ class Autosubmit: Log.info("No more jobs to run.") if len(job_list.get_failed()) > 0: - Log.info("Some jobs have failed and reached maximun retrials") + Log.info("Some jobs have failed and reached maximum retrials") return False else: Log.result("Run 
successful") @@ -709,9 +709,12 @@ class Autosubmit: try: job_package.submit(as_conf, job_list.parameters) save = True + except WrongTemplateException as e: + Log.error("Invalid parameter substitution in {0} template", e.job_name) + raise except Exception: Log.error("{0} submission failed", platform.name) - continue + raise return save @staticmethod @@ -948,11 +951,11 @@ class Autosubmit: def recovery(expid, noplot, save, all_jobs, hide): """ Method to check all active jobs. If COMPLETED file is found, job status will be changed to COMPLETED, - otherwise it will be set to WAITING. It will also update the joblist. + otherwise it will be set to WAITING. It will also update the jobs list. :param expid: identifier of the experiment to recover :type expid: str - :param save: If true, recovery saves changes to joblist + :param save: If true, recovery saves changes to the jobs list :type save: bool :param all_jobs: if True, it tries to get completed files for all jobs, not only active. :type all_jobs: bool @@ -1022,7 +1025,7 @@ class Autosubmit: Log.info("CHANGED job '{0}' status to WAITING".format(job.name)) end = datetime.datetime.now() Log.info("Time spent: '{0}'".format(end - start)) - Log.info("Updating joblist") + Log.info("Updating the jobs list") sys.setrecursionlimit(50000) job_list.update_list(as_conf) @@ -1034,31 +1037,34 @@ class Autosubmit: Log.result("Recovery finalized") if not noplot: - Log.info("\nPloting joblist...") + Log.info("\nPlotting the jobs list...") monitor_exp = Monitor() monitor_exp.generate_output(expid, job_list.get_job_list(), show=not hide) return True @staticmethod - def check(expid): + def check(experiment_id): """ Checks experiment configuration and warns about any detected error or inconsistency. 
- :param expid: experiment identifier: - :type expid: str + :param experiment_id: experiment identifier: + :type experiment_id: str """ BasicConfig.read() - exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid) + exp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id) if not os.path.exists(exp_path): - Log.critical("The directory %s is needed and does not exist." % exp_path) + Log.critical("The directory {0} is needed and does not exist.", exp_path) Log.warning("Does an experiment with the given id exist?") - return 1 + return False - Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, BasicConfig.LOCAL_TMP_DIR, 'check_exp.log')) - as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory()) + log_file = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id, BasicConfig.LOCAL_TMP_DIR, 'check_exp.log') + Log.set_file(log_file) + + as_conf = AutosubmitConfig(experiment_id, BasicConfig, ConfigParserFactory()) if not as_conf.check_conf_files(): return False + project_type = as_conf.get_project_type() if project_type != "none": if not as_conf.check_proj(): @@ -1069,17 +1075,16 @@ class Autosubmit: if len(submitter.platforms) == 0: return False - pkl_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, 'pkl') - job_list = Autosubmit.load_job_list(expid, as_conf) + pkl_dir = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id, 'pkl') + job_list = Autosubmit.load_job_list(experiment_id, as_conf) Log.debug("Job list restored from {0} files", pkl_dir) Autosubmit._load_parameters(as_conf, job_list, submitter.platforms) - hpcarch = as_conf.get_platform() + hpc_architecture = as_conf.get_platform() for job in job_list.get_job_list(): if job.platform_name is None: - job.platform_name = hpcarch - # noinspection PyTypeChecker + job.platform_name = hpc_architecture job.platform = submitter.platforms[job.platform_name.lower()] job.update_parameters(as_conf, job_list.parameters) @@ -1586,10 +1591,10 @@ class Autosubmit: :param expid: experiment 
identifier :type expid: str - :param noplot: if True, method omits final ploting of joblist. Only needed on large experiments when plotting - time can be much larger than creation time. + :param noplot: if True, method omits final plotting of the jobs list. Only needed on large experiments when + plotting time can be much larger than creation time. :type noplot: bool - :return: True if succesful, False if not + :return: True if successful, False if not :rtype: bool :param hide: hides plot window :type hide: bool @@ -1648,7 +1653,7 @@ class Autosubmit: return False rerun = as_conf.get_rerun() - Log.info("\nCreating joblist...") + Log.info("\nCreating the jobs list...") job_list = JobList(expid, BasicConfig, ConfigParserFactory(), Autosubmit._get_job_list_persistence(expid, as_conf)) @@ -1660,7 +1665,8 @@ class Autosubmit: date_format = 'H' if date.minute > 1: date_format = 'M' - job_list.generate(date_list, member_list, num_chunks, chunk_ini, parameters, date_format, as_conf.get_retrials(), + job_list.generate(date_list, member_list, num_chunks, chunk_ini, parameters, date_format, + as_conf.get_retrials(), as_conf.get_default_job_type()) if rerun == "true": chunk_list = Autosubmit._create_json(as_conf.get_chunk_list()) @@ -1668,14 +1674,14 @@ class Autosubmit: else: job_list.remove_rerun_only_jobs() - Log.info("\nSaving joblist...") + Log.info("\nSaving the jobs list...") job_list.save() if not noplot: - Log.info("\nPloting joblist...") + Log.info("\nPlotting the jobs list...") monitor_exp = Monitor() monitor_exp.generate_output(expid, job_list.get_job_list(), output, not hide) - Log.result("\nJob list created succesfully") + Log.result("\nJob list created successfully") Log.user_warning("Remember to MODIFY the MODEL config files!") return True diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 65d74c45c..7450aecb4 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -33,7 +33,7 @@ from 
autosubmit.config.log import Log from autosubmit.config.basicConfig import BasicConfig -class AutosubmitConfig: +class AutosubmitConfig(object): """ Class to handle experiment configuration coming from file or database @@ -48,19 +48,28 @@ class AutosubmitConfig: self.parser_factory = parser_factory + self._conf_parser = None self._conf_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "autosubmit_" + expid + ".conf") + self._exp_parser = None self._exp_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "expdef_" + expid + ".conf") + self._platforms_parser = None self._platforms_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "platforms_" + expid + ".conf") + self._jobs_parser = None self._jobs_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "jobs_" + expid + ".conf") + self._proj_parser = None self._proj_parser_file = os.path.join(self.basic_config.LOCAL_ROOT_DIR, expid, "conf", "proj_" + expid + ".conf") self.check_proj_file() + @property + def jobs_parser(self): + return self._jobs_parser + @property def experiment_file(self): """ @@ -133,7 +142,7 @@ class AutosubmitConfig: :return: wallclock time :rtype: str """ - return AutosubmitConfig.get_option(self.jobs_parser, section, 'WALLCLOCK', '') + return AutosubmitConfig.get_option(self._jobs_parser, section, 'WALLCLOCK', '') def get_processors(self, section): """ @@ -143,7 +152,7 @@ class AutosubmitConfig: :return: wallclock time :rtype: str """ - return str(AutosubmitConfig.get_option(self.jobs_parser, section, 'PROCESSORS', 1)) + return str(AutosubmitConfig.get_option(self._jobs_parser, section, 'PROCESSORS', 1)) def get_threads(self, section): """ @@ -153,7 +162,7 @@ class AutosubmitConfig: :return: threads needed :rtype: str """ - return int(AutosubmitConfig.get_option(self.jobs_parser, section, 'THREADS', 1)) + return int(AutosubmitConfig.get_option(self._jobs_parser, section, 'THREADS', 1)) def 
get_tasks(self, section): """ @@ -163,7 +172,7 @@ class AutosubmitConfig: :return: tasks (processes) per host :rtype: int """ - return int(AutosubmitConfig.get_option(self.jobs_parser, section, 'TASKS', 0)) + return int(AutosubmitConfig.get_option(self._jobs_parser, section, 'TASKS', 0)) def get_scratch_free_space(self, section): """ @@ -173,7 +182,7 @@ class AutosubmitConfig: :return: percentage of scratch free space needed :rtype: int """ - return int(AutosubmitConfig.get_option(self.jobs_parser, section, 'SCRATCH_FREE_SPACE', 0)) + return int(AutosubmitConfig.get_option(self._jobs_parser, section, 'SCRATCH_FREE_SPACE', 0)) def get_memory(self, section): """ @@ -183,7 +192,7 @@ class AutosubmitConfig: :return: memory needed :rtype: str """ - return str(AutosubmitConfig.get_option(self.jobs_parser, section, 'MEMORY', '')) + return str(AutosubmitConfig.get_option(self._jobs_parser, section, 'MEMORY', '')) def get_memory_per_task(self, section): """ @@ -193,7 +202,7 @@ class AutosubmitConfig: :return: memory per task needed :rtype: str """ - return str(AutosubmitConfig.get_option(self.jobs_parser, section, 'MEMORY_PER_TASK', '')) + return str(AutosubmitConfig.get_option(self._jobs_parser, section, 'MEMORY_PER_TASK', '')) def check_conf_files(self): """ @@ -267,9 +276,6 @@ class AutosubmitConfig: result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'PROJECT') result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'USER') - # if platform_type in ['pbs', 'ecaccess']: - # result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'VERSION') - result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'HOST') result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'SCRATCH_DIR') result = result and AutosubmitConfig.check_is_boolean(self._platforms_parser, section, @@ -293,7 +299,7 @@ class AutosubmitConfig: :rtype: bool """ result = True - 
parser = self.jobs_parser + parser = self._jobs_parser sections = parser.sections() platforms = self._platforms_parser.sections() platforms.append('LOCAL') @@ -411,7 +417,7 @@ class AutosubmitConfig: """ self._conf_parser = AutosubmitConfig.get_parser(self.parser_factory, self._conf_parser_file) self._platforms_parser = AutosubmitConfig.get_parser(self.parser_factory, self._platforms_parser_file) - self.jobs_parser = AutosubmitConfig.get_parser(self.parser_factory, self._jobs_parser_file) + self._jobs_parser = AutosubmitConfig.get_parser(self.parser_factory, self._jobs_parser_file) self._exp_parser = AutosubmitConfig.get_parser(self.parser_factory, self._exp_parser_file) if self._proj_parser_file == '': self._proj_parser = None diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 635924782..18db854f2 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -47,6 +47,8 @@ class Job(object): :type priority: int """ + CHECK_ON_SUBMISSION = 'on_submission' + def __str__(self): return "{0} STATUS: {1}".format(self.name, self.status) @@ -85,7 +87,7 @@ class Job(object): self._tmp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, self.expid, BasicConfig.LOCAL_TMP_DIR) self.write_start = False self._platform = None - self.check = True + self.check = 'True' def __getstate__(self): odict = self.__dict__ @@ -669,11 +671,11 @@ class Job(object): flags=re.IGNORECASE) template_content = template_content.replace("%%", "%") - scriptname = self.name + '.cmd' - open(os.path.join(self._tmp_path, scriptname), 'w').write(template_content) - os.chmod(os.path.join(self._tmp_path, scriptname), 0o775) + script_name = self.name + '.cmd' + open(os.path.join(self._tmp_path, script_name), 'w').write(template_content) + os.chmod(os.path.join(self._tmp_path, script_name), 0o775) - return scriptname + return script_name def check_script(self, as_conf, parameters): """ @@ -686,9 +688,6 @@ class Job(object): :return: true if not problem has been detected, false otherwise :rtype: 
bool """ - if not self.check: - Log.info('Template {0} will not be checked'.format(self.section)) - return True parameters = self.update_parameters(as_conf, parameters) template_content = self.update_content(as_conf) diff --git a/autosubmit/job/job_exceptions.py b/autosubmit/job/job_exceptions.py new file mode 100644 index 000000000..7bd32c8f9 --- /dev/null +++ b/autosubmit/job/job_exceptions.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +# Copyright 2017 Earth Sciences Department, BSC-CNS + +# This file is part of Autosubmit. + +# Autosubmit is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# Autosubmit is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with Autosubmit. If not, see . 
+ + +class WrongTemplateException(Exception): + """ + Class to alert when the template checking fails for a given job + """ + + def __init__(self, job_name): + super(WrongTemplateException, self).__init__() + self.job_name = job_name + diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index d58daaf3b..0a426aa8d 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -634,9 +634,12 @@ class JobList: for job in self._job_list: if job.section in sections_checked: continue + if job.check.lower() != 'true': + Log.warning('Template {0} will not be checked'.format(job.section)) + continue if not job.check_script(as_conf, self.parameters): out = False - Log.warning("Invalid parameter substitution in {0} template!!!", job.section) + Log.warning("Invalid parameter substitution in {0} template", job.section) sections_checked.add(job.section) if out: Log.result("Scripts OK") @@ -1066,11 +1069,7 @@ class DicJobs: job.platform_name = job.platform_name job.file = self.get_option(section, "FILE", None) job.queue = self.get_option(section, "QUEUE", None) - if self.get_option(section, "CHECK", 'True').lower() == 'true': - job.check = True - else: - job.check = False - + job.check = self.get_option(section, "CHECK", 'True').lower() job.processors = str(self.get_option(section, "PROCESSORS", 1)) job.threads = self.get_option(section, "THREADS", '') job.tasks = self.get_option(section, "TASKS", '') diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index 86aa38626..9b282e3c9 100644 --- a/autosubmit/job/job_package.py +++ b/autosubmit/job/job_package.py @@ -27,6 +27,8 @@ import time import os from autosubmit.job.job_common import Status from autosubmit.config.log import Log +from autosubmit.job.job_exceptions import WrongTemplateException +from autosubmit.job.job import Job class JobPackageBase(object): @@ -70,6 +72,9 @@ class JobPackageBase(object): def submit(self, configuration, parameters): for job in self.jobs: + if 
job.check.lower() == Job.CHECK_ON_SUBMISSION: + if not job.check_script(configuration, parameters): + raise WrongTemplateException(job.name) job.update_parameters(configuration, parameters) self._create_scripts(configuration) self._send_files() diff --git a/test/unit/test_autosubmit_ config.py b/test/unit/test_autosubmit_ config.py index 0396c0478..1eb237d67 100644 --- a/test/unit/test_autosubmit_ config.py +++ b/test/unit/test_autosubmit_ config.py @@ -193,11 +193,12 @@ class TestAutosubmitConfig(TestCase): def test_that_reload_must_load_parsers(self): # arrange config = AutosubmitConfig(self.any_expid, FakeBasicConfig, ConfigParserFactory()) - parsers = ['_conf_parser', '_platforms_parser', 'jobs_parser', '_exp_parser', '_proj_parser'] + parsers = ['_conf_parser', '_platforms_parser', '_jobs_parser', '_exp_parser', '_proj_parser'] # pre-act assertions for parser in parsers: - self.assertFalse(hasattr(config, parser)) + self.assertTrue(hasattr(config, parser)) + self.assertIsNone(getattr(config, parser)) # act config.reload() diff --git a/test/unit/test_job.py b/test/unit/test_job.py index 9bfe2bf8e..0bcf44044 100644 --- a/test/unit/test_job.py +++ b/test/unit/test_job.py @@ -180,16 +180,6 @@ class TestJob(TestCase): write_mock.write.assert_called_with('some-content: 999, 777, 666 % %') chmod_mock.assert_called_with(os.path.join(self.job._tmp_path, self.job.name + '.cmd'), 0o775) - def test_that_check_script_returns_true_when_it_is_not_needed(self): - # arrange - self.job.check = False - - # act - result = self.job.check_script(Mock(), dict()) - - # assert - self.assertTrue(result) - def test_that_check_script_returns_false_when_there_is_an_unbound_template_variable(self): # arrange update_content_mock = Mock(return_value='some-content: %UNBOUND%') -- GitLab From 7c51735ac71e955160c3aec4dfe27db55caeaf2e Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Fri, 27 Jan 2017 12:19:57 +0100 Subject: [PATCH 21/60] Minor typo from 9f36e584 fixed --- autosubmit/autosubmit.py 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 2d4c014ec..9f5b6299d 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -80,7 +80,7 @@ from notifications.mail_notifier import MailNotifier from notifications.notifier import Notifier from platforms.saga_submitter import SagaSubmitter from platforms.paramiko_submitter import ParamikoSubmitter -from autosubmit.job.job_exceptions import WrongTemplateException +from job.job_exceptions import WrongTemplateException # noinspection PyUnusedLocal def signal_handler(signal_received, frame): -- GitLab From fba98f6b320bfca06f1b4fc8e67431b0e103178e Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Fri, 27 Jan 2017 12:29:01 +0100 Subject: [PATCH 22/60] es/auto-ecearth/#67 implementation improved --- autosubmit/job/job_list.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 0a426aa8d..ea943e359 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -636,10 +636,10 @@ class JobList: continue if job.check.lower() != 'true': Log.warning('Template {0} will not be checked'.format(job.section)) - continue - if not job.check_script(as_conf, self.parameters): - out = False - Log.warning("Invalid parameter substitution in {0} template", job.section) + else: + if not job.check_script(as_conf, self.parameters): + out = False + Log.warning("Invalid parameter substitution in {0} template", job.section) sections_checked.add(job.section) if out: Log.result("Scripts OK") -- GitLab From 1de8779dea3e81ffa8b7f9e035bcabf4e9d7b225 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Fri, 27 Jan 2017 12:59:57 +0100 Subject: [PATCH 23/60] Experiment' folder checking was removed from the UNARCHIVE process. 
Fixes #235 --- autosubmit/autosubmit.py | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 9f5b6299d..bcc17f3f1 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -1495,35 +1495,30 @@ class Autosubmit: Autosubmit.unarchive(expid) return False - Log.result("Experiment archived succesfully") + Log.result("Experiment archived successfully") return True @staticmethod - def unarchive(expid): + def unarchive(experiment_id): """ Unarchives an experiment: uncompress folder from tar.gz and moves to experiments root folder - :param expid: experiment identifier - :type expid: str + :param experiment_id: experiment identifier + :type experiment_id: str """ BasicConfig.read() - exp_folder = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid) - if not os.path.exists(exp_folder): - Log.critical("The directory %s is needed and does not exist." % exp_folder) - Log.warning("Does an experiment with the given id exist?") - return 1 - - Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'unarchive{0}.log'.format(expid))) + Log.set_file(os.path.join(BasicConfig.LOCAL_ROOT_DIR, "ASlogs", 'unarchive{0}.log'.format(experiment_id))) + exp_folder = os.path.join(BasicConfig.LOCAL_ROOT_DIR, experiment_id) if os.path.exists(exp_folder): - Log.error("Experiment {0} is not archived", expid) + Log.error("Experiment {0} is not archived", experiment_id) return False # Searching by year. 
We will store it on database year = datetime.datetime.today().year archive_path = None while year > 2000: - archive_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, str(year), '{0}.tar.gz'.format(expid)) + archive_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, str(year), '{0}.tar.gz'.format(experiment_id)) if os.path.exists(archive_path): break year -= 1 @@ -1545,7 +1540,7 @@ class Autosubmit: Log.critical("Can not extract tar file: {0}".format(e)) return False - Log.info("Unpacking finished.") + Log.info("Unpacking finished") try: os.remove(archive_path) @@ -1553,7 +1548,7 @@ class Autosubmit: Log.error("Can not remove archived file folder: {0}".format(e)) return False - Log.result("Experiment {0} unarchived succesfully", expid) + Log.result("Experiment {0} unarchived successfully", experiment_id) return True @staticmethod -- GitLab From 5c5979e9abebf623330447e800c1bc33b99bb422 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Mon, 30 Jan 2017 13:19:15 +0100 Subject: [PATCH 24/60] Some functions to work with hours were included --- autosubmit/date/chunk_date_lib.py | 14 ++++++++++++++ test/unit/test_chunk_date_lib.py | 12 ++++++++++++ 2 files changed, 26 insertions(+) diff --git a/autosubmit/date/chunk_date_lib.py b/autosubmit/date/chunk_date_lib.py index deae15aad..297c22510 100755 --- a/autosubmit/date/chunk_date_lib.py +++ b/autosubmit/date/chunk_date_lib.py @@ -328,3 +328,17 @@ def date2str(date, date_format=''): date.second) else: return "{0:04}{1:02}{2:02}".format(date.year, date.month, date.day) + + +def sum_str_hours(str_hour1, str_hour2): + hours1, minutes1 = split_str_hours(str_hour1) + hours2, minutes2 = split_str_hours(str_hour2) + total_minutes = minutes1 + minutes2 + (hours1 * 60) + (hours2 * 60) + return "%02d:%02d" % (total_minutes / 60, total_minutes % 60) + + +def split_str_hours(str_hour): + str_splitted = str_hour.split(':') + if len(str_splitted) == 2: + return int(str_splitted[0]), int(str_splitted[1]) + raise Exception('Incorrect input. 
Usage: \'HH:MM\'') diff --git a/test/unit/test_chunk_date_lib.py b/test/unit/test_chunk_date_lib.py index a1e4ee7f0..63ddfd9f1 100644 --- a/test/unit/test_chunk_date_lib.py +++ b/test/unit/test_chunk_date_lib.py @@ -198,3 +198,15 @@ class TestChunkDateLib(TestCase): self.assertEqual(date2str(datetime(2000, 1, 1), 'H'), '2000010100') self.assertEqual(date2str(datetime(2000, 1, 1), 'M'), '200001010000') self.assertEqual(date2str(datetime(2000, 1, 1), 'S'), '20000101000000') + + def test_sum_str_hours(self): + self.assertEqual(sum_str_hours('00:30', '00:30'), '01:00') + self.assertEqual(sum_str_hours('14:30', '14:30'), '29:00') + self.assertEqual(sum_str_hours('50:45', '50:30'), '101:15') + + def test_split_str_hours(self): + self.assertEqual(split_str_hours('00:30'), (0, 30)) + self.assertEqual(split_str_hours('12:55'), (12, 55)) + with self.assertRaises(Exception): + parse_date('30') + -- GitLab From d707f0bdc52bba9e54427592e7e45a810c200fd7 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Mon, 30 Jan 2017 15:51:56 +0100 Subject: [PATCH 25/60] A double check for FAILED jobs was added --- autosubmit/job/job.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 18db854f2..e87ed97ac 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -436,7 +436,7 @@ class Job(object): previous_status = self.status if new_status == Status.COMPLETED: - Log.debug("This job seems to have completed...checking") + Log.debug("This job seems to have completed: checking...") self.platform.get_completed_files(self.name) self.check_completion() else: @@ -448,9 +448,14 @@ class Job(object): elif self.status is Status.COMPLETED: Log.result("Job {0} is COMPLETED", self.name) elif self.status is Status.FAILED: - Log.user_warning("Job {0} is FAILED", self.name) + Log.user_warning("Job {0} is FAILED. 
Checking completed files to confirm the failure...", self.name) + self.platform.get_completed_files(self.name) + self.check_completion() + if self.status is Status.COMPLETED: + Log.warning('Job {0} seems to have failed but there is a COMPLETED file', self.name) + Log.result("Job {0} is COMPLETED", self.name) elif self.status is Status.UNKNOWN: - Log.debug("Job {0} in UNKNOWN status. Checking completed files", self.name) + Log.debug("Job {0} in UNKNOWN status. Checking completed files...", self.name) self.platform.get_completed_files(self.name) self.check_completion(Status.UNKNOWN) if self.status is Status.UNKNOWN: @@ -483,7 +488,7 @@ class Job(object): if os.path.exists(log_name): self.status = Status.COMPLETED else: - Log.warning("Job {0} seemed to be completed but there is no COMPLETED file", self.name) + Log.warning("Job {0} completion check failed. There is no COMPLETED file", self.name) self.status = default_status def update_parameters(self, as_conf, parameters, -- GitLab From 54e507be9d4f56435992d9ca6ed9262da2815c1b Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Thu, 2 Feb 2017 07:55:19 +0100 Subject: [PATCH 26/60] A simple implementation of vertical wrapper was implemented +max_wallclock --- autosubmit/config/config_common.py | 8 ++++ autosubmit/job/job_list.py | 50 ++++++++++++++++++---- autosubmit/job/job_package.py | 16 +++++-- autosubmit/platforms/lsfplatform.py | 30 ++++++------- autosubmit/platforms/paramiko_submitter.py | 3 ++ autosubmit/platforms/platform.py | 1 + autosubmit/platforms/saga_submitter.py | 5 ++- 7 files changed, 83 insertions(+), 30 deletions(-) diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 7450aecb4..54a6b4720 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -774,6 +774,14 @@ class AutosubmitConfig(object): """ return int(self._conf_parser.get('config', 'TOTALJOBS')) + def get_max_wallclock(self): + """ + Returns max wallclock from autosubmit's 
config file + + :rtype: str + """ + return self.get_option(self._conf_parser, 'config', 'MAX_WALLCLOCK', '') + def get_max_waiting_jobs(self): """ Returns max number of waitng jobs from autosubmit's config file diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index ea943e359..fd617714e 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -34,7 +34,7 @@ from autosubmit.job.job_common import Status, Type from autosubmit.job.job import Job from autosubmit.job.job_package import JobPackageSimple, JobPackageArray, JobPackageThread from autosubmit.config.log import Log -from autosubmit.date.chunk_date_lib import date2str, parse_date +from autosubmit.date.chunk_date_lib import date2str, parse_date, sum_str_hours from networkx import DiGraph from autosubmit.job.job_utils import transitive_reduction @@ -766,7 +766,7 @@ class JobList: # Logging obtained data Log.debug("Number of jobs ready: {0}", len(jobs_available)) Log.debug("Number of jobs available: {0}", max_wait_jobs_to_submit) - Log.info("Jobs to submit: {0}", min(max_wait_jobs_to_submit, len(jobs_available))) + Log.info("Jobs READY to submit: {0}", min(max_wait_jobs_to_submit, len(jobs_available))) # If can submit jobs if max_wait_jobs_to_submit > 0 and max_jobs_to_submit > 0: available_sorted = sorted(jobs_available, key=lambda k: k.long_name.split('_')[1][:6]) @@ -775,10 +775,16 @@ class JobList: jobs_to_submit = list_of_available[0:num_jobs_to_submit] jobs_to_submit_by_section = self.divide_list_by_section(jobs_to_submit) packages_to_submit = list() + if platform.allow_arrays: + max_jobs = min(max_wait_jobs_to_submit, max_jobs_to_submit) for section_list in jobs_to_submit_by_section.values(): - packages_to_submit.append(JobPackageThread(section_list)) + built_packages, max_jobs = JobList.build_vertical_packages(section_list, + max_jobs, + platform.max_wallclock) + packages_to_submit += built_packages return packages_to_submit + for job in jobs_to_submit: 
packages_to_submit.append(JobPackageSimple([job])) return packages_to_submit @@ -800,22 +806,50 @@ class JobList: by_section[job.section].append(job) return by_section + @staticmethod + def build_vertical_packages(section_list, max_jobs, max_wallclock): + packages = [] + for job in section_list: + if max_jobs > 0: + jobs_list = JobList.build_vertical_package(job, [job], job.wallclock, max_jobs, max_wallclock) + packages.append(JobPackageThread(jobs_list)) + max_jobs -= len(jobs_list) + else: + break + return packages, max_jobs + + @staticmethod + def build_vertical_package(job, jobs_list, total_wallclock, max_jobs, max_wallclock): + if len(jobs_list) >= max_jobs: + return jobs_list + for child in job.children: + if child.section != job.section: + continue + if len(child.parents) > 1: + continue + total_wallclock = sum_str_hours(total_wallclock, child.wallclock) + if total_wallclock > max_wallclock: + return jobs_list + jobs_list.append(child) + return JobList.build_vertical_package(child, jobs_list, total_wallclock, max_jobs, max_wallclock) + return jobs_list + class DicJobs: """ - Class to create jobs from conf file and to find jobs by stardate, member and chunk + Class to create jobs from conf file and to find jobs by start date, member and chunk - :param joblist: joblist to use - :type joblist: JobList + :param jobs_list: jobs list to use + :type job_list: JobList :param parser: jobs conf file parser :type parser: SafeConfigParser - :param date_list: startdates + :param date_list: start dates :type date_list: list :param member_list: member :type member_list: list :param chunk_list: chunks :type chunk_list: list - :param date_format: option to formate dates + :param date_format: option to format dates :type date_format: str :param default_retrials: default retrials for ech job :type default_retrials: int diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index 9b282e3c9..602637d79 100644 --- a/autosubmit/job/job_package.py +++ 
b/autosubmit/job/job_package.py @@ -29,6 +29,7 @@ from autosubmit.job.job_common import Status from autosubmit.config.log import Log from autosubmit.job.job_exceptions import WrongTemplateException from autosubmit.job.job import Job +from autosubmit.date.chunk_date_lib import sum_str_hours class JobPackageBase(object): @@ -196,9 +197,9 @@ class JobPackageThread(JobPackageBase): self._wallclock = '00:00' self._num_processors = '0' for job in jobs: - if job.wallclock > self._wallclock: - self._wallclock = job.wallclock - self._num_processors = str(int(self._num_processors) + int(job.processors)) + if job.processors > self._num_processors: + self._num_processors = job.processors + self._wallclock = sum_str_hours(self._wallclock, job.wallclock) super(JobPackageThread, self).__init__(jobs) def _create_scripts(self, configuration): @@ -216,7 +217,7 @@ class JobPackageThread(JobPackageBase): script_content = self.platform.header.thread_header(filename, self._wallclock, self._num_processors, len(self.jobs), - self._job_scripts.values()) + self.jobs_scripts) filename += '.cmd' open(os.path.join(self._tmp_path, filename), 'w').write(script_content) os.chmod(os.path.join(self._tmp_path, filename), 0o775) @@ -242,3 +243,10 @@ class JobPackageThread(JobPackageBase): self.jobs[i - 1].id = str(package_id) self.jobs[i - 1].status = Status.SUBMITTED self.jobs[i - 1].write_submit_time() + + @property + def jobs_scripts(self): + jobs_scripts = [] + for job in self.jobs: + jobs_scripts.append(self._job_scripts[job.name]) + return jobs_scripts diff --git a/autosubmit/platforms/lsfplatform.py b/autosubmit/platforms/lsfplatform.py index cda8819f9..6cddcfada 100644 --- a/autosubmit/platforms/lsfplatform.py +++ b/autosubmit/platforms/lsfplatform.py @@ -174,27 +174,25 @@ class LsfHeader: self.id_run = id_run def run(self): - out = str(self.template) + "." + str(self.id_run) + ".out" - err = str(self.template) + "." 
+ str(self.id_run) + ".err" - command = str(self.template) + " " + str(self.id_run) + " " + os.getcwd() - (self.status) = getstatusoutput(command + " > " + out + " 2> " + err) + out = str(self.template) + '.' + str(self.id_run) + '.out' + err = str(self.template) + '.' + str(self.id_run) + '.err' + command = str(self.template) + ' ' + str(self.id_run) + ' ' + os.getcwd() + (self.status) = getstatusoutput(command + ' > ' + out + ' 2> ' + err) - # Splitting the original hosts file - os.system("cat {5} | split -a 2 -d -l {3} - mlist-{6}-") - - pid_list = [] - scripts = {4} + scripts = {3} for i in range(len(scripts)): current = JobThread(scripts[i], i) - pid_list.append(current) current.start() - - for pid in pid_list: - pid.join() - print "Status from ", pid.template,"is", pid.status - """.format(filename, wallclock, num_processors, (int(num_processors) / num_jobs), str(job_scripts), - "${LSB_DJOB_HOSTFILE}", "${LSB_JOBID}")) + current.join() + completed_filename = scripts[i].replace('.cmd', '_COMPLETED') + completed_path = os.path.join(os.getcwd(), completed_filename) + if os.path.exists(completed_path): + print "The job ", current.template," has been COMPLETED" + else: + print "The job ", current.template," has FAILED" + os._exit(1) + """.format(filename, wallclock, num_processors, str(job_scripts))) SERIAL = textwrap.dedent("""\ ############################################################################### diff --git a/autosubmit/platforms/paramiko_submitter.py b/autosubmit/platforms/paramiko_submitter.py index c1dd37312..1281f1731 100644 --- a/autosubmit/platforms/paramiko_submitter.py +++ b/autosubmit/platforms/paramiko_submitter.py @@ -64,6 +64,7 @@ class ParamikoSubmitter(Submitter): platforms = dict() local_platform = LocalPlatform(asconf.expid, 'local', BasicConfig) + local_platform.max_wallclock = asconf.get_max_wallclock() local_platform.max_waiting_jobs = asconf.get_max_waiting_jobs() local_platform.total_jobs = asconf.get_total_jobs() 
local_platform.scratch = os.path.join(BasicConfig.LOCAL_ROOT_DIR, asconf.expid, BasicConfig.LOCAL_TMP_DIR) @@ -109,6 +110,8 @@ class ParamikoSubmitter(Submitter): host = AutosubmitConfig.get_option(parser, section, 'HOST', None) remote_platform.host = host + remote_platform.max_wallclock = AutosubmitConfig.get_option(parser, section, 'MAX_WALLCLOCK', + asconf.get_max_wallclock()) remote_platform.max_waiting_jobs = int(AutosubmitConfig.get_option(parser, section, 'MAX_WAITING_JOBS', asconf.get_max_waiting_jobs())) remote_platform.total_jobs = int(AutosubmitConfig.get_option(parser, section, 'TOTAL_JOBS', diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index 747d2db93..04d9001d5 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -40,6 +40,7 @@ class Platform(object): self.scheduler = None self.directory = None self.hyperthreading = 'false' + self.max_wallclock = '' self._allow_arrays = False @property diff --git a/autosubmit/platforms/saga_submitter.py b/autosubmit/platforms/saga_submitter.py index 8bfdd9e90..14f95cd08 100644 --- a/autosubmit/platforms/saga_submitter.py +++ b/autosubmit/platforms/saga_submitter.py @@ -82,6 +82,7 @@ class SagaSubmitter(Submitter): time.sleep(5) local_platform.type = 'local' local_platform.queue = '' + local_platform.max_wallclock = asconf.get_max_wallclock() local_platform.max_waiting_jobs = asconf.get_max_waiting_jobs() local_platform.total_jobs = asconf.get_total_jobs() local_platform.scratch = os.path.join(BasicConfig.LOCAL_ROOT_DIR, asconf.expid, BasicConfig.LOCAL_TMP_DIR) @@ -160,12 +161,12 @@ class SagaSubmitter(Submitter): remote_platform.service._adaptor.host = remote_platform.host # noinspection PyProtectedMember remote_platform.service._adaptor.scheduler = remote_platform.scheduler - + remote_platform.max_wallclock = AutosubmitConfig.get_option(parser, section, 'MAX_WALLCLOCK', + asconf.get_max_wallclock()) remote_platform.max_waiting_jobs = 
int(AutosubmitConfig.get_option(parser, section, 'MAX_WAITING_JOBS', asconf.get_max_waiting_jobs())) remote_platform.total_jobs = int(AutosubmitConfig.get_option(parser, section, 'TOTAL_JOBS', asconf.get_total_jobs())) - remote_platform.project = AutosubmitConfig.get_option(parser, section, 'PROJECT', None) remote_platform.budget = AutosubmitConfig.get_option(parser, section, 'BUDGET', remote_platform.project) remote_platform.reservation = AutosubmitConfig.get_option(parser, section, 'RESERVATION', '') -- GitLab From 1996fd0273ab2f0abaff98ce4aa20a6c994303ab Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Thu, 9 Feb 2017 12:51:12 +0100 Subject: [PATCH 27/60] A better implementation of vertical wrapper +dependencies --- autosubmit/autosubmit.py | 2 ++ autosubmit/job/job.py | 17 +++++++++--- autosubmit/job/job_list.py | 43 +++++++++++++++++++++-------- autosubmit/job/job_package.py | 39 ++++++++++++++++---------- autosubmit/platforms/lsfplatform.py | 5 ++-- autosubmit/platforms/platform.py | 4 +-- 6 files changed, 76 insertions(+), 34 deletions(-) diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index bcc17f3f1..05c9f98fe 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -656,6 +656,8 @@ class Autosubmit: for platform in platforms_to_test: for job in job_list.get_in_queue(platform): prev_status = job.status + if job.status == Status.FAILED: + continue if prev_status != job.update_status(platform.check_job(job.id), as_conf.get_copy_remote_logs() == 'true'): diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index e87ed97ac..0a04f360a 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -425,7 +425,7 @@ class Job(object): """ return self._get_from_total_stats(1) - def update_status(self, new_status, copy_remote_logs): + def update_status(self, new_status, copy_remote_logs=False): """ Updates job status, checking COMPLETED file if needed @@ -454,6 +454,8 @@ class Job(object): if self.status is Status.COMPLETED: 
Log.warning('Job {0} seems to have failed but there is a COMPLETED file', self.name) Log.result("Job {0} is COMPLETED", self.name) + else: + self.update_children_status() elif self.status is Status.UNKNOWN: Log.debug("Job {0} in UNKNOWN status. Checking completed files...", self.name) self.platform.get_completed_files(self.name) @@ -477,6 +479,13 @@ class Job(object): self.platform.get_logs_files(self.expid, self.remote_logs) return self.status + def update_children_status(self): + children = list(self.children) + for child in children: + if child.status in [Status.SUBMITTED, Status.RUNNING, Status.QUEUING, Status.UNKNOWN]: + child.status = Status.FAILED + children += list(child.children) + def check_completion(self, default_status=Status.FAILED): """ Check the presence of *COMPLETED* file. @@ -723,10 +732,10 @@ class Job(object): :return: True if succesful, False otherwise :rtype: bool """ - if self.platform.get_stat_file(self.name, retries=5): + if self.platform.get_stat_file(self.name, retries=0): start_time = self.check_start_time() else: - Log.warning('Could not get start time for {0}. Using current time as an aproximation', self.name) + Log.warning('Could not get start time for {0}. 
Using current time as an approximation', self.name) start_time = time.time() path = os.path.join(self._tmp_path, self.name + '_TOTAL_STATS') @@ -742,7 +751,7 @@ class Job(object): :param completed: True if job was completed succesfuly, False otherwise :type completed: bool """ - self.platform.get_stat_file(self.name, retries=5) + self.platform.get_stat_file(self.name, retries=0) end_time = self.check_end_time() path = os.path.join(self._tmp_path, self.name + '_TOTAL_STATS') f = open(path, 'a') diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index fd617714e..5bdae1f69 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -517,7 +517,7 @@ class JobList: def update_from_file(self, store_change=True): """ - Updates joblist on the fly from and update file + Updates jobs list on the fly from and update file :param store_change: if True, renames the update file to avoid reloading it at the next iteration """ if os.path.exists(os.path.join(self._persistence_path, self._update_file)): @@ -809,11 +809,20 @@ class JobList: @staticmethod def build_vertical_packages(section_list, max_jobs, max_wallclock): packages = [] + potential_dependency = None for job in section_list: if max_jobs > 0: jobs_list = JobList.build_vertical_package(job, [job], job.wallclock, max_jobs, max_wallclock) - packages.append(JobPackageThread(jobs_list)) max_jobs -= len(jobs_list) + if job.status is Status.READY: + packages.append(JobPackageThread(jobs_list)) + else: + packages.append(JobPackageThread(jobs_list, potential_dependency)) + if True: # not true, should be a config param + child = JobList.get_wrappable_child(jobs_list[-1], JobList.is_wrappable) + if child is not None: + section_list.insert(section_list.index(job) + 1, child) + potential_dependency = packages[-1].name else: break return packages, max_jobs @@ -822,18 +831,30 @@ class JobList: def build_vertical_package(job, jobs_list, total_wallclock, max_jobs, max_wallclock): if len(jobs_list) >= 
max_jobs: return jobs_list - for child in job.children: - if child.section != job.section: - continue - if len(child.parents) > 1: - continue + child = JobList.get_wrappable_child(job, JobList.is_wrappable) + if child is not None: total_wallclock = sum_str_hours(total_wallclock, child.wallclock) - if total_wallclock > max_wallclock: - return jobs_list - jobs_list.append(child) - return JobList.build_vertical_package(child, jobs_list, total_wallclock, max_jobs, max_wallclock) + if total_wallclock <= max_wallclock: + jobs_list.append(child) + return JobList.build_vertical_package(child, jobs_list, total_wallclock, max_jobs, max_wallclock) return jobs_list + @staticmethod + def get_wrappable_child(job, check_function): + for child in job.children: + if check_function(job, child): + return child + continue + return None + + @staticmethod + def is_wrappable(parent, child): + if child.section != parent.section: + return False + if len(child.parents) > 1: + return False + return True + class DicJobs: """ diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index 602637d79..510c60fd0 100644 --- a/autosubmit/job/job_package.py +++ b/autosubmit/job/job_package.py @@ -23,8 +23,9 @@ except ImportError: # noinspection PyCompatibility from ConfigParser import SafeConfigParser -import time import os +import time +import random from autosubmit.job.job_common import Status from autosubmit.config.log import Log from autosubmit.job.job_exceptions import WrongTemplateException @@ -191,8 +192,10 @@ class JobPackageThread(JobPackageBase): """ FILE_PREFIX = 'ASThread' - def __init__(self, jobs): + def __init__(self, jobs, dependency=None): + super(JobPackageThread, self).__init__(jobs) self._job_scripts = {} + self._job_dependency = dependency self._common_script = None self._wallclock = '00:00' self._num_processors = '0' @@ -200,28 +203,27 @@ class JobPackageThread(JobPackageBase): if job.processors > self._num_processors: self._num_processors = job.processors 
self._wallclock = sum_str_hours(self._wallclock, job.wallclock) - super(JobPackageThread, self).__init__(jobs) + self._name = self.FILE_PREFIX + "_{0}_{1}_{2}".format(str(int(time.time())) + str(random.randint(1, 10000)), + self._num_processors, + len(self._jobs)) def _create_scripts(self, configuration): - timestamp = str(int(time.time())) - filename = self.FILE_PREFIX + "_{0}_{1}_{2}".format(timestamp, self._num_processors, len(self.jobs)) for i in range(1, len(self.jobs) + 1): self._job_scripts[self.jobs[i - 1].name] = self.jobs[i - 1].create_script(configuration) self.jobs[i - 1].remote_logs = ( self._job_scripts[self.jobs[i - 1].name] + ".{0}.out".format(i - 1), self._job_scripts[self.jobs[i - 1].name] + ".{0}.err".format(i - 1) ) - self._common_script = self._create_common_script(filename) + self._common_script = self._create_common_script() - def _create_common_script(self, filename): - - script_content = self.platform.header.thread_header(filename, self._wallclock, - self._num_processors, len(self.jobs), - self.jobs_scripts) - filename += '.cmd' - open(os.path.join(self._tmp_path, filename), 'w').write(script_content) - os.chmod(os.path.join(self._tmp_path, filename), 0o775) - return filename + def _create_common_script(self): + script_content = self.platform.header.thread_header(self._name, self._wallclock, + self._num_processors, self.jobs_scripts, + self._dependency_directive()) + script_file = self.name + '.cmd' + open(os.path.join(self._tmp_path, script_file), 'w').write(script_content) + os.chmod(os.path.join(self._tmp_path, script_file), 0o775) + return script_file def _send_files(self): for job in self.jobs: @@ -244,6 +246,13 @@ class JobPackageThread(JobPackageBase): self.jobs[i - 1].status = Status.SUBMITTED self.jobs[i - 1].write_submit_time() + def _dependency_directive(self): + return '#' if self._job_dependency is None else '#BSUB -w \'done("{0}")\' [-ti]'.format(self._job_dependency) + + @property + def name(self): + return self._name + 
@property def jobs_scripts(self): jobs_scripts = [] diff --git a/autosubmit/platforms/lsfplatform.py b/autosubmit/platforms/lsfplatform.py index 6cddcfada..3611c1071 100644 --- a/autosubmit/platforms/lsfplatform.py +++ b/autosubmit/platforms/lsfplatform.py @@ -147,7 +147,7 @@ class LsfHeader: """.format(filename, array_id, wallclock, num_processors)) @classmethod - def thread_header(cls, filename, wallclock, num_processors, num_jobs, job_scripts): + def thread_header(cls, filename, wallclock, num_processors, job_scripts, dependency_directive): return textwrap.dedent("""\ #!/usr/bin/env python ############################################################################### @@ -159,6 +159,7 @@ class LsfHeader: #BSUB -e {0}.err #BSUB -W {1} #BSUB -n {2} + {4} # ############################################################################### @@ -192,7 +193,7 @@ class LsfHeader: else: print "The job ", current.template," has FAILED" os._exit(1) - """.format(filename, wallclock, num_processors, str(job_scripts))) + """.format(filename, wallclock, num_processors, str(job_scripts), dependency_directive)) SERIAL = textwrap.dedent("""\ ############################################################################### diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index 04d9001d5..518dcaa0e 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -192,7 +192,7 @@ class Platform(object): (job_out_filename, job_err_filename) = remote_logs self.get_files([job_out_filename, job_err_filename], False, 'LOG_{0}'.format(exp_id)) - def get_completed_files(self, job_name, retries=5): + def get_completed_files(self, job_name, retries=0): """ Get the COMPLETED file of the given job @@ -242,7 +242,7 @@ class Platform(object): return True return False - def get_stat_file(self, job_name, retries=1): + def get_stat_file(self, job_name, retries=0): """ Copies *STAT* files from remote to local -- GitLab From 
961110884f549f4a607c0a0764303dc24497d4da Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Thu, 9 Feb 2017 16:35:10 +0100 Subject: [PATCH 28/60] Date & log libraries were replaced by bscearth.utils --- autosubmit/autosubmit.py | 4 +- autosubmit/config/basicConfig.py | 2 +- autosubmit/config/config_common.py | 4 +- autosubmit/config/log.py | 246 --------------- autosubmit/database/db_common.py | 2 +- autosubmit/date/__init__.py | 0 autosubmit/date/chunk_date_lib.py | 344 --------------------- autosubmit/experiment/experiment_common.py | 2 +- autosubmit/git/autosubmit_git.py | 2 +- autosubmit/job/job.py | 2 +- autosubmit/job/job_list.py | 4 +- autosubmit/job/job_list_persistence.py | 2 +- autosubmit/job/job_package.py | 4 +- autosubmit/monitor/monitor.py | 2 +- autosubmit/notifications/mail_notifier.py | 2 +- autosubmit/platforms/ecplatform.py | 2 +- autosubmit/platforms/locplatform.py | 2 +- autosubmit/platforms/paramiko_platform.py | 4 +- autosubmit/platforms/pbsplatform.py | 2 +- autosubmit/platforms/platform.py | 2 +- autosubmit/platforms/saga_platform.py | 4 +- docs/source/codedoc/date.rst | 4 - setup.py | 8 +- test/unit/test_chunk_date_lib.py | 2 +- 24 files changed, 27 insertions(+), 625 deletions(-) delete mode 100644 autosubmit/config/log.py delete mode 100644 autosubmit/date/__init__.py delete mode 100755 autosubmit/date/chunk_date_lib.py delete mode 100644 docs/source/codedoc/date.rst diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 05c9f98fe..bd105d779 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -68,14 +68,14 @@ from job.job_list import JobList from job.job_list_persistence import JobListPersistenceDb from job.job_list_persistence import JobListPersistencePkl # noinspection PyPackageRequirements -from config.log import Log +from bscearth.utils.log import Log from database.db_common import create_db from experiment.experiment_common import new_experiment from experiment.experiment_common import 
copy_experiment from database.db_common import delete_experiment from database.db_common import get_autosubmit_version from monitor.monitor import Monitor -from date.chunk_date_lib import date2str +from bscearth.utils.date import date2str from notifications.mail_notifier import MailNotifier from notifications.notifier import Notifier from platforms.saga_submitter import SagaSubmitter diff --git a/autosubmit/config/basicConfig.py b/autosubmit/config/basicConfig.py index f2f467c81..08db17589 100755 --- a/autosubmit/config/basicConfig.py +++ b/autosubmit/config/basicConfig.py @@ -24,7 +24,7 @@ except ImportError: from ConfigParser import SafeConfigParser import os -from autosubmit.config.log import Log +from bscearth.utils.log import Log class BasicConfig: diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 54a6b4720..0332788ea 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -28,8 +28,8 @@ import subprocess from pyparsing import nestedExpr -from autosubmit.date.chunk_date_lib import parse_date -from autosubmit.config.log import Log +from bscearth.utils.date import parse_date +from bscearth.utils.log import Log from autosubmit.config.basicConfig import BasicConfig diff --git a/autosubmit/config/log.py b/autosubmit/config/log.py deleted file mode 100644 index 9e2527e97..000000000 --- a/autosubmit/config/log.py +++ /dev/null @@ -1,246 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 Earth Sciences Department, BSC-CNS - -# This file is part of Autosubmit. - -# Autosubmit is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# Autosubmit is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with Autosubmit. If not, see . - -import logging -import os -import sys -from datetime import datetime - - -class LogFormatter: - """ - Class to format log output. - - :param to_file: If True, creates a LogFormatter for files; if False, for console - :type to_file: bool - """ - RESULT = '\033[32m' - WARNING = '\033[33m' - ERROR = '\033[31m' - CRITICAL = '\033[1m \033[31m' - DEFAULT = '\033[0m\033[39m' - - def __init__(self, to_file=False): - """ - Initializer for LogFormatter - - - """ - self._file = to_file - if self._file: - self._formatter = logging.Formatter('%(asctime)s %(message)s') - else: - self._formatter = logging.Formatter('%(message)s') - - def format(self, record): - """ - Format log output, adding labels if needed for log level. If logging to console, also manages font color. - If logging to file adds timestamp - - :param record: log record to format - :type record: LogRecord - :return: formatted record - :rtype: str - """ - header = '' - if record.levelno == Log.RESULT: - if not self._file: - header = LogFormatter.RESULT - elif record.levelno == Log.USER_WARNING: - if not self._file: - header = LogFormatter.WARNING - elif record.levelno == Log.WARNING: - if not self._file: - header = LogFormatter.WARNING - header += "[WARNING] " - elif record.levelno == Log.ERROR: - if not self._file: - header = LogFormatter.ERROR - header += "[ERROR] " - elif record.levelno == Log.CRITICAL: - if not self._file: - header = LogFormatter.ERROR - header += "[CRITICAL] " - - msg = self._formatter.format(record) - if header != '' and not self._file: - msg += LogFormatter.DEFAULT - return header + msg - - -class Log: - """ - Static class to manage the log for the application. Messages will be sent to console and to file if it is - configured. Levels can be set for each output independently. 
These levels are (from lower to higher priority): - - - EVERYTHING : this level is just defined to show every output - - DEBUG - - INFO - - RESULT - - USER_WARNING - - WARNING - - ERROR - - CRITICAL - - NO_LOG : this level is just defined to remove every output - - """ - EVERYTHING = 0 - DEBUG = logging.DEBUG - INFO = logging.INFO - RESULT = 25 - USER_WARNING = 29 - WARNING = logging.WARNING - ERROR = logging.ERROR - CRITICAL = logging.CRITICAL - NO_LOG = CRITICAL + 1 - - logging.basicConfig() - - log = logging.Logger('Autosubmit', EVERYTHING) - - console_handler = logging.StreamHandler(sys.stdout) - console_handler.setLevel(INFO) - console_handler.setFormatter(LogFormatter(False)) - log.addHandler(console_handler) - - file_handler = None - file_level = INFO - - @staticmethod - def set_file(file_path): - """ - Configure the file to store the log. If another file was specified earlier, new messages will only go to the - new file. - - :param file_path: file to store the log - :type file_path: str - """ - (directory, filename) = os.path.split(file_path) - if not os.path.exists(directory): - os.mkdir(directory) - files = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f)) and - f.endswith(filename)] - if len(files) >= 5: - files.sort() - os.remove(os.path.join(directory, files[0])) - file_path = os.path.join(directory, '{0:%Y%m%d_%H%M%S}_'.format(datetime.now()) + filename) - if Log.file_handler is not None: - Log.log.removeHandler(Log.file_handler) - Log.file_handler = logging.FileHandler(file_path, 'w') - Log.file_handler.setLevel(Log.file_level) - Log.file_handler.setFormatter(LogFormatter(True)) - Log.log.addHandler(Log.file_handler) - os.chmod(file_path, 0o775) - - @staticmethod - def set_console_level(level): - """ - Sets log level for logging to console. 
Every output of level equal or higher to parameter level will be - printed on console - - :param level: new level for console - :return: None - """ - if type(level) is str: - level = getattr(Log, level) - Log.console_handler.level = level - - @staticmethod - def set_file_level(level): - """ - Sets log level for logging to file. Every output of level equal or higher to parameter level will be - added to log file - - :param level: new level for log file - """ - if type(level) is str: - level = getattr(Log, level) - Log.file_level = level - if Log.file_handler is not None: - Log.file_handler.level = level - - @staticmethod - def debug(msg, *args): - """ - Sends debug information to the log - - :param msg: message to show - :param args: arguments for message formating (it will be done using format() method on str) - """ - Log.log.debug(msg.format(*args)) - - @staticmethod - def info(msg, *args): - """ - Sends information to the log - - :param msg: message to show - :param args: arguments for message formatting (it will be done using format() method on str) - """ - Log.log.info(msg.format(*args)) - - @staticmethod - def result(msg, *args): - """ - Sends results information to the log. It will be shown in green in the console. - - :param msg: message to show - :param args: arguments for message formating (it will be done using format() method on str) - """ - Log.log.log(Log.RESULT, msg.format(*args)) - - @staticmethod - def user_warning(msg, *args): - """ - Sends warnings for the user to the log. It will be shown in yellow in the console. - - :param msg: message to show - :param args: arguments for message formating (it will be done using format() method on str) - """ - Log.log.log(Log.USER_WARNING, msg.format(*args)) - - @staticmethod - def warning(msg, *args): - """ - Sends program warnings to the log. It will be shown in yellow in the console. 
- - :param msg: message to show - :param args: arguments for message formatting (it will be done using format() method on str) - """ - Log.log.warning(msg.format(*args)) - - @staticmethod - def error(msg, *args): - """ - Sends errors to the log. It will be shown in red in the console. - - :param msg: message to show - :param args: arguments for message formatting (it will be done using format() method on str) - """ - Log.log.error(msg.format(*args)) - - @staticmethod - def critical(msg, *args): - """ - Sends critical errors to the log. It will be shown in red in the console. - - :param msg: message to show - :param args: arguments for message formatting (it will be done using format() method on str) - """ - Log.log.critical(msg.format(*args)) diff --git a/autosubmit/database/db_common.py b/autosubmit/database/db_common.py index d58df1888..97bbea6d1 100644 --- a/autosubmit/database/db_common.py +++ b/autosubmit/database/db_common.py @@ -23,7 +23,7 @@ Module containing functions to manage autosubmit's database. import os import sqlite3 -from autosubmit.config.log import Log +from bscearth.utils.log import Log from autosubmit.config.basicConfig import BasicConfig CURRENT_DATABASE_VERSION = 1 diff --git a/autosubmit/date/__init__.py b/autosubmit/date/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/autosubmit/date/chunk_date_lib.py b/autosubmit/date/chunk_date_lib.py deleted file mode 100755 index 297c22510..000000000 --- a/autosubmit/date/chunk_date_lib.py +++ /dev/null @@ -1,344 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 Earth Sciences Department, BSC-CNS - -# This file is part of Autosubmit. - -# Autosubmit is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
- -# Autosubmit is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with Autosubmit. If not, see . -""" -In this python script there are tools to manipulate the dates and make mathematical -operations between them. -""" - -import datetime -import calendar -from dateutil.relativedelta import * - -from autosubmit.config.log import Log - - -def add_time(date, total_size, chunk_unit, cal): - """ - Adds given time to a date - - :param date: base date - :type date: datetime.datetime - :param total_size: time to add - :type total_size: int - :param chunk_unit: unit of time to add - :type chunk_unit: str - :param cal: calendar to use - :type cal: str - :return: result of adding time to base date - :rtype: datetime.datetime - """ - if chunk_unit == 'year': - return add_years(date, total_size) - elif chunk_unit == 'month': - return add_months(date, total_size, cal) - elif chunk_unit == 'day': - return add_days(date, total_size, cal) - elif chunk_unit == 'hour': - return add_hours(date, total_size, cal) - else: - Log.critical('Chunk unit not valid: {0}'.format(chunk_unit)) - - -def add_years(date, number_of_years): - """ - Adds years to a date - - :param date: base date - :type date: datetime.datetime - :param number_of_years: number of years to add - :type number_of_years: int - :return: base date plus added years - :rtype: date - """ - return date + relativedelta(years=number_of_years) - - -def add_months(date, number_of_months, cal): - """ - Adds months to a date - - :param date: base date - :type date: datetime.datetime - :param number_of_months: number of months to add - :type number_of_months: int - :param cal: calendar to use - :type cal: str - :return: base date plus added months - :rtype: date - """ - result = date 
+ relativedelta(months=number_of_months) - if cal == 'noleap': - if result.month == 2 and result.day == 29: - result = result - relativedelta(days=1) - return result - - -def add_days(date, number_of_days, cal): - """ - Adds days to a date - - :param date: base date - :type date: datetime.datetime - :param number_of_days: number of days to add - :type number_of_days: int - :param cal: calendar to use - :type cal: str - :return: base date plus added days - :rtype: date - """ - result = date + relativedelta(days=number_of_days) - if cal == 'noleap': - year = date.year - if date.month > 2: - year += 1 - - while year <= result.year: - if calendar.isleap(year): - if result.year == year and result < datetime.datetime(year, 2, 29): - year += 1 - continue - result += relativedelta(days=1) - year += 1 - if result.month == 2 and result.day == 29: - result += relativedelta(days=1) - return result - - -def sub_days(start_date, number_of_days, cal): - """ - Substract days to a date - - :param start_date: base date - :type start_date: datetime.datetime - :param number_of_days: number of days to subtract - :type number_of_days: int - :param cal: calendar to use - :type cal: str - :return: base date minus subtracted days - :rtype: datetime.datetime - """ - result = start_date - relativedelta(days=number_of_days) - if cal == 'noleap': - # checks if crossing the day 29th - if start_date > result: - # case subtraction - while datetime.datetime(start_date.year, start_date.month, start_date.day) >= \ - datetime.datetime(result.year, result.month, result.day): - if calendar.isleap(start_date.year): - if start_date.month == 2 and start_date.day == 29: - result -= relativedelta(days=1) - start_date -= relativedelta(days=1) - else: - start_date -= relativedelta(months=1) - elif start_date < result: - # case addition - while datetime.datetime(start_date.year, start_date.month, start_date.day) <= \ - datetime.datetime(result.year, result.month, result.day): - if 
calendar.isleap(start_date.year): - if start_date.month == 2 and start_date.day == 29: - result += relativedelta(days=1) - start_date += relativedelta(days=1) - else: - start_date += relativedelta(months=1) - - return result - - -def add_hours(date, number_of_hours, cal): - """ - Adds hours to a date - - :param date: base date - :type date: datetime.datetime - :param number_of_hours: number of hours to add - :type number_of_hours: int - :param cal: calendar to use - :type cal: str - :return: base date plus added hours - :rtype: datetime - """ - result = date + relativedelta(hours=number_of_hours) - if cal == 'noleap': - year = date.year - if date.month > 2: - year += 1 - - while year <= result.year: - if calendar.isleap(year): - if result.year == year and result < datetime.datetime(year, 2, 29): - year += 1 - continue - result += relativedelta(days=1) - year += 1 - if result.month == 2 and result.day == 29: - result += relativedelta(days=1) - return result - - -def subs_dates(start_date, end_date, cal): - """ - Gets days between start_date and end_date - - :param start_date: interval's start date - :type start_date: datetime.datetime - :param end_date: interval's end date - :type end_date: datetime.datetime - :param cal: calendar to use - :type cal: str - :return: interval length in days - :rtype: int - """ - result = end_date - start_date - if cal == 'noleap': - year = start_date.year - if start_date.month > 2: - year += 1 - - while year <= end_date.year: - if calendar.isleap(year): - if end_date.year == year and end_date < datetime.datetime(year, 2, 29): - year += 1 - continue - result -= datetime.timedelta(days=1) - year += 1 - return result.days - - -def chunk_start_date(date, chunk, chunk_length, chunk_unit, cal): - """ - Gets chunk's interval start date - - :param date: start date for member - :type date: datetime.datetime - :param chunk: number of chunk - :type chunk: int - :param chunk_length: length of chunks - :type chunk_length: int - :param chunk_unit: 
chunk length unit - :type chunk_unit: str - :param cal: calendar to use - :type cal: str - :return: chunk's start date - :rtype: datetime.datetime - """ - chunk_1 = chunk - 1 - total_months = chunk_1 * chunk_length - result = add_time(date, total_months, chunk_unit, cal) - return result - - -def chunk_end_date(start_date, chunk_length, chunk_unit, cal): - """ - Gets chunk interval end date - - :param start_date: chunk's start date - :type start_date: datetime.datetime - :param chunk_length: length of the chunks - :type chunk_length: int - :param chunk_unit: chunk length unit - :type chunk_unit: str - :param cal: calendar to use - :type cal: str - :return: chunk's end date - :rtype: datetime.datetime - """ - return add_time(start_date, chunk_length, chunk_unit, cal) - - -def previous_day(date, cal): - """ - Gets previous day - - :param date: base date - :type date: datetime.datetime - :param cal: calendar to use - :type cal: str - :return: base date minus one day - :rtype: datetime.datetime - """ - return sub_days(date, 1, cal) - - -def parse_date(string_date): - """ - Parses a string into a datetime object - - :param string_date: string to parse - :type string_date: str - :rtype: datetime.datetime - """ - if string_date is None or string_date == '': - return None - length = len(string_date) - # Date and time can be given as year, year+month, year+month+day, year+month+day+hour or year+month+day+hour+minute - if length == 4: - return datetime.datetime.strptime(string_date, "%Y") - if length == 6: - return datetime.datetime.strptime(string_date, "%Y%m") - if length == 8: - return datetime.datetime.strptime(string_date, "%Y%m%d") - elif length == 10: - return datetime.datetime.strptime(string_date, "%Y%m%d%H") - elif length == 12: - return datetime.datetime.strptime(string_date, "%Y%m%d%H%M") - elif length == 14: - return datetime.datetime.strptime(string_date, "%Y%m%d%H%M%S") - elif length == 19: - return datetime.datetime.strptime(string_date, "%Y-%m-%d %H:%M:%S") - 
else: - raise ValueError("String '{0}' can not be converted to date".format(string_date)) - - -def date2str(date, date_format=''): - """ - Converts a datetime object to a str - - :param date_format: specifies format for date time convcersion. It can be H to show hours, - M to show hour and minute. Other values will return only the date. - :type date_format: str - :param date: date to convert - :type date: datetime.datetime - :rtype: str - """ - # Can not use strftime because it not works with dates prior to 1-1-1900 - if date is None: - return '' - if date_format == 'H': - return "{0:04}{1:02}{2:02}{3:02}".format(date.year, date.month, date.day, date.hour) - elif date_format == 'M': - return "{0:04}{1:02}{2:02}{3:02}{4:02}".format(date.year, date.month, date.day, date.hour, date.minute) - elif date_format == 'S': - return "{0:04}{1:02}{2:02}{3:02}{4:02}{5:02}".format(date.year, date.month, date.day, date.hour, date.minute, - date.second) - else: - return "{0:04}{1:02}{2:02}".format(date.year, date.month, date.day) - - -def sum_str_hours(str_hour1, str_hour2): - hours1, minutes1 = split_str_hours(str_hour1) - hours2, minutes2 = split_str_hours(str_hour2) - total_minutes = minutes1 + minutes2 + (hours1 * 60) + (hours2 * 60) - return "%02d:%02d" % (total_minutes / 60, total_minutes % 60) - - -def split_str_hours(str_hour): - str_splitted = str_hour.split(':') - if len(str_splitted) == 2: - return int(str_splitted[0]), int(str_splitted[1]) - raise Exception('Incorrect input. Usage: \'HH:MM\'') diff --git a/autosubmit/experiment/experiment_common.py b/autosubmit/experiment/experiment_common.py index 38bc38a62..43e78191f 100644 --- a/autosubmit/experiment/experiment_common.py +++ b/autosubmit/experiment/experiment_common.py @@ -22,7 +22,7 @@ Module containing functions to manage autosubmit's experiments. 
""" import string import autosubmit.database.db_common as db_common -from autosubmit.config.log import Log +from bscearth.utils.log import Log def new_experiment(description, version, test=False, operational=False): diff --git a/autosubmit/git/autosubmit_git.py b/autosubmit/git/autosubmit_git.py index 323d1cbb6..283644fc4 100644 --- a/autosubmit/git/autosubmit_git.py +++ b/autosubmit/git/autosubmit_git.py @@ -24,7 +24,7 @@ import subprocess import shutil from autosubmit.config.basicConfig import BasicConfig -from autosubmit.config.log import Log +from bscearth.utils.log import Log class AutosubmitGit: diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 0a04f360a..2de13b76a 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -27,7 +27,7 @@ import time from autosubmit.job.job_common import Status, Type from autosubmit.job.job_common import StatisticsSnippetBash, StatisticsSnippetPython, StatisticsSnippetR from autosubmit.config.basicConfig import BasicConfig -from autosubmit.date.chunk_date_lib import * +from bscearth.utils.date import * class Job(object): diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py index 5bdae1f69..a04435540 100644 --- a/autosubmit/job/job_list.py +++ b/autosubmit/job/job_list.py @@ -33,8 +33,8 @@ from shutil import move from autosubmit.job.job_common import Status, Type from autosubmit.job.job import Job from autosubmit.job.job_package import JobPackageSimple, JobPackageArray, JobPackageThread -from autosubmit.config.log import Log -from autosubmit.date.chunk_date_lib import date2str, parse_date, sum_str_hours +from bscearth.utils.log import Log +from bscearth.utils.date import date2str, parse_date, sum_str_hours from networkx import DiGraph from autosubmit.job.job_utils import transitive_reduction diff --git a/autosubmit/job/job_list_persistence.py b/autosubmit/job/job_list_persistence.py index 23008451c..9ff4adee7 100644 --- a/autosubmit/job/job_list_persistence.py +++ 
b/autosubmit/job/job_list_persistence.py @@ -21,7 +21,7 @@ from sys import setrecursionlimit import os -from autosubmit.config.log import Log +from bscearth.utils.log import Log from autosubmit.database.db_manager import DbManager diff --git a/autosubmit/job/job_package.py b/autosubmit/job/job_package.py index 510c60fd0..9f116dec6 100644 --- a/autosubmit/job/job_package.py +++ b/autosubmit/job/job_package.py @@ -27,10 +27,10 @@ import os import time import random from autosubmit.job.job_common import Status -from autosubmit.config.log import Log +from bscearth.utils.log import Log from autosubmit.job.job_exceptions import WrongTemplateException from autosubmit.job.job import Job -from autosubmit.date.chunk_date_lib import sum_str_hours +from bscearth.utils.date import sum_str_hours class JobPackageBase(object): diff --git a/autosubmit/monitor/monitor.py b/autosubmit/monitor/monitor.py index 27ad74aea..cd480e036 100644 --- a/autosubmit/monitor/monitor.py +++ b/autosubmit/monitor/monitor.py @@ -40,7 +40,7 @@ import subprocess from autosubmit.job.job_common import Status from autosubmit.config.basicConfig import BasicConfig -from autosubmit.config.log import Log +from bscearth.utils.log import Log class Monitor: diff --git a/autosubmit/notifications/mail_notifier.py b/autosubmit/notifications/mail_notifier.py index fb4872579..1e3791b16 100644 --- a/autosubmit/notifications/mail_notifier.py +++ b/autosubmit/notifications/mail_notifier.py @@ -20,7 +20,7 @@ import smtplib import email.utils from email.mime.text import MIMEText -from autosubmit.config.log import Log +from bscearth.utils.log import Log class MailNotifier: diff --git a/autosubmit/platforms/ecplatform.py b/autosubmit/platforms/ecplatform.py index 47875e142..586402add 100644 --- a/autosubmit/platforms/ecplatform.py +++ b/autosubmit/platforms/ecplatform.py @@ -21,7 +21,7 @@ import os import subprocess from autosubmit.platforms.paramiko_platform import ParamikoPlatform, ParamikoPlatformException -from 
autosubmit.config.log import Log +from bscearth.utils.log import Log class EcPlatform(ParamikoPlatform): diff --git a/autosubmit/platforms/locplatform.py b/autosubmit/platforms/locplatform.py index 6feda7827..222c6ed2f 100644 --- a/autosubmit/platforms/locplatform.py +++ b/autosubmit/platforms/locplatform.py @@ -24,7 +24,7 @@ import subprocess from autosubmit.platforms.paramiko_platform import ParamikoPlatform from autosubmit.config.basicConfig import BasicConfig -from autosubmit.config.log import Log +from bscearth.utils.log import Log class LocalPlatform(ParamikoPlatform): diff --git a/autosubmit/platforms/paramiko_platform.py b/autosubmit/platforms/paramiko_platform.py index 2aee83e0f..308ae1acf 100644 --- a/autosubmit/platforms/paramiko_platform.py +++ b/autosubmit/platforms/paramiko_platform.py @@ -4,11 +4,11 @@ import os import paramiko import datetime -from autosubmit.config.log import Log +from bscearth.utils.log import Log from autosubmit.job.job_common import Status from autosubmit.job.job_common import Type from autosubmit.platforms.platform import Platform -from autosubmit.date.chunk_date_lib import date2str +from bscearth.utils.date import date2str class ParamikoPlatform(Platform): diff --git a/autosubmit/platforms/pbsplatform.py b/autosubmit/platforms/pbsplatform.py index a515b512f..74479c79d 100644 --- a/autosubmit/platforms/pbsplatform.py +++ b/autosubmit/platforms/pbsplatform.py @@ -20,7 +20,7 @@ import textwrap import os from autosubmit.platforms.paramiko_platform import ParamikoPlatform, ParamikoPlatformException -from autosubmit.config.log import Log +from bscearth.utils.log import Log class PBSPlatform(ParamikoPlatform): diff --git a/autosubmit/platforms/platform.py b/autosubmit/platforms/platform.py index 518dcaa0e..3c9bf6501 100644 --- a/autosubmit/platforms/platform.py +++ b/autosubmit/platforms/platform.py @@ -2,7 +2,7 @@ from time import sleep import os -from autosubmit.config.log import Log +from bscearth.utils.log import Log from 
autosubmit.job.job_common import Status diff --git a/autosubmit/platforms/saga_platform.py b/autosubmit/platforms/saga_platform.py index 0975a22a1..afc0974be 100644 --- a/autosubmit/platforms/saga_platform.py +++ b/autosubmit/platforms/saga_platform.py @@ -5,8 +5,8 @@ from time import sleep import os import saga -from autosubmit.config.log import Log -from autosubmit.date.chunk_date_lib import date2str +from bscearth.utils.log import Log +from bscearth.utils.date import date2str from autosubmit.job.job_common import Status, Type from autosubmit.platforms.platform import Platform diff --git a/docs/source/codedoc/date.rst b/docs/source/codedoc/date.rst deleted file mode 100644 index 90738c2be..000000000 --- a/docs/source/codedoc/date.rst +++ /dev/null @@ -1,4 +0,0 @@ -autosubmit.date -=============== -.. automodule:: autosubmit.date.chunk_date_lib - :members: diff --git a/setup.py b/setup.py index 5aa452e37..96527c8f3 100644 --- a/setup.py +++ b/setup.py @@ -41,7 +41,7 @@ setup( keywords=['climate', 'weather', 'workflow', 'HPC'], install_requires=['argparse>=1.2,<2', 'python-dateutil>2', 'pydotplus>=2', 'pyparsing>=2.0.1', 'numpy', 'matplotlib', 'saga-python>=0.40', 'paramiko==1.15', - 'mock>=1.3.0', 'portalocker>=0.5.7', 'networkx'], + 'mock>=1.3.0', 'portalocker>=0.5.7', 'networkx', 'bscearth.utils'], extras_require={ 'dialog': ["python2-pythondialog>=3.3.0"] }, @@ -58,9 +58,5 @@ setup( 'docs/autosubmit.pdf' ] }, - scripts=['bin/autosubmit'], # data_files = [ - # ('', ['VERSION']), - # ('conf', ['lib/autosubmit/config/files/autosubmit.conf','lib/autosubmit/config/files/expdef.conf']), - # ('data', ['lib/autosubmit/database/data/autosubmit.sql']) # ] #entry_points = { - # 'console_scripts' : ['check_exp = bin/check_exp.py'] # 'gui_scripts' : ['monitor = monitor.py'] # } + scripts=['bin/autosubmit'] ) diff --git a/test/unit/test_chunk_date_lib.py b/test/unit/test_chunk_date_lib.py index 63ddfd9f1..af5101e0b 100644 --- a/test/unit/test_chunk_date_lib.py +++ 
b/test/unit/test_chunk_date_lib.py @@ -1,5 +1,5 @@ from unittest import TestCase -from autosubmit.date.chunk_date_lib import * +from bscearth.utils.date import * from datetime import datetime -- GitLab From b39afd5bb0d04fb79625f99e70d72a0b7ce9afd5 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Mon, 13 Feb 2017 18:19:18 +0100 Subject: [PATCH 29/60] Config parser replaced by bscearth.utils --- autosubmit/autosubmit.py | 6 +- autosubmit/config/config_common.py | 317 +++++---------------- autosubmit/config/parser_factory.py | 34 --- autosubmit/platforms/paramiko_submitter.py | 60 ++-- autosubmit/platforms/saga_submitter.py | 63 ++-- test/unit/test_autosubmit_ config.py | 170 +---------- test/unit/test_dic_jobs.py | 2 +- test/unit/test_job_list.py | 2 +- 8 files changed, 135 insertions(+), 519 deletions(-) delete mode 100644 autosubmit/config/parser_factory.py diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index bd105d779..de81a87bc 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -61,7 +61,7 @@ import saga from config.basicConfig import BasicConfig # noinspection PyPackageRequirements from config.config_common import AutosubmitConfig -from config.parser_factory import ConfigParserFactory +from bscearth.utils.config_parser import ConfigParserFactory from job.job_common import Status from git.autosubmit_git import AutosubmitGit from job.job_list import JobList @@ -2168,7 +2168,7 @@ class Autosubmit: def _change_conf(testid, hpc, start_date, member, chunks, branch, random_select=False): as_conf = AutosubmitConfig(testid, BasicConfig, ConfigParserFactory()) exp_parser = as_conf.get_parser(ConfigParserFactory(), as_conf.experiment_file) - if AutosubmitConfig.get_bool_option(exp_parser, 'rerun', "RERUN", True): + if exp_parser.get_bool_option('rerun', "RERUN", True): Log.error('Can not test a RERUN experiment') return False @@ -2178,7 +2178,7 @@ class Autosubmit: platforms_parser = as_conf.get_parser(ConfigParserFactory(), 
as_conf.platforms_file) test_platforms = list() for section in platforms_parser.sections(): - if AutosubmitConfig.get_option(platforms_parser, section, 'TEST_SUITE', 'false').lower() == 'true': + if platforms_parser.get_option(section, 'TEST_SUITE', 'false').lower() == 'true': test_platforms.append(section) if len(test_platforms) == 0: Log.critical('No test HPC defined') diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py index 0332788ea..6bb1e2cfd 100644 --- a/autosubmit/config/config_common.py +++ b/autosubmit/config/config_common.py @@ -142,7 +142,7 @@ class AutosubmitConfig(object): :return: wallclock time :rtype: str """ - return AutosubmitConfig.get_option(self._jobs_parser, section, 'WALLCLOCK', '') + return self._jobs_parser.get_option(section, 'WALLCLOCK', '') def get_processors(self, section): """ @@ -152,7 +152,7 @@ class AutosubmitConfig(object): :return: wallclock time :rtype: str """ - return str(AutosubmitConfig.get_option(self._jobs_parser, section, 'PROCESSORS', 1)) + return str(self._jobs_parser.get_option(section, 'PROCESSORS', 1)) def get_threads(self, section): """ @@ -162,7 +162,7 @@ class AutosubmitConfig(object): :return: threads needed :rtype: str """ - return int(AutosubmitConfig.get_option(self._jobs_parser, section, 'THREADS', 1)) + return int(self._jobs_parser.get_option(section, 'THREADS', 1)) def get_tasks(self, section): """ @@ -172,7 +172,7 @@ class AutosubmitConfig(object): :return: tasks (processes) per host :rtype: int """ - return int(AutosubmitConfig.get_option(self._jobs_parser, section, 'TASKS', 0)) + return int(self._jobs_parser.get_option(section, 'TASKS', 0)) def get_scratch_free_space(self, section): """ @@ -182,7 +182,7 @@ class AutosubmitConfig(object): :return: percentage of scratch free space needed :rtype: int """ - return int(AutosubmitConfig.get_option(self._jobs_parser, section, 'SCRATCH_FREE_SPACE', 0)) + return int(self._jobs_parser.get_option(section, 'SCRATCH_FREE_SPACE', 0)) 
def get_memory(self, section): """ @@ -192,7 +192,7 @@ class AutosubmitConfig(object): :return: memory needed :rtype: str """ - return str(AutosubmitConfig.get_option(self._jobs_parser, section, 'MEMORY', '')) + return str(self._jobs_parser.get_option(section, 'MEMORY', '')) def get_memory_per_task(self, section): """ @@ -202,7 +202,7 @@ class AutosubmitConfig(object): :return: memory per task needed :rtype: str """ - return str(AutosubmitConfig.get_option(self._jobs_parser, section, 'MEMORY_PER_TASK', '')) + return str(self._jobs_parser.get_option(section, 'MEMORY_PER_TASK', '')) def check_conf_files(self): """ @@ -234,12 +234,12 @@ class AutosubmitConfig(object): result = True self._conf_parser.read(self._conf_parser_file) - result = result and AutosubmitConfig.check_exists(self._conf_parser, 'config', 'AUTOSUBMIT_VERSION') - result = result and AutosubmitConfig.check_is_int(self._conf_parser, 'config', 'MAXWAITINGJOBS', True) - result = result and AutosubmitConfig.check_is_int(self._conf_parser, 'config', 'TOTALJOBS', True) - result = result and AutosubmitConfig.check_is_int(self._conf_parser, 'config', 'SAFETYSLEEPTIME', True) - result = result and AutosubmitConfig.check_is_int(self._conf_parser, 'config', 'RETRIALS', True) - result = result and AutosubmitConfig.check_is_boolean(self._conf_parser, 'mail', 'NOTIFICATIONS', False) + result = result and self._conf_parser.check_exists('config', 'AUTOSUBMIT_VERSION') + result = result and self._conf_parser.check_is_int('config', 'MAXWAITINGJOBS', True) + result = result and self._conf_parser.check_is_int('config', 'TOTALJOBS', True) + result = result and self._conf_parser.check_is_int('config', 'SAFETYSLEEPTIME', True) + result = result and self._conf_parser.check_is_int('config', 'RETRIALS', True) + result = result and self._conf_parser.check_is_boolean('mail', 'NOTIFICATIONS', False) result = result and self.is_valid_communications_library() result = result and self.is_valid_storage_type() @@ -270,20 +270,20 @@ 
class AutosubmitConfig(object): Log.error('There are repeated platforms names') for section in self._platforms_parser.sections(): - result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'TYPE') - platform_type = AutosubmitConfig.get_option(self._platforms_parser, section, 'TYPE', '').lower() + result = result and self._platforms_parser.check_exists(section, 'TYPE') + platform_type = self._platforms_parser.get_option(section, 'TYPE', '').lower() if platform_type != 'ps': - result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'PROJECT') - result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'USER') - - result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'HOST') - result = result and AutosubmitConfig.check_exists(self._platforms_parser, section, 'SCRATCH_DIR') - result = result and AutosubmitConfig.check_is_boolean(self._platforms_parser, section, - 'ADD_PROJECT_TO_HOST', False) - result = result and AutosubmitConfig.check_is_boolean(self._platforms_parser, section, 'TEST_SUITE', False) - result = result and AutosubmitConfig.check_is_int(self._platforms_parser, section, 'MAX_WAITING_JOBS', - False) - result = result and AutosubmitConfig.check_is_int(self._platforms_parser, section, 'TOTAL_JOBS', False) + result = result and self._platforms_parser.check_exists(section, 'PROJECT') + result = result and self._platforms_parser.check_exists(section, 'USER') + + result = result and self._platforms_parser.check_exists(section, 'HOST') + result = result and self._platforms_parser.check_exists(section, 'SCRATCH_DIR') + result = result and self._platforms_parser.check_is_boolean(section, + 'ADD_PROJECT_TO_HOST', False) + result = result and self._platforms_parser.check_is_boolean(section, 'TEST_SUITE', False) + result = result and self._platforms_parser.check_is_int(section, 'MAX_WAITING_JOBS', + False) + result = result and 
self._platforms_parser.check_is_int(section, 'TOTAL_JOBS', False) if not result: Log.critical("{0} is not a valid config file".format(os.path.basename(self._platforms_parser_file))) @@ -310,14 +310,14 @@ class AutosubmitConfig(object): Log.error('There are repeated job names') for section in sections: - result = result and AutosubmitConfig.check_exists(parser, section, 'FILE') - result = result and AutosubmitConfig.check_is_boolean(parser, section, 'RERUN_ONLY', False) + result = result and parser.check_exists(section, 'FILE') + result = result and parser.check_is_boolean(section, 'RERUN_ONLY', False) if parser.has_option(section, 'PLATFORM'): - result = result and AutosubmitConfig.check_is_choice(parser, section, 'PLATFORM', False, platforms) + result = result and parser.check_is_choice(section, 'PLATFORM', False, platforms) if parser.has_option(section, 'DEPENDENCIES'): - for dependency in str(AutosubmitConfig.get_option(parser, section, 'DEPENDENCIES', '')).split(' '): + for dependency in str(parser.get_option(section, 'DEPENDENCIES', '')).split(' '): if '-' in dependency: dependency = dependency.split('-')[0] elif '+' in dependency: @@ -328,15 +328,15 @@ class AutosubmitConfig(object): dependency)) if parser.has_option(section, 'RERUN_DEPENDENCIES'): - for dependency in str(AutosubmitConfig.get_option(parser, section, 'RERUN_DEPENDENCIES', - '')).split(' '): + for dependency in str(parser.get_option(section, 'RERUN_DEPENDENCIES', + '')).split(' '): if '-' in dependency: dependency = dependency.split('-')[0] if dependency not in sections: Log.error( 'Job {0} depends on job {1} that is not defined. 
It will be ignored.'.format(section, dependency)) - result = result and AutosubmitConfig.check_is_choice(parser, section, 'RUNNING', False, + result = result and parser.check_is_choice(section, 'RUNNING', False, ['once', 'date', 'member', 'chunk']) if not result: @@ -356,35 +356,35 @@ class AutosubmitConfig(object): result = True parser = self._exp_parser - result = result and AutosubmitConfig.check_exists(parser, 'DEFAULT', 'EXPID') - result = result and AutosubmitConfig.check_exists(parser, 'DEFAULT', 'HPCARCH') + result = result and parser.check_exists('DEFAULT', 'EXPID') + result = result and parser.check_exists('DEFAULT', 'HPCARCH') - result = result and AutosubmitConfig.check_exists(parser, 'experiment', 'DATELIST') - result = result and AutosubmitConfig.check_exists(parser, 'experiment', 'MEMBERS') - result = result and AutosubmitConfig.check_is_choice(parser, 'experiment', 'CHUNKSIZEUNIT', True, - ['year', 'month', 'day', 'hour']) - result = result and AutosubmitConfig.check_is_int(parser, 'experiment', 'CHUNKSIZE', True) - result = result and AutosubmitConfig.check_is_int(parser, 'experiment', 'NUMCHUNKS', True) - result = result and AutosubmitConfig.check_is_choice(parser, 'experiment', 'CALENDAR', True, - ['standard', 'noleap']) + result = result and parser.check_exists('experiment', 'DATELIST') + result = result and parser.check_exists('experiment', 'MEMBERS') + result = result and parser.check_is_choice('experiment', 'CHUNKSIZEUNIT', True, + ['year', 'month', 'day', 'hour']) + result = result and parser.check_is_int('experiment', 'CHUNKSIZE', True) + result = result and parser.check_is_int('experiment', 'NUMCHUNKS', True) + result = result and parser.check_is_choice('experiment', 'CALENDAR', True, + ['standard', 'noleap']) - result = result and AutosubmitConfig.check_is_boolean(parser, 'rerun', 'RERUN', True) + result = result and parser.check_is_boolean('rerun', 'RERUN', True) - if AutosubmitConfig.check_is_choice(parser, 'project', 'PROJECT_TYPE', 
True, - ['none', 'git', 'svn', 'local']): - project_type = AutosubmitConfig.get_option(parser, 'project', 'PROJECT_TYPE', '') + if parser.check_is_choice('project', 'PROJECT_TYPE', True, + ['none', 'git', 'svn', 'local']): + project_type = parser.get_option('project', 'PROJECT_TYPE', '') if project_type == 'git': - result = result and AutosubmitConfig.check_exists(parser, 'git', 'PROJECT_ORIGIN') - result = result and AutosubmitConfig.check_exists(parser, 'git', 'PROJECT_BRANCH') + result = result and parser.check_exists('git', 'PROJECT_ORIGIN') + result = result and parser.check_exists('git', 'PROJECT_BRANCH') elif project_type == 'svn': - result = result and AutosubmitConfig.check_exists(parser, 'svn', 'PROJECT_URL') - result = result and AutosubmitConfig.check_exists(parser, 'svn', 'PROJECT_REVISION') + result = result and parser.check_exists('svn', 'PROJECT_URL') + result = result and parser.check_exists('svn', 'PROJECT_REVISION') elif project_type == 'local': - result = result and AutosubmitConfig.check_exists(parser, 'local', 'PROJECT_PATH') + result = result and parser.check_exists('local', 'PROJECT_PATH') if project_type != 'none': - result = result and AutosubmitConfig.check_exists(parser, 'project_files', 'FILE_PROJECT_CONF') + result = result and parser.check_exists('project_files', 'FILE_PROJECT_CONF') else: result = False @@ -510,7 +510,7 @@ class AutosubmitConfig(object): :return: path to project config file :rtype: str """ - return AutosubmitConfig.get_option(self._exp_parser, 'project_files', 'FILE_JOBS_CONF', '') + return self._exp_parser.get_option('project_files', 'FILE_JOBS_CONF', '') def get_git_project_origin(self): """ @@ -519,7 +519,7 @@ class AutosubmitConfig(object): :return: git origin :rtype: str """ - return AutosubmitConfig.get_option(self._exp_parser, 'git', 'PROJECT_ORIGIN', '') + return self._exp_parser.get_option('git', 'PROJECT_ORIGIN', '') def get_git_project_branch(self): """ @@ -528,7 +528,7 @@ class AutosubmitConfig(object): 
:return: git branch :rtype: str """ - return AutosubmitConfig.get_option(self._exp_parser, 'git', 'PROJECT_BRANCH', None) + return self._exp_parser.get_option('git', 'PROJECT_BRANCH', None) def get_git_project_commit(self): """ @@ -537,7 +537,7 @@ class AutosubmitConfig(object): :return: git commit :rtype: str """ - return AutosubmitConfig.get_option(self._exp_parser, 'git', 'PROJECT_COMMIT', None) + return self._exp_parser.get_option('git', 'PROJECT_COMMIT', None) def get_project_destination(self): """ @@ -667,7 +667,7 @@ class AutosubmitConfig(object): :return: initial chunk :rtype: int """ - chunk_ini = self.get_option(self._exp_parser, 'experiment', 'CHUNKINI', default) + chunk_ini = self._exp_parser.get_option('experiment', 'CHUNKINI', default) if chunk_ini == '': return default return int(chunk_ini) @@ -780,7 +780,7 @@ class AutosubmitConfig(object): :rtype: str """ - return self.get_option(self._conf_parser, 'config', 'MAX_WALLCLOCK', '') + return self._conf_parser.get_option('config', 'MAX_WALLCLOCK', '') def get_max_waiting_jobs(self): """ @@ -798,7 +798,7 @@ class AutosubmitConfig(object): :return: default type such as bash, python, r.. 
:rtype: str """ - return self.get_option(self._exp_parser, 'project_files', 'JOB_SCRIPTS_TYPE', 'bash') + return self._exp_parser.get_option('project_files', 'JOB_SCRIPTS_TYPE', 'bash') def get_safetysleeptime(self): """ @@ -837,7 +837,7 @@ class AutosubmitConfig(object): :return: if notifications :rtype: bool """ - return self.get_option(self._conf_parser, 'mail', 'NOTIFICATIONS', 'false').lower() + return self._conf_parser.get_option('mail', 'NOTIFICATIONS', 'false').lower() def get_copy_remote_logs(self): """ @@ -846,7 +846,7 @@ class AutosubmitConfig(object): :return: if logs local copy :rtype: bool """ - return self.get_option(self._conf_parser, 'storage', 'COPY_REMOTE_LOGS', 'true').lower() + return self._conf_parser.get_option('storage', 'COPY_REMOTE_LOGS', 'true').lower() def get_mails_to(self): """ @@ -855,7 +855,7 @@ class AutosubmitConfig(object): :return: mail address :rtype: [str] """ - return [str(x) for x in self.get_option(self._conf_parser, 'mail', 'TO', '').split(' ')] + return [str(x) for x in self._conf_parser.get_option('mail', 'TO', '').split(' ')] def get_communications_library(self): """ @@ -864,7 +864,7 @@ class AutosubmitConfig(object): :return: communications library :rtype: str """ - return self.get_option(self._conf_parser, 'communications', 'API', 'paramiko').lower() + return self._conf_parser.get_option('communications', 'API', 'paramiko').lower() def get_storage_type(self): """ @@ -873,7 +873,7 @@ class AutosubmitConfig(object): :return: communications library :rtype: str """ - return self.get_option(self._conf_parser, 'storage', 'TYPE', 'pkl').lower() + return self._conf_parser.get_option('storage', 'TYPE', 'pkl').lower() @staticmethod def is_valid_mail_address(mail_address): @@ -891,7 +891,7 @@ class AutosubmitConfig(object): return storage_type in ['pkl', 'db'] def is_valid_git_repository(self): - origin_exists = self.check_exists(self._exp_parser, 'git', 'PROJECT_ORIGIN') + origin_exists = self._exp_parser.check_exists('git', 
'PROJECT_ORIGIN') branch = self.get_git_project_branch() commit = self.get_git_project_commit() return origin_exists and (branch is not None or commit is not None) @@ -911,188 +911,3 @@ class AutosubmitConfig(object): parser.optionxform = str parser.read(file_path) return parser - - @staticmethod - def get_option(parser, section, option, default): - """ - Gets an option from given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: object - :return: option value - :rtype: str - """ - if parser.has_option(section, option): - return parser.get(section, option) - else: - return default - - @staticmethod - def get_bool_option(parser, section, option, default): - """ - Gets a boolean option from given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: bool - :return: option value - :rtype: bool - """ - if parser.has_option(section, option): - return parser.get(section, option).lower().strip() == 'true' - else: - return default - - @staticmethod - def check_exists(parser, section, option): - """ - Checks if an option exists in given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :return: True if option exists, False otherwise - :rtype: bool - """ - if parser.has_option(section, option): - return True - else: - Log.error('Option {0} in section {1} not found'.format(option, section)) - return False - - @staticmethod - def check_is_boolean(parser, section, option, must_exist): - 
""" - Checks if an option is a boolean value in given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :return: True if option value is boolean, False otherwise - :rtype: bool - """ - if must_exist and not AutosubmitConfig.check_exists(parser, section, option): - Log.error('Option {0} in section {1} must exist'.format(option, section)) - return False - if AutosubmitConfig.get_option(parser, section, option, 'false').lower() not in ['false', 'true']: - Log.error('Option {0} in section {1} must be true or false'.format(option, section)) - return False - return True - - @staticmethod - def check_is_choice(parser, section, option, must_exist, choices): - """ - Checks if an option is a valid choice in given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :param choices: valid choices - :type choices: list - :return: True if option value is a valid choice, False otherwise - :rtype: bool - """ - if must_exist and not AutosubmitConfig.check_exists(parser, section, option): - return False - value = AutosubmitConfig.get_option(parser, section, option, choices[0]) - if value not in choices: - Log.error('Value {2} in option {0} in section {1} is not a valid choice'.format(option, section, value)) - return False - return True - - @staticmethod - def check_is_int(parser, section, option, must_exist): - """ - Checks if an option is an integer value in given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to check - :type 
option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :return: True if option value is integer, False otherwise - :rtype: bool - """ - if must_exist and not AutosubmitConfig.check_exists(parser, section, option): - return False - value = AutosubmitConfig.get_option(parser, section, option, '1') - try: - int(value) - except ValueError: - Log.error('Option {0} in section {1} is not valid an integer'.format(option, section)) - return False - return True - - @staticmethod - def check_regex(parser, section, option, must_exist, regex): - """ - Checks if an option complies with a regular expression in given parser - - :param parser: parser to use - :type parser: SafeConfigParser - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :param regex: regular expression to check - :type regex: str - :return: True if option complies with regex, False otherwise - :rtype: bool - """ - if must_exist and not AutosubmitConfig.check_exists(parser, section, option): - return False - prog = re.compile(regex) - value = AutosubmitConfig.get_option(parser, section, option, '1') - if not prog.match(value): - Log.error('Option {0} in section {1} is not valid: {2}'.format(option, section, value)) - return False - return True - - @staticmethod - def check_json(key, value): - """ - Checks if value is a valid json - - :param key: key to check - :type key: str - :param value: value - :type value: str - :return: True if value is a valid json, False otherwise - :rtype: bool - """ - # noinspection PyBroadException - try: - nestedExpr('[', ']').parseString(value).asList() - return True - except: - Log.error("Invalid value {0}: {1}", key, value) - return False diff --git a/autosubmit/config/parser_factory.py b/autosubmit/config/parser_factory.py deleted file mode 100644 index 35841cd10..000000000 --- 
a/autosubmit/config/parser_factory.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 Earth Sciences Department, BSC-CNS - -# This file is part of Autosubmit. - -# Autosubmit is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# Autosubmit is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with Autosubmit. If not, see . - -try: - # noinspection PyCompatibility - from configparser import SafeConfigParser -except ImportError: - # noinspection PyCompatibility - from ConfigParser import SafeConfigParser - - -class ConfigParserFactory: - - def __init__(self): - pass - - def create_parser(self): - return SafeConfigParser() diff --git a/autosubmit/platforms/paramiko_submitter.py b/autosubmit/platforms/paramiko_submitter.py index 1281f1731..8e4d95bc2 100644 --- a/autosubmit/platforms/paramiko_submitter.py +++ b/autosubmit/platforms/paramiko_submitter.py @@ -56,7 +56,7 @@ class ParamikoSubmitter(Submitter): job_parser = asconf.jobs_parser for job in job_parser.sections(): - hpc = AutosubmitConfig.get_option(job_parser, job, 'PLATFORM', hpcarch).lower() + hpc = job_parser.get_option(job, 'PLATFORM', hpcarch).lower() if hpc not in platforms_used: platforms_used.append(hpc) @@ -78,8 +78,8 @@ class ParamikoSubmitter(Submitter): if section.lower() not in platforms_used: continue - platform_type = AutosubmitConfig.get_option(parser, section, 'TYPE', '').lower() - platform_version = AutosubmitConfig.get_option(parser, section, 'VERSION', '') + platform_type = parser.get_option(section, 'TYPE', '').lower() + 
platform_version = parser.get_option(section, 'VERSION', '') try: if platform_type == 'pbs': remote_platform = PBSPlatform(asconf.expid, section.lower(), BasicConfig, platform_version) @@ -103,33 +103,33 @@ class ParamikoSubmitter(Submitter): remote_platform.type = platform_type remote_platform._version = platform_version - if AutosubmitConfig.get_option(parser, section, 'ADD_PROJECT_TO_HOST', '').lower() == 'true': - host = '{0}-{1}'.format(AutosubmitConfig.get_option(parser, section, 'HOST', None), - AutosubmitConfig.get_option(parser, section, 'PROJECT', None)) + if parser.get_option(section, 'ADD_PROJECT_TO_HOST', '').lower() == 'true': + host = '{0}-{1}'.format(parser.get_option(section, 'HOST', None), + parser.get_option(section, 'PROJECT', None)) else: - host = AutosubmitConfig.get_option(parser, section, 'HOST', None) + host = parser.get_option(section, 'HOST', None) remote_platform.host = host - remote_platform.max_wallclock = AutosubmitConfig.get_option(parser, section, 'MAX_WALLCLOCK', - asconf.get_max_wallclock()) - remote_platform.max_waiting_jobs = int(AutosubmitConfig.get_option(parser, section, 'MAX_WAITING_JOBS', - asconf.get_max_waiting_jobs())) - remote_platform.total_jobs = int(AutosubmitConfig.get_option(parser, section, 'TOTAL_JOBS', - asconf.get_total_jobs())) - remote_platform.hyperthreading = AutosubmitConfig.get_option(parser, section, 'HYPERTHREADING', - 'false').lower() - remote_platform.project = AutosubmitConfig.get_option(parser, section, 'PROJECT', None) - remote_platform.budget = AutosubmitConfig.get_option(parser, section, 'BUDGET', remote_platform.project) - remote_platform.reservation = AutosubmitConfig.get_option(parser, section, 'RESERVATION', '') - remote_platform.exclusivity = AutosubmitConfig.get_option(parser, section, 'EXCLUSIVITY', '').lower() - remote_platform.user = AutosubmitConfig.get_option(parser, section, 'USER', None) - remote_platform.scratch = AutosubmitConfig.get_option(parser, section, 'SCRATCH_DIR', None) - 
remote_platform._default_queue = AutosubmitConfig.get_option(parser, section, 'QUEUE', None) - remote_platform._serial_queue = AutosubmitConfig.get_option(parser, section, 'SERIAL_QUEUE', None) - remote_platform.processors_per_node = AutosubmitConfig.get_option(parser, section, 'PROCESSORS_PER_NODE', - None) - remote_platform.scratch_free_space = AutosubmitConfig.get_option(parser, section, 'SCRATCH_FREE_SPACE', - None) + remote_platform.max_wallclock = parser.get_option(section, 'MAX_WALLCLOCK', + asconf.get_max_wallclock()) + remote_platform.max_waiting_jobs = int(parser.get_option(section, 'MAX_WAITING_JOBS', + asconf.get_max_waiting_jobs())) + remote_platform.total_jobs = int(parser.get_option(section, 'TOTAL_JOBS', + asconf.get_total_jobs())) + remote_platform.hyperthreading = parser.get_option(section, 'HYPERTHREADING', + 'false').lower() + remote_platform.project = parser.get_option(section, 'PROJECT', None) + remote_platform.budget = parser.get_option(section, 'BUDGET', remote_platform.project) + remote_platform.reservation = parser.get_option(section, 'RESERVATION', '') + remote_platform.exclusivity = parser.get_option(section, 'EXCLUSIVITY', '').lower() + remote_platform.user = parser.get_option(section, 'USER', None) + remote_platform.scratch = parser.get_option(section, 'SCRATCH_DIR', None) + remote_platform._default_queue = parser.get_option(section, 'QUEUE', None) + remote_platform._serial_queue = parser.get_option(section, 'SERIAL_QUEUE', None) + remote_platform.processors_per_node = parser.get_option(section, 'PROCESSORS_PER_NODE', + None) + remote_platform.scratch_free_space = parser.get_option(section, 'SCRATCH_FREE_SPACE', + None) remote_platform.root_dir = os.path.join(remote_platform.scratch, remote_platform.project, remote_platform.user, remote_platform.expid) remote_platform.update_cmds() @@ -137,8 +137,8 @@ class ParamikoSubmitter(Submitter): for section in parser.sections(): if parser.has_option(section, 'SERIAL_PLATFORM'): - 
platforms[section.lower()].serial_platform = platforms[AutosubmitConfig.get_option(parser, section, - 'SERIAL_PLATFORM', - None).lower()] + platforms[section.lower()].serial_platform = platforms[parser.get_option(section, + 'SERIAL_PLATFORM', + None).lower()] self.platforms = platforms diff --git a/autosubmit/platforms/saga_submitter.py b/autosubmit/platforms/saga_submitter.py index 14f95cd08..cec9b32ce 100644 --- a/autosubmit/platforms/saga_submitter.py +++ b/autosubmit/platforms/saga_submitter.py @@ -33,6 +33,7 @@ class SagaSubmitter(Submitter): """ Class to manage the experiments platform """ + def load_platforms(self, asconf, retries=10): """ Create all the platforms object that will be used by the experiment @@ -61,7 +62,7 @@ class SagaSubmitter(Submitter): job_parser = asconf.jobs_parser for job in job_parser.sections(): - hpc = AutosubmitConfig.get_option(job_parser, job, 'PLATFORM', hpcarch).lower() + hpc = job_parser.get_option(job, 'PLATFORM', hpcarch).lower() if hpc not in platforms_used: platforms_used.append(hpc) @@ -102,12 +103,12 @@ class SagaSubmitter(Submitter): if section.lower() not in platforms_used: continue - platform_type = AutosubmitConfig.get_option(parser, section, 'TYPE', '').lower() + platform_type = parser.get_option(section, 'TYPE', '').lower() remote_platform = SagaPlatform(asconf.expid, section.lower(), BasicConfig) remote_platform.type = platform_type - platform_version = AutosubmitConfig.get_option(parser, section, 'VERSION', '') + platform_version = parser.get_option(section, 'VERSION', '') if platform_type == 'pbs': adaptor = 'pbs+ssh' elif platform_type == 'sge': @@ -121,7 +122,7 @@ class SagaSubmitter(Submitter): adaptor = 'lsf+ssh' elif platform_type == 'ecaccess': adaptor = 'ecaccess' - remote_platform.scheduler = AutosubmitConfig.get_option(parser, section, 'SCHEDULER', 'pbs').lower() + remote_platform.scheduler = parser.get_option(section, 'SCHEDULER', 'pbs').lower() elif platform_type == 'slurm': adaptor = 'slurm+ssh' elif 
platform_type == '': @@ -129,15 +130,15 @@ class SagaSubmitter(Submitter): else: adaptor = platform_type - if AutosubmitConfig.get_option(parser, section, 'ADD_PROJECT_TO_HOST', '').lower() == 'true': - host = '{0}-{1}'.format(AutosubmitConfig.get_option(parser, section, 'HOST', None), - AutosubmitConfig.get_option(parser, section, 'PROJECT', None)) + if parser.get_option(section, 'ADD_PROJECT_TO_HOST', '').lower() == 'true': + host = '{0}-{1}'.format(parser.get_option(section, 'HOST', None), + parser.get_option(section, 'PROJECT', None)) else: - host = AutosubmitConfig.get_option(parser, section, 'HOST', None) + host = parser.get_option(section, 'HOST', None) if adaptor.endswith('ssh'): ctx = saga.Context('ssh') - ctx.user_id = AutosubmitConfig.get_option(parser, section, 'USER', None) + ctx.user_id = parser.get_option(section, 'USER', None) session = saga.Session(False) session.add_context(ctx) else: @@ -161,34 +162,32 @@ class SagaSubmitter(Submitter): remote_platform.service._adaptor.host = remote_platform.host # noinspection PyProtectedMember remote_platform.service._adaptor.scheduler = remote_platform.scheduler - remote_platform.max_wallclock = AutosubmitConfig.get_option(parser, section, 'MAX_WALLCLOCK', - asconf.get_max_wallclock()) - remote_platform.max_waiting_jobs = int(AutosubmitConfig.get_option(parser, section, 'MAX_WAITING_JOBS', - asconf.get_max_waiting_jobs())) - remote_platform.total_jobs = int(AutosubmitConfig.get_option(parser, section, 'TOTAL_JOBS', - asconf.get_total_jobs())) - remote_platform.project = AutosubmitConfig.get_option(parser, section, 'PROJECT', None) - remote_platform.budget = AutosubmitConfig.get_option(parser, section, 'BUDGET', remote_platform.project) - remote_platform.reservation = AutosubmitConfig.get_option(parser, section, 'RESERVATION', '') - remote_platform.exclusivity = AutosubmitConfig.get_option(parser, section, 'EXCLUSIVITY', '').lower() - remote_platform.user = AutosubmitConfig.get_option(parser, section, 'USER', 
None) - remote_platform.scratch = AutosubmitConfig.get_option(parser, section, 'SCRATCH_DIR', None) - remote_platform._default_queue = AutosubmitConfig.get_option(parser, section, 'QUEUE', None) - remote_platform._serial_queue = AutosubmitConfig.get_option(parser, section, 'SERIAL_QUEUE', None) - remote_platform.processors_per_node = AutosubmitConfig.get_option(parser, section, 'PROCESSORS_PER_NODE', - None) - remote_platform.scratch_free_space = AutosubmitConfig.get_option(parser, section, 'SCRATCH_FREE_SPACE', - None) + remote_platform.max_wallclock = parser.get_option(section, 'MAX_WALLCLOCK', + asconf.get_max_wallclock()) + remote_platform.max_waiting_jobs = int(parser.get_option(section, 'MAX_WAITING_JOBS', + asconf.get_max_waiting_jobs())) + remote_platform.total_jobs = int(parser.get_option(section, 'TOTAL_JOBS', + asconf.get_total_jobs())) + remote_platform.project = parser.get_option(section, 'PROJECT', None) + remote_platform.budget = parser.get_option(section, 'BUDGET', remote_platform.project) + remote_platform.reservation = parser.get_option(section, 'RESERVATION', '') + remote_platform.exclusivity = parser.get_option(section, 'EXCLUSIVITY', '').lower() + remote_platform.user = parser.get_option(section, 'USER', None) + remote_platform.scratch = parser.get_option(section, 'SCRATCH_DIR', None) + remote_platform._default_queue = parser.get_option(section, 'QUEUE', None) + remote_platform._serial_queue = parser.get_option(section, 'SERIAL_QUEUE', None) + remote_platform.processors_per_node = parser.get_option(section, 'PROCESSORS_PER_NODE', + None) + remote_platform.scratch_free_space = parser.get_option(section, 'SCRATCH_FREE_SPACE', + None) remote_platform.root_dir = os.path.join(remote_platform.scratch, remote_platform.project, remote_platform.user, remote_platform.expid) platforms[section.lower()] = remote_platform for section in parser.sections(): if parser.has_option(section, 'SERIAL_PLATFORM'): - platforms[section.lower()].serial_platform = 
platforms[AutosubmitConfig.get_option(parser, section, - 'SERIAL_PLATFORM', - None).lower()] + platforms[section.lower()].serial_platform = platforms[parser.get_option(section, + 'SERIAL_PLATFORM', + None).lower()] self.platforms = platforms - - diff --git a/test/unit/test_autosubmit_ config.py b/test/unit/test_autosubmit_ config.py index 1eb237d67..34d3290a9 100644 --- a/test/unit/test_autosubmit_ config.py +++ b/test/unit/test_autosubmit_ config.py @@ -1,6 +1,6 @@ from unittest import TestCase from autosubmit.config.config_common import AutosubmitConfig -from autosubmit.config.parser_factory import ConfigParserFactory +from bscearth.utils.config_parser import ConfigParserFactory from mock import Mock from mock import patch from mock import mock_open @@ -53,40 +53,6 @@ class TestAutosubmitConfig(TestCase): factory_mock.create_parser.assert_called_with() parser_mock.read.assert_called_with(file_path) - def test_get_option(self): - # arrange - section = 'any-section' - option = 'any-option' - default = 'dummy-default' - option_value = 'dummy-value' - - parser_mock = self._create_parser_mock(True, option_value) - - # act - returned_option = AutosubmitConfig.get_option(parser_mock, section, option, default) - - # assert - parser_mock.has_option.assert_called_once_with(section, option) - self.assertTrue(isinstance(returned_option, str)) - self.assertNotEqual(default, returned_option) - self.assertEqual(option_value, returned_option) - - def test_get_option_case_default(self): - # arrange - section = 'any-section' - option = 'any-option' - default = 'dummy-default' - - parser_mock = self._create_parser_mock(False) - - # act - returned_option = AutosubmitConfig.get_option(parser_mock, section, option, default) - - # assert - parser_mock.has_option.assert_called_once_with(section, option) - self.assertTrue(isinstance(returned_option, str)) - self.assertEqual(default, returned_option) - def test_experiment_file(self): self.assertEqual(self.config.experiment_file, 
os.path.join(FakeBasicConfig.LOCAL_ROOT_DIR, self.any_expid, "conf", @@ -176,7 +142,7 @@ class TestAutosubmitConfig(TestCase): # arrange parser_mock = self._create_parser_mock(True) # act - exists = AutosubmitConfig.check_exists(parser_mock, self.section, self.option) + exists = parser_mock.check_exists(self.section, self.option) # assert parser_mock.has_option.assert_called_once_with(self.section, self.option) self.assertTrue(exists) @@ -185,7 +151,7 @@ class TestAutosubmitConfig(TestCase): # arrange parser_mock = self._create_parser_mock(False) # act - exists = AutosubmitConfig.check_exists(parser_mock, self.section, self.option) + exists = parser_mock.check_exists(self.section, self.option) # assert parser_mock.has_option.assert_called_once_with(self.section, self.option) self.assertFalse(exists) @@ -281,136 +247,6 @@ class TestAutosubmitConfig(TestCase): for i in range(1, 4): self.assertEquals(project_parameters.get('dummy_key' + str(i)), 'dummy_value' + str(i)) - def test_check_json(self): - # arrange - valid_json = '[it_is_a_sample", "true]' - invalid_json = '{[[dummy]random}' - - # act - should_be_true = AutosubmitConfig.check_json('random_key', valid_json) - should_be_false = AutosubmitConfig.check_json('random_key', invalid_json) - - # assert - self.assertTrue(should_be_true) - self.assertFalse(should_be_false) - - def test_check_is_int(self): - # arrange - section = 'any-section' - option = 'any-option' - - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.has_option = Mock(side_effect=[False, True, True, False]) - parser_mock.get = Mock(side_effect=['123', 'dummy']) - - # act - should_be_true = AutosubmitConfig.check_is_int(parser_mock, section, option, False) - should_be_true2 = AutosubmitConfig.check_is_int(parser_mock, section, option, False) - should_be_false = AutosubmitConfig.check_is_int(parser_mock, section, option, False) - should_be_false2 = AutosubmitConfig.check_is_int(parser_mock, section, option, True) - - # assert - 
self.assertTrue(should_be_true) - self.assertTrue(should_be_true2) - self.assertFalse(should_be_false) - self.assertFalse(should_be_false2) - - def test_check_is_boolean(self): - # arrange - section = 'any-section' - option = 'any-option' - - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.has_option = Mock(side_effect=[False, True, True, False]) - parser_mock.get = Mock(side_effect=['True', 'dummy']) - - # act - should_be_true = AutosubmitConfig.check_is_boolean(parser_mock, section, option, False) - should_be_true2 = AutosubmitConfig.check_is_boolean(parser_mock, section, option, False) - should_be_false = AutosubmitConfig.check_is_boolean(parser_mock, section, option, False) - should_be_false2 = AutosubmitConfig.check_is_boolean(parser_mock, section, option, True) - - # assert - self.assertTrue(should_be_true) - self.assertTrue(should_be_true2) - self.assertFalse(should_be_false) - self.assertFalse(should_be_false2) - - def test_check_regex(self): - # arrange - section = 'any-section' - option = 'any-option' - - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.has_option = Mock(side_effect=[False, False, True, True, True, True, False, True, True, True, True]) - parser_mock.get = Mock(side_effect=['dummy-value', '999999', 'dummy-value', 'dummy-value', '999999']) - - # act - # TODO: unexpected logic? 
- should_be_false = AutosubmitConfig.check_regex(parser_mock, section, option, False, 'dummy-regex') - should_be_true = AutosubmitConfig.check_regex(parser_mock, section, option, False, '[0-9]') - should_be_false2 = AutosubmitConfig.check_regex(parser_mock, section, option, False, 'dummy-regex') - should_be_true2 = AutosubmitConfig.check_regex(parser_mock, section, option, False, '[0-9]*') - - should_be_false3 = AutosubmitConfig.check_regex(parser_mock, section, option, True, 'dummy-regex') - should_be_false4 = AutosubmitConfig.check_regex(parser_mock, section, option, True, 'dummy-regex') - should_be_true3 = AutosubmitConfig.check_regex(parser_mock, section, option, True, '[0-9]*') - - # assert - self.assertFalse(should_be_false) - self.assertFalse(should_be_false2) - self.assertFalse(should_be_false3) - self.assertFalse(should_be_false4) - - self.assertTrue(should_be_true) - self.assertTrue(should_be_true2) - self.assertTrue(should_be_true3) - - def test_check_is_choice(self): - # arrange - section = 'any-section' - option = 'any-option' - choices = ['dummy-choice1', 'dummy-choice2'] - - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.has_option = Mock(side_effect=[False, True, True, False]) - parser_mock.get = Mock(side_effect=[choices[1], 'not-a-choice']) - - # act - should_be_true = AutosubmitConfig.check_is_choice(parser_mock, section, option, False, choices) - should_be_true2 = AutosubmitConfig.check_is_choice(parser_mock, section, option, False, choices) - should_be_false = AutosubmitConfig.check_is_choice(parser_mock, section, option, False, choices) - should_be_false2 = AutosubmitConfig.check_is_choice(parser_mock, section, option, True, choices) - - # assert - self.assertTrue(should_be_true) - self.assertTrue(should_be_true2) - self.assertFalse(should_be_false) - self.assertFalse(should_be_false2) - - def test_get_bool_option(self): - # arrange - section = 'any-section' - option = 'any-option' - - parser_mock = Mock(spec=SafeConfigParser) - 
parser_mock.has_option = Mock(side_effect=[True, True, False, False]) - parser_mock.get = Mock(side_effect=['false', 'true']) - - # act - should_be_false = AutosubmitConfig.get_bool_option(parser_mock, section, option, True) - should_be_true = AutosubmitConfig.get_bool_option(parser_mock, section, option, False) - - should_be_false2 = AutosubmitConfig.get_bool_option(parser_mock, section, option, False) - should_be_true2 = AutosubmitConfig.get_bool_option(parser_mock, section, option, True) - - # assert - self.assertTrue(should_be_true) - self.assertTrue(should_be_true2) - - self.assertFalse(should_be_false) - self.assertFalse(should_be_false2) - def test_get_startdates_list(self): # arrange parser_mock = Mock(spec=SafeConfigParser) diff --git a/test/unit/test_dic_jobs.py b/test/unit/test_dic_jobs.py index c4bf116dd..4e0c8af10 100644 --- a/test/unit/test_dic_jobs.py +++ b/test/unit/test_dic_jobs.py @@ -3,7 +3,7 @@ from unittest import TestCase from mock import Mock import math -from autosubmit.config.parser_factory import ConfigParserFactory +from bscearth.utils.config_parser import ConfigParserFactory from autosubmit.job.job_common import Status from autosubmit.job.job_common import Type from autosubmit.job.job_list import DicJobs diff --git a/test/unit/test_job_list.py b/test/unit/test_job_list.py index 24f869c7a..dde5834ee 100644 --- a/test/unit/test_job_list.py +++ b/test/unit/test_job_list.py @@ -4,7 +4,7 @@ from unittest import TestCase import os from mock import Mock -from autosubmit.config.parser_factory import ConfigParserFactory +from bscearth.utils.config_parser import ConfigParserFactory from autosubmit.job.job import Job from autosubmit.job.job_common import Status from autosubmit.job.job_list import JobList -- GitLab From a9a4f215bdbf231ab5ecc4e39e9d65ddd8726b41 Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Fri, 17 Feb 2017 13:20:17 +0100 Subject: [PATCH 30/60] Major refactor of monitor / stats. Some bug fixes. 
Related with #238 --- autosubmit/experiment/statistics.py | 150 +++++++++++++++++++ autosubmit/job/job.py | 36 +++++ autosubmit/monitor/diagram.py | 130 +++++++++++++++++ autosubmit/monitor/monitor.py | 198 ++----------------------- autosubmit/monitor/utils.py | 38 +++++ test/unit/test_autosubmit_ config.py | 206 ++++----------------------- 6 files changed, 397 insertions(+), 361 deletions(-) create mode 100644 autosubmit/experiment/statistics.py create mode 100644 autosubmit/monitor/diagram.py create mode 100644 autosubmit/monitor/utils.py diff --git a/autosubmit/experiment/statistics.py b/autosubmit/experiment/statistics.py new file mode 100644 index 000000000..3ccf6ddb8 --- /dev/null +++ b/autosubmit/experiment/statistics.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python + +# Copyright 2017 Earth Sciences Department, BSC-CNS + +# This file is part of Autosubmit. + +# Autosubmit is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# Autosubmit is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with Autosubmit. If not, see . 
+ +import datetime +from autosubmit.job.job import Job +from autosubmit.monitor.utils import FixedSizeList +from bscearth.utils.log import Log + + +def timedelta2hours(deltatime): + return deltatime.days * 24 + deltatime.seconds / 3600.0 + + +class ExperimentStats(object): + + def __init__(self, jobs_list, start, end): + self._jobs_list = jobs_list + self._start = start + self._end = end + # Max variables + self._max_timedelta = 0 + self._max_time = 0 + self._max_fail = 0 + # Totals variables + self._total_jobs_submitted = 0 + self._total_jobs_run = 0 + self._total_jobs_failed = 0 + self._total_jobs_completed = 0 + self._cpu_consumption = datetime.timedelta() + self._real_consumption = datetime.timedelta() + self._expected_cpu_consumption = 0 + self._expected_real_consumption = 0 + self._threshold = 0 + # Totals arrays + self._totals = [] + self._run = [datetime.timedelta()] * len(jobs_list) + self._queued = [datetime.timedelta()] * len(jobs_list) + self._failed_jobs = [0] * len(jobs_list) + self._fail_queued = [datetime.timedelta()] * len(jobs_list) + self._fail_run = [datetime.timedelta()] * len(jobs_list) + # Do calculations + self._calculate_stats() + self._calculate_maxs() + self._calculate_totals() + self._format_stats() + + @property + def totals(self): + return self._totals + + @property + def max_time(self): + return self._max_time + + @property + def max_fail(self): + return self._max_fail + + @property + def threshold(self): + return self._threshold + + @property + def run(self): + return FixedSizeList(self._run, 0.0) + + @property + def queued(self): + return FixedSizeList(self._queued, 0.0) + + @property + def failed_jobs(self): + return FixedSizeList(self._failed_jobs, 0.0) + + @property + def fail_queued(self): + return FixedSizeList(self._fail_queued, 0.0) + + @property + def fail_run(self): + return FixedSizeList(self._fail_run, 0.0) + + def _calculate_stats(self): + for i, job in enumerate(self._jobs_list): + processors = job.processors + 
last_retrials = job.get_last_retrials() + for retrial in last_retrials: + if Job.is_a_completed_retrial(retrial): + self._queued[i] += retrial[1] - retrial[0] + self._run[i] += retrial[2] - retrial[1] + self._cpu_consumption += self.run[i] * int(processors) + self._real_consumption += self.run[i] + self._total_jobs_completed += 1 + else: + if len(retrial) > 2: + self._fail_run[i] += retrial[2] - retrial[1] + if len(retrial) > 1: + self._fail_queued[i] += retrial[1] - retrial[0] + self._cpu_consumption += self.fail_run[i] * int(processors) + self._real_consumption += self.fail_run[i] + self._failed_jobs[i] += 1 + self._total_jobs_submitted += len(last_retrials) + self._total_jobs_run += len(last_retrials) + self._total_jobs_failed += self.failed_jobs[i] + self._threshold = max(self._threshold, job.total_wallclock) + self._expected_cpu_consumption += job.total_wallclock * int(job.total_processors) + self._expected_real_consumption += job.total_wallclock + + def _calculate_maxs(self): + max_run = max(max(self._run), max(self._fail_run)) + max_queued = max(max(self._queued), max(self._fail_queued)) + self._max_timedelta = max(max_run, max_queued, datetime.timedelta(hours=self._threshold)) + self._max_time = max(self._max_time, self._max_timedelta.days * 24 + self._max_timedelta.seconds / 3600.0) + self._max_fail = max(self._max_fail, max(self._failed_jobs)) + + def _calculate_totals(self): + percentage_consumption = timedelta2hours(self._cpu_consumption) / self._expected_cpu_consumption * 100 + self._totals = ['Period: ' + str(self._start) + " ~ " + str(self._end), + 'Submitted (#): ' + str(self._total_jobs_submitted), + 'Run (#): ' + str(self._total_jobs_run), + 'Failed (#): ' + str(self._total_jobs_failed), + 'Completed (#): ' + str(self._total_jobs_completed), + 'Expected consumption real (h): ' + str(round(self._expected_real_consumption, 2)), + 'Expected consumption CPU time (h): ' + str(round(self._expected_cpu_consumption, 2)), + 'Consumption real (h): ' + 
str(round(timedelta2hours(self._real_consumption), 2)), + 'Consumption CPU time (h): ' + str(round(timedelta2hours(self._cpu_consumption), 2)), + 'Consumption (%): ' + str(round(percentage_consumption, 2))] + Log.result('\n'.join(self._totals)) + + def _format_stats(self): + self._queued = map(lambda y: timedelta2hours(y), self._queued) + self._run = map(lambda y: timedelta2hours(y), self._run) + self._fail_queued = map(lambda y: timedelta2hours(y), self._fail_queued) + self._fail_run = map(lambda y: timedelta2hours(y), self._fail_run) diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 2de13b76a..de5b420cb 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -231,6 +231,19 @@ class Job(object): def remote_logs(self, value): self._remote_logs = value + @property + def total_processors(self): + if ':' in self.processors: + return reduce(lambda x, y: int(x) + int(y), self.processors.split(':')) + return self.processors + + @property + def total_wallclock(self): + if self.wallclock: + hours, minutes = self.wallclock.split(':') + return float(minutes) / 60 + float(hours) + return 0 + def log_job(self): """ Prints job information in log @@ -425,6 +438,22 @@ class Job(object): """ return self._get_from_total_stats(1) + def get_last_retrials(self): + log_name = os.path.join(self._tmp_path, self.name + '_TOTAL_STATS') + retrials_list = [] + if os.path.exists(log_name): + already_completed = False + for retrial in reversed(open(log_name).readlines()): + retrial_fields = retrial.split() + if Job.is_a_completed_retrial(retrial_fields): + if already_completed: + break + already_completed = True + retrial_dates = map(lambda y: parse_date(y) if y != 'COMPLETED' and y != 'FAILED' else y, + retrial_fields) + retrials_list.insert(0, retrial_dates) + return retrials_list + def update_status(self, new_status, copy_remote_logs=False): """ Updates job status, checking COMPLETED file if needed @@ -669,6 +698,13 @@ class Job(object): template, 
snippet.as_tailer()]) + @staticmethod + def is_a_completed_retrial(fields): + if len(fields) == 4: + if fields[3] == 'COMPLETED': + return True + return False + def create_script(self, as_conf): """ Creates script file to be run for the job diff --git a/autosubmit/monitor/diagram.py b/autosubmit/monitor/diagram.py new file mode 100644 index 000000000..696e87c57 --- /dev/null +++ b/autosubmit/monitor/diagram.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python + +# Copyright 2017 Earth Sciences Department, BSC-CNS + +# This file is part of Autosubmit. + +# Autosubmit is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# Autosubmit is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with Autosubmit. If not, see . 
+ +import datetime +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.gridspec as gridspec +import matplotlib.patches as mpatches +from autosubmit.experiment.statistics import ExperimentStats +from autosubmit.job.job_common import Status +from bscearth.utils.log import Log +from autosubmit.job.job import Job + +# Autosubmit stats constants +RATIO = 4 +MAX_JOBS_PER_PLOT = 12.0 + + +def create_bar_diagram(experiment_id, jobs_list, general_stats, output_file, period_ini=None, period_fi=None): + # Error prevention + plt.close('all') + # Stats variables definition + num_plots = int(np.ceil(len(jobs_list) / MAX_JOBS_PER_PLOT)) + ind = np.arange(int(MAX_JOBS_PER_PLOT)) + width = 0.16 + # Creating stats figure + fig = plt.figure(figsize=(RATIO * 4, 3 * RATIO * num_plots)) + fig.suptitle('STATS - ' + experiment_id, fontsize=24, fontweight='bold') + # Variables initialization + ax, ax2 = [], [] + rects = [None] * 6 + exp_stats = ExperimentStats(jobs_list, period_ini, period_fi) + grid_spec = gridspec.GridSpec(RATIO * num_plots + 2, 1) + for plot in range(1, num_plots + 1): + # Calculating jobs inside the given plot + l1 = int((plot - 1) * MAX_JOBS_PER_PLOT) + l2 = int(plot * MAX_JOBS_PER_PLOT) + # Building plot axis + ax.append(fig.add_subplot(grid_spec[RATIO * plot - RATIO + 2:RATIO * plot + 1])) + ax[plot - 1].set_ylabel('hours') + ax[plot - 1].set_xticks(ind + width) + ax[plot - 1].set_xticklabels([job.name for job in jobs_list[l1:l2]], rotation='vertical') + ax[plot - 1].set_title(experiment_id, fontsize=20) + ax[plot - 1].set_ylim(0, float(1.10 * exp_stats.max_time)) + # Axis 2 + ax2.append(ax[plot - 1].twinx()) + ax2[plot - 1].set_ylabel('# failed jobs') + ax2[plot - 1].set_yticks(range(0, exp_stats.max_fail + 2)) + ax2[plot - 1].set_ylim(0, exp_stats.max_fail + 1) + # Building rects + rects[0] = ax[plot - 1].bar(ind, exp_stats.queued[l1:l2], width, color='orchid') + rects[1] = ax[plot - 1].bar(ind + width, exp_stats.run[l1:l2], width, 
color='limegreen') + rects[2] = ax2[plot - 1].bar(ind + width * 2, exp_stats.failed_jobs[l1:l2], width, color='red') + rects[3] = ax[plot - 1].bar(ind + width * 3, exp_stats.fail_queued[l1:l2], width, color='purple') + rects[4] = ax[plot - 1].bar(ind + width * 4, exp_stats.fail_run[l1:l2], width, color='tomato') + rects[5] = ax[plot - 1].plot([0., width * 6 * MAX_JOBS_PER_PLOT], [exp_stats.threshold, exp_stats.threshold], + "k--", label='wallclock sim') + + # Building legends subplot + legends_plot = fig.add_subplot(grid_spec[0, 0]) + legends_plot.set_frame_on(False) + legends_plot.axes.get_xaxis().set_visible(False) + legends_plot.axes.get_yaxis().set_visible(False) + + # Building legends + build_legends(legends_plot, rects, exp_stats, general_stats) + + # Saving output figure + grid_spec.tight_layout(fig, rect=[0, 0.03, 1, 0.97]) + plt.savefig(output_file) + + +def build_legends(plot, rects, experiment_stats, general_stats): + # Main legend with colourful rectangles + legend_rects = [[rect[0] for rect in rects]] + legend_titles = [ + ['Queued (h)', 'Run (h)', 'Failed jobs (#)', 'Fail Queued (h)', 'Fail Run (h)', 'Max wallclock (h)'] + ] + legend_locs = ["upper right"] + legend_handlelengths = [None] + + # General stats legends, if exists + if len(general_stats) > 0: + legend_rects.append(get_whites_array(len(general_stats))) + legend_titles.append([str(key) + ': ' + str(value) for key, value in general_stats]) + legend_locs.append("upper center") + legend_handlelengths.append(0) + + # Total stats legend + legend_rects.append(get_whites_array(len(experiment_stats.totals))) + legend_titles.append(experiment_stats.totals) + legend_locs.append("upper left") + legend_handlelengths.append(0) + + # Creating the legends + legends = create_legends(plot, legend_rects, legend_titles, legend_locs, legend_handlelengths) + for legend in legends: + plt.gca().add_artist(legend) + + +def create_legends(plot, rects, titles, locs, handlelengths): + legends = [] + for i in 
xrange(len(rects)): + legends.append(create_legend(plot, rects[i], titles[i], locs[i], handlelengths[i])) + return legends + + +def create_legend(plot, rects, titles, loc, handlelength=None): + return plot.legend(rects, titles, loc=loc, handlelength=handlelength) + + +def get_whites_array(length): + white = mpatches.Rectangle((0, 0), 0, 0, alpha=0.0) + return [white for _ in xrange(length)] diff --git a/autosubmit/monitor/monitor.py b/autosubmit/monitor/monitor.py index cd480e036..d5272a1a1 100644 --- a/autosubmit/monitor/monitor.py +++ b/autosubmit/monitor/monitor.py @@ -26,12 +26,7 @@ from os import listdir from os import remove import pydotplus - - -# These packages produce errors when added to setup. -# noinspection PyPackageRequirements import numpy as np -# noinspection PyPackageRequirements import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import matplotlib.patches as mpatches @@ -40,7 +35,11 @@ import subprocess from autosubmit.job.job_common import Status from autosubmit.config.basicConfig import BasicConfig +from autosubmit.config.config_common import AutosubmitConfig from bscearth.utils.log import Log +from bscearth.utils.config_parser import ConfigParserFactory + +from diagram import create_bar_diagram class Monitor: @@ -213,7 +212,7 @@ class Monitor: output_date = time.strftime("%Y%m%d_%H%M", now) output_file = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "plot", expid + "_statistics_" + output_date + "." 
+ output_format) - self.create_bar_diagram(expid, joblist, output_file, period_ini, period_fi) + create_bar_diagram(expid, joblist, self.get_general_stats(expid), output_file, period_ini, period_fi) Log.result('Stats created at {0}', output_file) if show: try: @@ -221,183 +220,6 @@ class Monitor: except subprocess.CalledProcessError: Log.error('File {0} could not be opened', output_file) - @staticmethod - def create_bar_diagram(expid, joblist, output_file, period_ini=None, period_fi=None): - """ - Function to plot statistics - - :param expid: experiment's identifier - :type expid: str - :param joblist: joblist to plot - :type joblist: JobList - :param output_file: path to create file - :type output_file: str - :param period_ini: initial datetime of filtered period - :type period_ini: datetime - :param period_fi: final datetime of filtered period - :type period_fi: datetime - """ - - def timedelta2hours(deltatime): - return deltatime.days * 24 + deltatime.seconds / 3600.0 - - total_jobs_submitted = 0 - cpu_consumption = datetime.timedelta() - real_consumption = datetime.timedelta() - total_jobs_run = 0 - total_jobs_failed = 0 - total_jobs_completed = 0 - expected_cpu_consumption = 0 - expected_real_consumption = 0 - threshold = 0 - for job in joblist: - total_jobs_submitted += len(job.check_retrials_submit_time()) - if job.wallclock: - l = job.wallclock.split(':') - hours = float(l[1]) / 60 + float(l[0]) - else: - hours = 0 - threshold = max(threshold, hours) - if ':' in job.processors: - processors = reduce(lambda x, y: int(x) + int(y), job.processors.split(':')) - else: - processors = job.processors - expected_cpu_consumption += hours * int(processors) - expected_real_consumption += hours - # These are constants, so they need to be CAPS. 
Suppress PyCharm warning - # noinspection PyPep8Naming - MAX = 12.0 - # noinspection PyPep8Naming - N = len(joblist) - num_plots = int(np.ceil(N / MAX)) - - ind = np.arange(int(MAX)) # the x locations for the groups - width = 0.16 # the width of the bars - - plt.close('all') - - # noinspection PyPep8Naming - RATIO = 4 - fig = plt.figure(figsize=(RATIO * 4, 3 * RATIO * num_plots)) - gs = gridspec.GridSpec(RATIO * num_plots + 2, 1) - fig.suptitle('STATS - ' + expid, fontsize=24, fontweight='bold') - - ax = [] - ax2 = [] - max_time = 0 - max_fail = 0 - for plot in range(1, num_plots + 1): - ax.append(fig.add_subplot(gs[RATIO * plot - RATIO + 2:RATIO * plot + 1])) - ax2.append(ax[plot - 1].twinx()) - l1 = int((plot - 1) * MAX) - l2 = int(plot * MAX) - - run = [datetime.timedelta()] * (l2 - l1) - queued = [datetime.timedelta()] * (l2 - l1) - failed_jobs = [0] * (l2 - l1) - fail_queued = [datetime.timedelta()] * (l2 - l1) - fail_run = [datetime.timedelta()] * (l2 - l1) - - for i, job in enumerate(joblist[l1:l2]): - submit_times = job.check_retrials_submit_time() - start_times = job.check_retrials_start_time() - end_times = job.check_retrials_end_time() - - if ':' in job.processors: - processors = reduce(lambda x, y: int(x) + int(y), job.processors.split(':')) - else: - processors = job.processors - - for j, t in enumerate(submit_times): - - if j >= len(end_times): - if j < len(start_times): - queued[i] += start_times[j] - submit_times[j] - elif j == (len(submit_times) - 1) and job.status == Status.COMPLETED: - queued[i] += start_times[j] - submit_times[j] - run[i] += end_times[j] - start_times[j] - cpu_consumption += run[i] * int(processors) - real_consumption += run[i] - else: - failed_jobs[i] += 1 - fail_queued[i] += start_times[j] - submit_times[j] - fail_run[i] += end_times[j] - start_times[j] - cpu_consumption += fail_run[i] * int(processors) - real_consumption += fail_run[i] - total_jobs_run += len(start_times) - total_jobs_failed += failed_jobs[i] - 
total_jobs_completed += len(end_times) - failed_jobs[i] - max_timedelta = max(max(max(run, fail_run, queued, fail_queued)), datetime.timedelta(hours=threshold)) - max_time = max(max_time, max_timedelta.days * 24 + max_timedelta.seconds / 3600.0) - max_fail = max(max_fail, max(failed_jobs)) - - for i, delta in enumerate(queued): - queued[i] = timedelta2hours(delta) - - for i, delta in enumerate(run): - run[i] = timedelta2hours(delta) - - for i, delta in enumerate(fail_queued): - fail_queued[i] = timedelta2hours(delta) - - for i, delta in enumerate(fail_run): - fail_run[i] = timedelta2hours(delta) - - rects1 = ax[plot - 1].bar(ind, queued, width, color='orchid') - rects2 = ax[plot - 1].bar(ind + width, run, width, color='limegreen') - rects3 = ax2[plot - 1].bar(ind + width * 2, failed_jobs, width, color='red') - rects4 = ax[plot - 1].bar(ind + width * 3, fail_queued, width, color='purple') - rects5 = ax[plot - 1].bar(ind + width * 4, fail_run, width, color='tomato') - ax[plot - 1].set_ylabel('hours') - ax2[plot - 1].set_ylabel('# failed jobs') - ax[plot - 1].set_xticks(ind + width) - ax[plot - 1].set_xticklabels([job.name for job in joblist[l1:l2]], rotation='vertical') - ax[plot - 1].set_title(expid, fontsize=20) - # autolabel(rects1) - # autolabel(rects2) - # autolabel(rects4) - # autolabel(rects5) - - rects6 = ax[plot - 1].plot([0., width * 6 * MAX], [threshold, threshold], "k--", label='wallclock sim') - - for plot in range(1, num_plots + 1): - ax[plot - 1].set_ylim(0, float(1.10 * max_time)) - ax2[plot - 1].set_yticks(range(0, max_fail + 2)) - ax2[plot - 1].set_ylim(0, max_fail + 1) - - percentage_consumption = timedelta2hours(cpu_consumption) / expected_cpu_consumption * 100 - white = mpatches.Rectangle((0, 0), 0, 0, alpha=0.0) - totals = ['Period: ' + str(period_ini) + " ~ " + str(period_fi), - 'Submitted (#): ' + str(total_jobs_submitted), - 'Run (#): ' + str(total_jobs_run), - 'Failed (#): ' + str(total_jobs_failed), - 'Completed (#): ' + 
str(total_jobs_completed), - 'Expected consumption real (h): ' + str(round(expected_real_consumption, 2)), - 'Expected consumption CPU time (h): ' + str(round(expected_cpu_consumption, 2)), - 'Consumption real (h): ' + str(round(timedelta2hours(real_consumption), 2)), - 'Consumption CPU time (h): ' + str(round(timedelta2hours(cpu_consumption), 2)), - 'Consumption (%): ' + str(round(percentage_consumption, 2))] - Log.result('\n'.join(totals)) - - ax0 = fig.add_subplot(gs[0, 0]) - ax0.set_frame_on(False) - ax0.axes.get_xaxis().set_visible(False) - ax0.axes.get_yaxis().set_visible(False) - # noinspection PyUnboundLocalVariable - first_legend = ax0.legend((rects1[0], rects2[0], rects3[0], rects4[0], rects5[0], rects6[0]), - ('Queued (h)', 'Run (h)', 'Failed jobs (#)', 'Fail Queued (h)', 'Fail Run (h)', - 'Max wallclock (h)'), loc="upper right") - plt.gca().add_artist(first_legend) - - ax0.legend([white, white, white, white, white, white, white, white, white, white], - totals, - handlelength=0, - loc="upper left") - - gs.tight_layout(fig, rect=[0, 0.03, 1, 0.97]) # adjust rect parameter while leaving some room for suptitle. 
- # plt.show() - plt.savefig(output_file) - @staticmethod def clean_plot(expid): """ @@ -437,3 +259,13 @@ class Monitor: for f in filelist: remove(f) Log.result("Stats cleaned!\nLast stats' plot remanining there.\n") + + @staticmethod + def get_general_stats(expid): + general_stats = [] + general_stats_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, "tmp", expid + "_GENERAL_STATS") + parser = AutosubmitConfig.get_parser(ConfigParserFactory(), general_stats_path) + for section in parser.sections(): + general_stats.append((section, '')) + general_stats += parser.items(section) + return general_stats diff --git a/autosubmit/monitor/utils.py b/autosubmit/monitor/utils.py new file mode 100644 index 000000000..86d0758c7 --- /dev/null +++ b/autosubmit/monitor/utils.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +# Copyright 2017 Earth Sciences Department, BSC-CNS + +# This file is part of Autosubmit. + +# Autosubmit is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# Autosubmit is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with Autosubmit. If not, see . + + +class FixedSizeList(list): + """ + Customized list to retrieve a fixed sublist / slice of its elements. + The list will be filled with as many items (default) as needed to + get a list with the expected size. 
+ """ + + def __init__(self, lst, default=None): + super(FixedSizeList, self).__init__(lst) + self._default = default + + def __getitem__(self, key): + if isinstance(key, slice): + return [list(self)[i] if 0 <= i < len(self) else self._default for i in xrange(key.start, key.stop, key.step or 1)] + return list(self)[key] + + def __getslice__(self, i, j): + return self.__getitem__(slice(i, j)) diff --git a/test/unit/test_autosubmit_ config.py b/test/unit/test_autosubmit_ config.py index 34d3290a9..eea91d336 100644 --- a/test/unit/test_autosubmit_ config.py +++ b/test/unit/test_autosubmit_ config.py @@ -1,6 +1,6 @@ from unittest import TestCase from autosubmit.config.config_common import AutosubmitConfig -from bscearth.utils.config_parser import ConfigParserFactory +from bscearth.utils.config_parser import ConfigParserFactory, ConfigParser from mock import Mock from mock import patch from mock import mock_open @@ -8,13 +8,6 @@ import os import sys from datetime import datetime -try: - # noinspection PyCompatibility - from configparser import SafeConfigParser -except ImportError: - # noinspection PyCompatibility - from ConfigParser import SafeConfigParser - # compatibility with both versions (2 & 3) from sys import version_info @@ -39,7 +32,7 @@ class TestAutosubmitConfig(TestCase): # arrange file_path = 'dummy/file/path' - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.read = Mock() factory_mock = Mock(spec=ConfigParserFactory) @@ -49,7 +42,7 @@ class TestAutosubmitConfig(TestCase): returned_parser = AutosubmitConfig.get_parser(factory_mock, file_path) # assert - self.assertTrue(isinstance(returned_parser, SafeConfigParser)) + self.assertTrue(isinstance(returned_parser, ConfigParser)) factory_mock.create_parser.assert_called_with() parser_mock.read.assert_called_with(file_path) @@ -59,7 +52,7 @@ class TestAutosubmitConfig(TestCase): "expdef_" + self.any_expid + ".conf")) def test_platforms_parser(self): - 
self.assertTrue(isinstance(self.config.platforms_parser, SafeConfigParser)) + self.assertTrue(isinstance(self.config.platforms_parser, ConfigParser)) def test_platforms_file(self): self.assertEqual(self.config.platforms_file, @@ -77,7 +70,7 @@ class TestAutosubmitConfig(TestCase): "jobs_" + self.any_expid + ".conf")) def test_get_project_dir(self): - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.get = Mock(side_effect=['/dummy/path']) factory_mock = Mock(spec=ConfigParserFactory) @@ -96,65 +89,52 @@ class TestAutosubmitConfig(TestCase): def test_get_wallclock(self): # arrange expected_value = '00:05' + default_value = '' config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_wallclock(self.section) # assert - self._assert_get_option(parser_mock, 'WALLCLOCK', expected_value, returned_value, str) + self._assert_get_option(parser_mock, 'WALLCLOCK', expected_value, returned_value, default_value, str) def test_get_processors(self): # arrange expected_value = '99999' + default_value = 1 config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_processors(self.section) # assert - self._assert_get_option(parser_mock, 'PROCESSORS', expected_value, returned_value, str) + self._assert_get_option(parser_mock, 'PROCESSORS', expected_value, returned_value, default_value, str) def test_get_threads(self): # arrange expected_value = 99999 + default_value = 1 config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_threads(self.section) # assert - self._assert_get_option(parser_mock, 'THREADS', expected_value, returned_value, int) + self._assert_get_option(parser_mock, 'THREADS', expected_value, returned_value, default_value, int) def test_get_tasks(self): # arrange expected_value = 99999 + default_value = 0 config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_tasks(self.section) # assert 
- self._assert_get_option(parser_mock, 'TASKS', expected_value, returned_value, int) + self._assert_get_option(parser_mock, 'TASKS', expected_value, returned_value, default_value, int) def test_get_memory(self): # arrange expected_value = '99999' + default_value = '' config, parser_mock = self._arrange_config(expected_value) # act returned_value = config.get_memory(self.section) # assert - self._assert_get_option(parser_mock, 'MEMORY', expected_value, returned_value, str) - - def test_check_exists_case_true(self): - # arrange - parser_mock = self._create_parser_mock(True) - # act - exists = parser_mock.check_exists(self.section, self.option) - # assert - parser_mock.has_option.assert_called_once_with(self.section, self.option) - self.assertTrue(exists) - - def test_check_exists_case_false(self): - # arrange - parser_mock = self._create_parser_mock(False) - # act - exists = parser_mock.check_exists(self.section, self.option) - # assert - parser_mock.has_option.assert_called_once_with(self.section, self.option) - self.assertFalse(exists) + self._assert_get_option(parser_mock, 'MEMORY', expected_value, returned_value, default_value, str) def test_that_reload_must_load_parsers(self): # arrange @@ -173,7 +153,7 @@ class TestAutosubmitConfig(TestCase): # TODO: could be improved asserting that the methods are called for parser in parsers: self.assertTrue(hasattr(config, parser)) - self.assertTrue(isinstance(getattr(config, parser), SafeConfigParser)) + self.assertTrue(isinstance(getattr(config, parser), ConfigParser)) def test_set_expid(self): # arrange @@ -226,7 +206,7 @@ class TestAutosubmitConfig(TestCase): def test_load_project_parameters(self): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.sections = Mock(return_value=['DUMMY_SECTION_1', 'DUMMY_SECTION_2']) parser_mock.items = Mock(side_effect=[[['dummy_key1', 'dummy_value1'], ['dummy_key2', 'dummy_value2']], [['dummy_key3', 'dummy_value3'], ['dummy_key4', 
'dummy_value4']]]) @@ -249,7 +229,7 @@ class TestAutosubmitConfig(TestCase): def test_get_startdates_list(self): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) # TODO: Check if these are all accepted formats parser_mock.get = Mock(return_value='1920 193005 19400909 1950[01 0303]') @@ -270,35 +250,9 @@ class TestAutosubmitConfig(TestCase): self.assertTrue(datetime(1950, 1, 1) in returned_dates) self.assertTrue(datetime(1950, 3, 3) in returned_dates) - def test_get_project_destination(self): - # arrange - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.get = Mock(side_effect=['/dummy/path', - None, 'local', '/dummy/local/local-path', - None, 'svn', 'svn', '/dummy/svn/svn-path', - None, 'git', 'git', 'git', '/dummy/git/git-path.git']) - - factory_mock = Mock(spec=ConfigParserFactory) - factory_mock.create_parser = Mock(return_value=parser_mock) - - config = AutosubmitConfig(self.any_expid, FakeBasicConfig, factory_mock) - config.reload() - - # act - returned_project_destination = config.get_project_destination() - returned_project_destination_local = config.get_project_destination() - returned_project_destination_svn = config.get_project_destination() - returned_project_destination_git = config.get_project_destination() - - # assert - self.assertEquals('/dummy/path', returned_project_destination) - self.assertEquals('local-path', returned_project_destination_local) - self.assertEquals('svn-path', returned_project_destination_svn) - self.assertEquals('git-path', returned_project_destination_git) - def test_check_project(self): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.read = Mock(side_effect=Exception) factory_mock = Mock(spec=ConfigParserFactory) @@ -321,66 +275,9 @@ class TestAutosubmitConfig(TestCase): self.assertTrue(should_be_true2) self.assertFalse(should_be_false) - def test_get_some_properties(self): - # arrange - properties = {'RETRIALS': 
'111', 'SAFETYSLEEPTIME': '222', 'MAXWAITINGJOBS': '333', - 'TOTALJOBS': '444', 'FILE_PROJECT_CONF': '/dummy/path', 'FILE_JOBS_CONF': '/dummy/object', - 'PROJECT_BRANCH': 'dummy/branch', 'PROJECT_COMMIT': 'dummy/commit', - 'PROJECT_REVISION': 'dummy/revision', 'NUMCHUNKS': '999', 'CHUNKSIZEUNIT': '9999', - 'MEMBERS': 'MEMBER1 MEMBER2', 'RERUN': 'dummy/rerun', 'CHUNKLIST': 'dummy/chunklist', - 'HPCARCH': 'dummy/hpcarch'} - - # TODO: Improve making properties as a dict of dicts (for section) - def get_option(section, option): - return properties[option] - - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.has = Mock(return_value=True) - parser_mock.get = Mock(side_effect=get_option) - - factory_mock = Mock(spec=ConfigParserFactory) - factory_mock.create_parser = Mock(return_value=parser_mock) - - config = AutosubmitConfig(self.any_expid, FakeBasicConfig, factory_mock) - config.reload() - - # act - returned_retrials = config.get_retrials() - returned_safetysleeptime = config.get_safetysleeptime() - returned_max_jobs = config.get_max_waiting_jobs() - returned_total_jobs = config.get_total_jobs() - returned_file_project = config.get_file_project_conf() - returned_file_jobs = config.get_file_jobs_conf() - returned_branch = config.get_git_project_branch() - returned_commit = config.get_git_project_commit() - returned_revision = config.get_svn_project_revision() - returned_num_chunks = config.get_num_chunks() - returned_chunk_size_unit = config.get_chunk_size_unit() - returned_member_list = config.get_member_list() - returned_rerun = config.get_rerun() - returned_chunk_list = config.get_chunk_list() - returned_platform = config.get_platform() - - # assert - self.assertEquals(int(properties['RETRIALS']), returned_retrials) - self.assertEquals(int(properties['SAFETYSLEEPTIME']), returned_safetysleeptime) - self.assertEquals(int(properties['MAXWAITINGJOBS']), returned_max_jobs) - self.assertEquals(int(properties['TOTALJOBS']), returned_total_jobs) - 
self.assertEquals(properties['FILE_PROJECT_CONF'], returned_file_project) - self.assertEquals(properties['FILE_JOBS_CONF'], returned_file_jobs) - self.assertEquals(properties['PROJECT_BRANCH'], returned_branch) - self.assertEquals(properties['PROJECT_COMMIT'], returned_commit) - self.assertEquals(properties['PROJECT_REVISION'], returned_revision) - self.assertEquals(int(properties['NUMCHUNKS']), returned_num_chunks) - self.assertEquals(properties['CHUNKSIZEUNIT'], returned_chunk_size_unit) - self.assertEquals(properties['MEMBERS'].split(' '), returned_member_list) - self.assertEquals(properties['RERUN'], returned_rerun) - self.assertEquals(properties['CHUNKLIST'], returned_chunk_list) - self.assertEquals(properties['HPCARCH'], returned_platform) - def test_load_parameters(self): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.sections = Mock(side_effect=[['dummy-section1'], ['dummy-section2'], ['dummy-section3']]) parser_mock.options = Mock(side_effect=[['dummy-option1', 'dummy-option2'], @@ -416,7 +313,7 @@ class TestAutosubmitConfig(TestCase): sys.modules['subprocess'].check_output = Mock(side_effect=[Exception, 'dummy/path/', Exception, 'dummy/path/', 'dummy/sha/']) - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) factory_mock = Mock(spec=ConfigParserFactory) factory_mock.create_parser = Mock(return_value=parser_mock) @@ -439,58 +336,10 @@ class TestAutosubmitConfig(TestCase): self.assertFalse(should_be_false) self.assertFalse(should_be_false2) - def test_check_autosubmit_conf(self): - # arrange - - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.read = Mock() - parser_mock.has = Mock(return_value=True) - - parser_mock.get = Mock(side_effect=[1111, 2222, 3333, 4444, 'True', 'paramiko', 'db', 'True', 'example@test.org', - 1111, 2222, 3333, 'no-int', 'True', 'True', 'example@test.org']) - - factory_mock = Mock(spec=ConfigParserFactory) - factory_mock.create_parser 
= Mock(return_value=parser_mock) - - config = AutosubmitConfig(self.any_expid, FakeBasicConfig, factory_mock) - config.reload() - - # act - should_be_true = config.check_autosubmit_conf() - should_be_false = config.check_autosubmit_conf() - - # arrange - self.assertTrue(should_be_true) - self.assertFalse(should_be_false) - - # TODO: Test other CVS cases - def test_check_expdef_conf(self): - # arrange - parser_mock = Mock(spec=SafeConfigParser) - parser_mock.read = Mock() - parser_mock.has = Mock(return_value=True) - - parser_mock.get = Mock(side_effect=['year', 111, 222, 'standard', 'True', 'git', 'git', - 'year', 111, 'not-a-number', 'standard', 'True', 'none', 'none']) - - factory_mock = Mock(spec=ConfigParserFactory) - factory_mock.create_parser = Mock(return_value=parser_mock) - - config = AutosubmitConfig(self.any_expid, FakeBasicConfig, factory_mock) - config.reload() - - # act - should_be_true = config.check_expdef_conf() - should_be_false = config.check_expdef_conf() - - # assert - self.assertTrue(should_be_true) - self.assertFalse(should_be_false) - # TODO: Test specific cases def test_check_jobs_conf(self): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.sections = Mock(side_effect=[['dummy-section1', 'dummy-section2'], ['dummy-platform1', 'dummy-platform2']]) parser_mock.has = Mock(return_value=True) @@ -515,7 +364,7 @@ class TestAutosubmitConfig(TestCase): # TODO: Test specific cases def test_check_platforms_conf(self): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.sections = Mock(side_effect=[[], [], ['dummy-section1'], ['dummy-section1', 'dummy-section2']]) parser_mock.has = Mock(return_value=True) @@ -570,16 +419,17 @@ class TestAutosubmitConfig(TestCase): ############################# ## Helper functions & classes - def _assert_get_option(self, parser_mock, option, expected_value, returned_value, expected_type): + def 
_assert_get_option(self, parser_mock, option, expected_value, returned_value, default_value, expected_type): self.assertTrue(isinstance(returned_value, expected_type)) self.assertEqual(expected_value, returned_value) - parser_mock.has_option.assert_called_once_with(self.section, option) + parser_mock.get_option.assert_called_once_with(self.section, option, default_value) def _arrange_config(self, option_value): # arrange - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.has_option = Mock(return_value=True) parser_mock.get = Mock(return_value=option_value) + parser_mock.get_option = Mock(return_value=option_value) factory_mock = Mock(spec=ConfigParserFactory) factory_mock.create_parser = Mock(return_value=parser_mock) config = AutosubmitConfig(self.any_expid, FakeBasicConfig, factory_mock) @@ -587,7 +437,7 @@ class TestAutosubmitConfig(TestCase): return config, parser_mock def _create_parser_mock(self, has_option, returned_option=None): - parser_mock = Mock(spec=SafeConfigParser) + parser_mock = Mock(spec=ConfigParser) parser_mock.has_option = Mock(return_value=has_option) parser_mock.get = Mock(return_value=returned_option) return parser_mock -- GitLab From 9ba615cdff62a56c7ece616e43b8735f04440f4f Mon Sep 17 00:00:00 2001 From: Joan Lopez Date: Fri, 17 Feb 2017 15:24:59 +0100 Subject: [PATCH 31/60] Docs improved --- docs/source/usage.rst | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/docs/source/usage.rst b/docs/source/usage.rst index 6f5b4669a..a90c07aeb 100644 --- a/docs/source/usage.rst +++ b/docs/source/usage.rst @@ -204,6 +204,25 @@ More info on password-less ssh can be found at: http://www.linuxproblem.org/art_ .. caution:: After launching Autosubmit, one must be aware of login expiry limit and policy (if applicable for any HPC) and renew the login access accordingly (by using token/key etc) before expiry. 
+How to run an experiment that was created with another version +============================================================== + +.. important:: First of all, you have to stop your Autosubmit instance related to the experiment + +Once you have loaded / installed the Autosubmit version you want: +:: + + autosubmit create EXPID + autosubmit recovery EXPID -s -all + autosubmit run EXPID + +*EXPID* is the experiment identifier. + +The most common problem when you change your Autosubmit version is the appearance of several Python errors. +This is due to how Autosubmit saves its data internally, which can be incompatible between versions. +The steps above represent the process to re-create (1) these internal data structures and to recover (2) the previous status of your experiment. + + How to test the experiment ========================== This method is to conduct a test for a given experiment. It creates a new experiment for a given experiment with a @@ -352,6 +371,25 @@ The location where user can find the generated plots with date and timestamp can /cxxx/plot/cxxx_statistics__