diff --git a/.gitignore b/.gitignore index 069bcb6d1e312ce6e72641dbdd3db08be28cbbf0..7eebdc167af1f3d3036ad2bcf562fa023726215f 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,5 @@ /.coverage autosubmit/miniTest.py autosubmit/simple_test.py -.vscode/ -.vscode \ No newline at end of file +.vscode +autosubmit.egg-info/ \ No newline at end of file diff --git a/autosubmit/config/files/jobs.conf b/autosubmit/config/files/jobs.conf index eaf192ce6b081598bb98537b9079061013f74d8f..b42c36f193ca9b0c60e5df3fbb3c45bc5164f4ae 100644 --- a/autosubmit/config/files/jobs.conf +++ b/autosubmit/config/files/jobs.conf @@ -47,6 +47,8 @@ # CHECK = False ## Select the interpreter that will run the job. Options: bash, python, r Default: bash # TYPE = bash +## Specify the path to the interpreter. If empty, use the system default based on job type. Default: empty +# EXECUTABLE = /my_python_env/python3 ## Synchronize a chunk job with its dependency chunks at a 'date' or 'member' level # SYNCHRONIZE = date | member ## Optional. Custom directives for the resource manager of the platform used for that job. 
diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 89d34d3120eedc486d27460234409ee3821539c4..3fc3f1076c8323483d25725139901dd049a6e7fd 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -107,6 +107,7 @@ class Job(object): self.log_retries = 5 self.id = job_id self.file = None + self.executable = None self._local_logs = ('', '') self._remote_logs = ('', '') @@ -913,9 +914,11 @@ class Job(object): def _get_paramiko_template(self, snippet, template): current_platform = self._platform - return ''.join([snippet.as_header(current_platform.get_header(self)), - template, - snippet.as_tailer()]) + return ''.join([ + snippet.as_header(current_platform.get_header(self), self.executable), + template, + snippet.as_tailer() + ]) def queuing_reason_cancel(self, reason): try: @@ -1362,9 +1365,9 @@ for job in {0} do if [ -f "${{job}}_STAT" ] then - echo ${{job}} $(head ${{job}}_STAT) + echo ${{job}} $(head ${{job}}_STAT) else - echo ${{job}} + echo ${{job}} fi done """).format(str(not_finished_jobs_names), str(remote_log_dir), '\n'.ljust(13)) diff --git a/autosubmit/job/job_common.py b/autosubmit/job/job_common.py index 03aef1eebbe4e49c21f77884e4ee610fdc411a4c..73b6d7889d8811b22474914fb0af972f65915016 100644 --- a/autosubmit/job/job_common.py +++ b/autosubmit/job/job_common.py @@ -91,11 +91,13 @@ class StatisticsSnippetBash: """ @staticmethod - def as_header(scheduler_header): + def as_header(scheduler_header, executable): + if not executable: + executable = "/bin/bash" return textwrap.dedent("""\ - #!/bin/bash + #!{0} - """) + \ + """).format(executable) + \ scheduler_header + \ textwrap.dedent("""\ ################### @@ -108,7 +110,7 @@ class StatisticsSnippetBash: ################### # Autosubmit job ################### - + """) @staticmethod @@ -122,7 +124,7 @@ class StatisticsSnippetBash: echo $(date +%s) >> ${job_name_ptrn}_STAT touch ${job_name_ptrn}_COMPLETED exit 0 - + """) @@ -133,11 +135,13 @@ class StatisticsSnippetPython: """ @staticmethod 
- def as_header(scheduler_header): + def as_header(scheduler_header, executable): + if not executable: + executable = "/usr/bin/env python" return textwrap.dedent("""\ - #!/usr/bin/env python + #!{0} - """) + \ + """).format(executable) + \ scheduler_header + \ textwrap.dedent("""\ ################### @@ -181,18 +185,20 @@ class StatisticsSnippetR: """ @staticmethod - def as_header(scheduler_header): + def as_header(scheduler_header, executable): + if not executable: + executable = "/usr/bin/env Rscript" return textwrap.dedent("""\ - #!/usr/bin/env Rscript + #!{0} - """) + \ + """).format(executable) + \ scheduler_header + \ textwrap.dedent("""\ ################### # Autosubmit header ################### - job_name_ptrn = '%CURRENT_LOGDIR%/%JOBNAME%' + job_name_ptrn = '%CURRENT_LOGDIR%/%JOBNAME%' fileConn<-file(paste(job_name_ptrn,"_STAT", sep = ''),"w") writeLines(toString(trunc(as.numeric(Sys.time()))), fileConn) @@ -229,11 +235,13 @@ class StatisticsSnippetEmpty: """ @staticmethod - def as_header(scheduler_header): + def as_header(scheduler_header, executable): + if not executable: + executable = "/bin/bash" return textwrap.dedent("""\ - #!/bin/bash + #!{0} - """) + \ + """).format(executable) + \ scheduler_header @staticmethod diff --git a/autosubmit/job/job_dict.py b/autosubmit/job/job_dict.py index 6b7ebdbeff37cc696137de9344bf6c3fd4e20270..e57bb11939d91f767bc53d5936c22e9d230301a9 100644 --- a/autosubmit/job/job_dict.py +++ b/autosubmit/job/job_dict.py @@ -296,7 +296,7 @@ class DicJobs: if split > -1: name += "_{0}".format(split) name += "_" + section - if name in jobs_data: + if name in jobs_data: job = Job(name, jobs_data[name][1], jobs_data[name][2], priority) job.local_logs = (jobs_data[name][8], jobs_data[name][9]) job.remote_logs = (jobs_data[name][10], jobs_data[name][11]) @@ -317,7 +317,6 @@ class DicJobs: job.delay = int(self.get_option(section, "DELAY", -1)) job.wait = self.get_option(section, "WAIT", 'true').lower() == 'true' job.rerun_only = 
self.get_option(section, "RERUN_ONLY", 'false').lower() == 'true' - job_type = self.get_option(section, "TYPE", default_job_type).lower() if job_type == 'bash': job.type = Type.BASH @@ -325,6 +324,7 @@ job.type = Type.PYTHON elif job_type == 'r': job.type = Type.R + job.executable = self.get_option(section, "EXECUTABLE", None) job.platform_name = self.get_option(section, "PLATFORM", None) if job.platform_name is not None: diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst index 47fa2388a207d7ba42845ee7c967c52742a40d3a..e68517ec7ede1412e081688ef78b337e7a0eeee0 100644 --- a/docs/source/tutorial.rst +++ b/docs/source/tutorial.rst @@ -196,6 +196,8 @@ Examples: # CHECK = False ## Select the interpreter that will run the job. Options: bash, python, r Default: bash # TYPE = bash + ## Specify the path to the interpreter. If empty, use the system default based on job type. Default: empty + # EXECUTABLE = /my_python_env/python3 [LOCAL_SETUP] @@ -319,7 +321,7 @@ Examples: # Number of retrials if a job fails. Can ve override at job level # Default = 0 RETRIALS = 0 - # Default output type for CREATE, MONITOR, SET STATUS, RECOVERY. Available options: pdf, svg, png, ps, txt + # Default output type for CREATE, MONITOR, SET STATUS, RECOVERY. 
Available options: pdf, svg, png, ps, txt # Default = pdf OUTPUT = pdf diff --git a/test/unit/test_dic_jobs.py b/test/unit/test_dic_jobs.py index 7c6e045e35072652ef05da25f954c2052f0857ff..49785c712d6e930b689f084d334738bc43769941 100644 --- a/test/unit/test_dic_jobs.py +++ b/test/unit/test_dic_jobs.py @@ -278,35 +278,35 @@ class TestDicJobs(TestCase): self.assertEquals(len(self.dictionary._dic[mock_section.name]), len(self.date_list)) def test_create_job_creates_a_job_with_right_parameters(self): - # arrange section = '' priority = 99 date = datetime(2016, 1, 1) member = 'fc0' chunk = 'ch0' - frequency = 123 - delay = -1 - platform_name = 'fake-platform' - filename = 'fake-fike' - queue = 'fake-queue' - processors = '111' - threads = '222' - tasks = '333' - memory = memory_per_task = 444 - wallclock = 555 - notify_on = 'COMPLETED FAILED' - synchronize = None - self.parser_mock.has_option = Mock(side_effect=[True, True, True, True, True, True, True, True, True, True, True, - True, True, True, True, False, True, False,False]) - self.parser_mock.get = Mock(side_effect=[frequency, delay, 'True', 'True', 'bash', platform_name, filename, queue, - 'True', processors, threads, tasks, memory, memory_per_task, - wallclock, notify_on,synchronize]) + # arrange + options = { + 'FREQUENCY': 123, + 'DELAY': -1, + 'PLATFORM': 'fake-platform', + 'FILE': 'fake-fike', + 'QUEUE': 'fake-queue', + 'PROCESSORS': '111', + 'THREADS': '222', + 'TASKS': '333', + 'MEMORY': 'memory_per_task= 444', + 'WALLCLOCK': 555, + 'NOTIFY_ON': 'COMPLETED FAILED', + 'SYNCHRONIZE': None, + 'RERUN_ONLY': 'True', + } + self.parser_mock.has_option = lambda _, option: option in options + self.parser_mock.get = lambda _, option: options[option] job_list_mock = Mock() job_list_mock.append = Mock() self.dictionary._jobs_list.get_job_list = Mock(return_value=job_list_mock) # act - created_job = self.dictionary.build_job(section, priority, date, member, chunk, dict()) + created_job = 
self.dictionary.build_job(section, priority, date, member, chunk, 'bash') # assert self.assertEquals('random-id_2016010100_fc0_ch0_', created_job.name) @@ -317,20 +317,21 @@ class TestDicJobs(TestCase): self.assertEquals(member, created_job.member) self.assertEquals(chunk, created_job.chunk) self.assertEquals(self.date_format, created_job.date_format) - self.assertEquals(frequency, created_job.frequency) - self.assertEquals(delay, created_job.delay) + self.assertEquals(options['FREQUENCY'], created_job.frequency) + self.assertEquals(options['DELAY'], created_job.delay) self.assertTrue(created_job.wait) self.assertTrue(created_job.rerun_only) self.assertEquals(Type.BASH, created_job.type) - self.assertEquals(platform_name, created_job.platform_name) - self.assertEquals(filename, created_job.file) - self.assertEquals(queue, created_job.queue) + self.assertEquals(None, created_job.executable) + self.assertEquals(options['PLATFORM'], created_job.platform_name) + self.assertEquals(options['FILE'], created_job.file) + self.assertEquals(options['QUEUE'], created_job.queue) self.assertTrue(created_job.check) - self.assertEquals(processors, created_job.processors) - self.assertEquals(threads, created_job.threads) - self.assertEquals(tasks, created_job.tasks) - self.assertEquals(memory, created_job.memory) - self.assertEquals(wallclock, created_job.wallclock) + self.assertEquals(options['PROCESSORS'], created_job.processors) + self.assertEquals(options['THREADS'], created_job.threads) + self.assertEquals(options['TASKS'], created_job.tasks) + self.assertEquals(options['MEMORY'], created_job.memory) + self.assertEquals(options['WALLCLOCK'], created_job.wallclock) self.assertIsNone(created_job.retrials) job_list_mock.append.assert_called_once_with(created_job)