diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py index 8342e54c6439113975cce208812220e4517f121d..4d499e3823f0a7005049de5063a0e61d057f9997 100644 --- a/autosubmit/autosubmit.py +++ b/autosubmit/autosubmit.py @@ -4438,14 +4438,17 @@ class Autosubmit: "Preparing .lock file to avoid multiple instances with same expid.") as_conf = AutosubmitConfig(expid, BasicConfig, YAMLParserFactory()) - as_conf.check_conf_files(True) - + # Get original configuration + as_conf.check_conf_files(False) project_type = as_conf.get_project_type() # Getting output type provided by the user in config, 'pdf' as default output_type = as_conf.get_output_type() if not Autosubmit._copy_code(as_conf, expid, project_type, False): return False + # Update configuration with the new config in the dist ( if any ) + as_conf.check_conf_files(False) + if not os.path.exists(os.path.join(exp_path, "pkl")): raise AutosubmitCritical( "The pkl folder doesn't exists. Make sure that the 'pkl' folder exists in the following path: {}".format( diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 495288ee3567f3704a07653ece544f226fb5bd08..bfdadc95d6ec10da703ee6145a0117e990f7fea2 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -993,6 +993,8 @@ class Job(object): """ chunk = 1 as_conf.reload() + #parameters = as_conf.sustitute_dynamic_variables(parameters,25) + parameters = parameters.copy() parameters.update(default_parameters) parameters['JOBNAME'] = self.name @@ -1015,10 +1017,10 @@ class Job(object): chunk = self.chunk parameters['CHUNK'] = chunk - total_chunk = int(parameters.get('EXPERIMENT.NUMCHUNKS')) - chunk_length = int(parameters['EXPERIMENT.CHUNKSIZE']) - chunk_unit = str(parameters['EXPERIMENT.CHUNKSIZEUNIT']).lower() - cal = str(parameters['EXPERIMENT.CALENDAR']).lower() + total_chunk = int(parameters.get('EXPERIMENT.NUMCHUNKS',1)) + chunk_length = int(parameters.get('EXPERIMENT.CHUNKSIZE',1)) + chunk_unit = 
str(parameters.get('EXPERIMENT.CHUNKSIZEUNIT',"")).lower() + cal = str(parameters.get('EXPERIMENT.CALENDAR',"")).lower() chunk_start = chunk_start_date( self.date, chunk, chunk_length, chunk_unit, cal) chunk_end = chunk_end_date( @@ -1033,36 +1035,30 @@ class Job(object): parameters['RUN_DAYS'] = str( subs_dates(chunk_start, chunk_end, cal)) - parameters['Chunk_End_IN_DAYS'] = str( + parameters['CHUNK_END_IN_DAYS'] = str( subs_dates(self.date, chunk_end, cal)) - #parameters['Chunk_START_DATE'] = date2str( - # chunk_start, self.date_format) - #parameters['Chunk_START_YEAR'] = str(chunk_start.year) - #parameters['Chunk_START_MONTH'] = str(chunk_start.month).zfill(2) - #parameters['Chunk_START_DAY'] = str(chunk_start.day).zfill(2) - #parameters['Chunk_START_HOUR'] = str(chunk_start.hour).zfill(2) - parameters['Chunk_START_DATE'] = date2str( + parameters['CHUNK_START_DATE'] = date2str( chunk_start, self.date_format) - parameters['Chunk_START_YEAR'] = str(chunk_start.year) - parameters['Chunk_START_MONTH'] = str(chunk_start.month).zfill(2) - parameters['Chunk_START_DAY'] = str(chunk_start.day).zfill(2) - parameters['Chunk_START_HOUR'] = str(chunk_start.hour).zfill(2) + parameters['CHUNK_START_YEAR'] = str(chunk_start.year) + parameters['CHUNK_START_MONTH'] = str(chunk_start.month).zfill(2) + parameters['CHUNK_START_DAY'] = str(chunk_start.day).zfill(2) + parameters['CHUNK_START_HOUR'] = str(chunk_start.hour).zfill(2) - parameters['Chunk_SECOND_TO_LAST_DATE'] = date2str( + parameters['CHUNK_SECOND_TO_LAST_DATE'] = date2str( chunk_end_1, self.date_format) - parameters['Chunk_SECOND_TO_LAST_YEAR'] = str(chunk_end_1.year) - parameters['Chunk_SECOND_TO_LAST_MONTH'] = str(chunk_end_1.month).zfill(2) - parameters['Chunk_SECOND_TO_LAST_DAY'] = str(chunk_end_1.day).zfill(2) - parameters['Chunk_SECOND_TO_LAST_HOUR'] = str(chunk_end_1.hour).zfill(2) + parameters['CHUNK_SECOND_TO_LAST_YEAR'] = str(chunk_end_1.year) + parameters['CHUNK_SECOND_TO_LAST_MONTH'] = 
str(chunk_end_1.month).zfill(2) + parameters['CHUNK_SECOND_TO_LAST_DAY'] = str(chunk_end_1.day).zfill(2) + parameters['CHUNK_SECOND_TO_LAST_HOUR'] = str(chunk_end_1.hour).zfill(2) - parameters['Chunk_END_DATE'] = date2str( + parameters['CHUNK_END_DATE'] = date2str( chunk_end_1, self.date_format) - parameters['Chunk_END_YEAR'] = str(chunk_end.year) - parameters['Chunk_END_MONTH'] = str(chunk_end.month).zfill(2) - parameters['Chunk_END_DAY'] = str(chunk_end.day).zfill(2) - parameters['Chunk_END_HOUR'] = str(chunk_end.hour).zfill(2) + parameters['CHUNK_END_YEAR'] = str(chunk_end.year) + parameters['CHUNK_END_MONTH'] = str(chunk_end.month).zfill(2) + parameters['CHUNK_END_DAY'] = str(chunk_end.day).zfill(2) + parameters['CHUNK_END_HOUR'] = str(chunk_end.hour).zfill(2) parameters['PREV'] = str(subs_dates(self.date, chunk_start, cal)) @@ -1172,6 +1168,7 @@ class Job(object): parameters[wrapper_section+"_EXTENSIBLE"] = int(as_conf.get_extensible_wallclock(as_conf.experiment_data["WRAPPERS"].get(wrapper_section))) self.dependencies = parameters['DEPENDENCIES'] + # This shouldn't be necesary anymore as now all sub is done in the as_conf.reload() if len(self.export) > 0: variables = re.findall('%(? 
0: @@ -1188,23 +1185,8 @@ class Job(object): parameters['EXPORT'] = self.export parameters['PROJECT_TYPE'] = as_conf.get_project_type() - substituted = False - max_deep = 25 - dynamic_variables = [] - backup_variables = as_conf.dynamic_variables - while len(as_conf.dynamic_variables) > 0 and max_deep > 0: - dynamic_variables = [] - for dynamic_var in as_conf.dynamic_variables: - substituted,new_param = as_conf.sustitute_placeholder_variables(dynamic_var[0],dynamic_var[1],parameters) - if not substituted: - dynamic_variables.append(dynamic_var) - else: - parameters= new_param - as_conf.dynamic_variables = dynamic_variables - max_deep = max_deep - 1 - as_conf.dynamic_variables = backup_variables + # For some reason, there is return but the assigne is also neccesary self.parameters = parameters - return parameters def update_content_extra(self,as_conf,files): additional_templates = [] diff --git a/autosubmit/job/job_common.py b/autosubmit/job/job_common.py index 16b9b473afcf13f7e61d4863bf3174c83347355f..bc87eaaf3de27c73251c419df6403996c778b5f9 100644 --- a/autosubmit/job/job_common.py +++ b/autosubmit/job/job_common.py @@ -347,6 +347,7 @@ def increase_wallclock_by_chunk(current, increase, chunk): :return: HH:MM wallclock :rtype: str """ + # Pipeline is not testing this since mock is not well made try: if current and increase and chunk and chunk > 0: wallclock = current.split(":") diff --git a/autosubmit/job/job_dict.py b/autosubmit/job/job_dict.py index d81fbe456300b87591a917710ea5b961e03bf47b..d453cd51c5105074018ea072a7abb7ff837f56cb 100644 --- a/autosubmit/job/job_dict.py +++ b/autosubmit/job/job_dict.py @@ -379,8 +379,11 @@ class DicJobs: elif job_type == 'r': job.type = Type.R job.executable = str(parameters[section].get( "EXECUTABLE", "")) - default_data = self.experiment_data.get("DEFAULT",{}) - job.platform_name = str(parameters[section].get( "PLATFORM", default_data.get("HPCARCH",''))).upper() + hpcarch = self.experiment_data.get("DEFAULT",{}) + hpcarch = 
hpcarch.get("HPCARCH","") + job.platform_name = str(parameters[section].get("PLATFORM", hpcarch)).upper() + if self.experiment_data["PLATFORMS"].get(job.platform_name, "") == "" and job.platform_name.upper() != "LOCAL": + raise AutosubmitCritical("Platform does not exists, check the value of %JOBS.{0}.PLATFORM% = {1} parameter".format(job.section,job.platform_name),7000,"List of platforms: {0} ".format(self.experiment_data["PLATFORMS"].keys()) ) job.file = str(parameters[section].get( "FILE", "")) job.additional_files = parameters[section].get( "ADDITIONAL_FILES", []) diff --git a/bin/autosubmit b/bin/autosubmit index b3d084120df6b30b77e122ef71d3e883d06e47a1..bc18d54311141b98a5742a9ee5ce1a1468892ae6 100755 --- a/bin/autosubmit +++ b/bin/autosubmit @@ -49,8 +49,8 @@ def main(): except AutosubmitCritical as e: if os.path.exists(os.path.join(Log.file_path, "autosubmit.lock")): os.remove(os.path.join(Log.file_path, "autosubmit.lock")) - Log.error(str(traceback.print_exc())) - if e.trace is not None: + str(traceback.print_exc()) + if e.trace is not None or e.trace == "": Log.error("Trace: {0}", e.trace) Log.critical("{1} [eCode={0}]", e.code, e.message) diff --git a/setup.py b/setup.py index 44481bf0425d1f3b07f0a87f210c9f629f29dadb..47ddd81a2d2ea38d38cbf91a4346da433c61adc2 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ setup( url='http://www.bsc.es/projects/earthscience/autosubmit/', download_url='https://earth.bsc.es/wiki/doku.php?id=tools:autosubmit', keywords=['climate', 'weather', 'workflow', 'HPC'], - 
install_requires=['packaging>19','six>=1.10.0','configobj>=5.0.6','argparse>=1.4.0','python-dateutil>=2.8.2','matplotlib==3.4.3','numpy<1.22','py3dotplus>=1.1.0','pyparsing>=3.0.7','paramiko>=2.9.2','mock>=4.0.3','portalocker>=2.3.2','networkx==2.6.3','requests>=2.27.1','bscearth.utils>=0.5.2','cryptography>=36.0.1','setuptools>=60.8.2','xlib>=0.21','pip>=22.0.3','ruamel.yaml','pythondialog','pytest','nose','coverage','PyNaCl==1.4.0','Pygments','autosubmitconfigparser==0.0.74'], + install_requires=['packaging>19','six>=1.10.0','configobj>=5.0.6','argparse>=1.4.0','python-dateutil>=2.8.2','matplotlib==3.4.3','numpy<1.22','py3dotplus>=1.1.0','pyparsing>=3.0.7','paramiko>=2.9.2','mock>=4.0.3','portalocker>=2.3.2','networkx==2.6.3','requests>=2.27.1','bscearth.utils>=0.5.2','cryptography>=36.0.1','setuptools>=60.8.2','xlib>=0.21','pip>=22.0.3','ruamel.yaml','pythondialog','pytest','nose','coverage','PyNaCl==1.4.0','Pygments','autosubmitconfigparser==0.0.75'], classifiers=[ "Programming Language :: Python :: 3.7", "License :: OSI Approved :: GNU General Public License (GPL)", diff --git a/test/README_PIP.md b/test/README_PIP.md new file mode 100644 index 0000000000000000000000000000000000000000..3fce62d7225bbea1640ff6931a26e09888e51133 --- /dev/null +++ b/test/README_PIP.md @@ -0,0 +1,12 @@ + +Autosubmit is a lightweight workflow manager designed to meet climate research necessities. Unlike other workflow solutions in the domain, it integrates the capabilities of an experiment manager, workflow orchestrator and monitor in a self-contained application. The experiment manager allows for defining and configuring experiments, supported by a hierarchical database that ensures reproducibility and traceability. The orchestrator is designed to run complex workflows in research and operational mode by managing their dependencies and interfacing with local and remote hosts. 
These multi-scale workflows can involve from a few to thousands of steps and from one to multiple platforms. + +Autosubmit facilitates easy and fast integration and relocation on new platforms. On the one hand, users can rapidly execute general scripts and progressively parametrize them by reading Autosubmit variables. On the other hand, it is a self-contained desktop application capable of submitting jobs to remote platforms without any external deployment. + +Due to its robustness, it can handle different eventualities, such as networking or I/O errors. Finally, the monitoring capabilities extend beyond the desktop application through a REST API that allows communication with workflow monitoring tools such as the Autosubmit web GUI. + +Autosubmit is a Python package provided on PyPI. Conda recipes can also be found on the website. A containerized version for testing purposes is also available but not public yet. + +It has contributed to various European research projects and runs different operational systems. During the following years, it will support some of the Earth Digital Twins, such as the Digital Twin Ocean. + +Concretely, it is currently used at Barcelona Supercomputing Centre (BSC) to run models (EC-Earth, MONARCH, NEMO, CALIOPE, HERMES...), operational toolchains (S2S4E), data-download workflows (ECMWF MARS), and many others. Autosubmit has run these workflows on different supercomputers at BSC, ECMWF, IC3, CESGA, EPCC, PDC, and OLCF.
\ No newline at end of file diff --git a/test/unit/test_job.py b/test/unit/test_job.py index 0913867538818631d134326f0fff581bd632caab..9a0cdddbff8c22fb00544b6311062ed54910c4ab 100644 --- a/test/unit/test_job.py +++ b/test/unit/test_job.py @@ -166,8 +166,9 @@ class TestJob(TestCase): self.job.parameters['NUMTASK'] = 666 self.job._tmp_path = '/dummy/tmp/path' + self.job.additional_files = '/dummy/tmp/path_additional_file' - update_content_mock = Mock(return_value=('some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK% %% %%','some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK% %% %%')) + update_content_mock = Mock(return_value=('some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK% %% %%',['some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK% %% %%'])) self.job.update_content = update_content_mock config = Mock(spec=AutosubmitConfig)