diff --git a/README.md b/README.md index 2d97af1884444a3a605c320d8727c3b2d1393339..ed32aeee373320b222be60efda9c03689975fe90 100644 --- a/README.md +++ b/README.md @@ -55,11 +55,11 @@ SUNSET has two different modes: - A 'debug mode' which you can run in an interactive R session. See an example here: [Use case 1.1](https://earth.bsc.es/gitlab/es/sunset/-/blob/master/use_cases/ex1_1_single_analysis_terminal/ex1_1-handson.md) -- A 'job launch' mode which you can use to run one or more jobs on an HPC platform. In this way, you can run the same analysis for multiple models, initialization dates, variables or regions at the same time, from one recipe. See an example here: [Use case 1.3](https://earth.bsc.es/gitlab/es/sunset/-/blob/master/use_cases/ex1_1_single_analysis_terminal/ex1_1-handson.md) +- A 'job launch' mode which you can use to run one or more jobs on an HPC platform. In this way, you can run the same analysis for multiple models, initialization dates, variables or regions at the same time, from one recipe. 
See an example here: [Use case 1.3](https://earth.bsc.es/gitlab/es/sunset/-/blob/master/use_cases/ex1_3_nino_indices_comparison/ex1_3-handson.md) You can find a summary of the differences between the two modes in this presentation: [Steps to use SUNSET](https://docs.google.com/presentation/d/1FdW25b4JZLxXDkVRVIOz-P_2A1WXLWDXhxZvisrVdqU/edit#slide=id.g308472768fe_0_124) -Resources +Documentation and Use Cases --------- You can access the documentation through the wiki: @@ -69,6 +69,9 @@ Use cases and hands-on tutorials are available in the repository for you to foll [SUNSET Use Cases](https://earth.bsc.es/gitlab/es/sunset/-/tree/master/use_cases/) +Other resources +--------- + You may also find useful information in the slides from past user meetings: [R-tools Climate Forecast Analysis Training session 2023](https://earth.bsc.es/wiki/lib/exe/fetch.php?media=tools:day2_04_handson_r_tools.pdf) diff --git a/autosubmit/conf_esarchive/jobs.yml b/autosubmit/conf_esarchive/jobs.yml index 04d23ba0acedf56ab1edb813db4f2e1300168f59..8196dcc10ba0dd5fab8fd2a5e738ea852fa4a555 100644 --- a/autosubmit/conf_esarchive/jobs.yml +++ b/autosubmit/conf_esarchive/jobs.yml @@ -6,6 +6,8 @@ JOBS: NOTIFY_ON: PLATFORM: nord3v2 PROCESSORS: + ## TODO: Uncomment (see #162) + # NODES: 1 # SPLITS: # n_atomic_recipes, number of atomic recipes multimodel: FILE: autosubmit/auto-multimodel.sh @@ -14,6 +16,8 @@ JOBS: NOTIFY_ON: PLATFORM: nord3v2 PROCESSORS: + ## TODO: Uncomment + # NODES: 1 DEPENDENCIES: verification: SPLITS_FROM: diff --git a/autosubmit/conf_gpfs/jobs.yml b/autosubmit/conf_gpfs/jobs.yml index 9f9ce28148dd78dc3fcc97c268236475c47b3004..e6806ca7192dfc4d74b22cfd3dc5ca1c5d5d1cf7 100644 --- a/autosubmit/conf_gpfs/jobs.yml +++ b/autosubmit/conf_gpfs/jobs.yml @@ -13,6 +13,8 @@ JOBS: NOTIFY_ON: PLATFORM: PROCESSORS: + ## TODO: Uncomment + # NODES: 1 # SPLITS: # n_atomic_recipes, number of atomic recipes multimodel: FILE: autosubmit/auto-multimodel.sh @@ -22,6 +24,8 @@ JOBS: PLATFORM: 
PROCESSORS: + ## TODO: Uncomment + # NODES: 1 DEPENDENCIES: verification: SPLITS_FROM: SPLITS: # n_atomic_recipes/n_models = n_multimodels diff --git a/launch_SUNSET.sh b/launch_SUNSET.sh index 74c161231b0eb12ee68cd93f074d683bfddf792a..b95898b549d55a954628345499c0c45b294d3b0d 100644 --- a/launch_SUNSET.sh +++ b/launch_SUNSET.sh @@ -151,7 +151,7 @@ if [[ $run_method == "sbatch" ]]; then outfile=${logdir}/run-${job_name}.out errfile=${logdir}/run-${job_name}.err # Send batch job and capture job ID - job_ID=$(sbatch --parsable --job-name="SUNSET_verification" $platform_params --output=$outfile --error=$errfile --time=$wallclock --cpus-per-task=$cpus $custom_directives conf/slurm_templates/run_parallel_workflow.sh ${script} ${atomic_recipe}) + job_ID=$(sbatch --parsable --job-name="SUNSET_verification" $platform_params --nodes=1 --output=$outfile --error=$errfile --time=$wallclock --cpus-per-task=$cpus $custom_directives conf/slurm_templates/run_parallel_workflow.sh ${script} ${atomic_recipe}) # Add job ID to array verification_job_list+=($job_ID) echo "Submitted batch job $job_ID" @@ -166,7 +166,7 @@ if [[ $run_method == "sbatch" ]]; then outfile=${logdir}/run-multimodel-${job_name}.out errfile=${logdir}/run-multimodel-${job_name}.err # Send batch job and capture job ID - job_ID=$(sbatch --parsable --dependency=afterok:$(IFS=,; echo "${verification_job_list[*]}") --kill-on-invalid-dep=yes --job-name="SUNSET_multimodel" $platform_params --output=$outfile --error=$errfile --time=$wallclock --cpus-per-task=$cpus $custom_directives conf/slurm_templates/run_parallel_workflow.sh ${script} ${atomic_recipe}) + job_ID=$(sbatch --parsable --dependency=afterok:$(IFS=,; echo "${verification_job_list[*]}") --kill-on-invalid-dep=yes --job-name="SUNSET_multimodel" $platform_params --nodes=1 --output=$outfile --error=$errfile --time=$wallclock --cpus-per-task=$cpus $custom_directives conf/slurm_templates/run_parallel_workflow.sh ${script} ${atomic_recipe}) # Add job ID to array 
multimodel_job_list+=($job_ID) echo "Submitted batch job $job_ID"