diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 37c04c5d6cb50de2447b8e3995518a8fb589851b..1ba0a11884a40d26a55baf2a8620b26af96181ca 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -18,7 +18,6 @@ prepare: - conda update conda - conda install -n autosubmit2 coverage=4.5.4 - test_python2: stage: test script: @@ -43,9 +42,8 @@ report_codacy: - source activate autosubmit2 - pip install codacy-coverage --upgrade - python-codacy-coverage -r test/coverage.xml - clean: stage: clean script: - - conda clean --all --yes \ No newline at end of file + - conda clean --all --yes diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py index 2857fbb9be78c4514f3fb168ccbd233585bb7873..88421892dc994ac4e50504a572858bc5f594047d 100644 --- a/autosubmit/job/job.py +++ b/autosubmit/job/job.py @@ -44,6 +44,7 @@ from threading import Thread import threading from autosubmit.platforms.paramiko_submitter import ParamikoSubmitter + def threaded(fn): def wrapper(*args, **kwargs): thread = Thread(target=fn, args=args, kwargs=kwargs) @@ -92,7 +93,7 @@ class Job(object): self.split = None self.delay = None self.synchronize = None - + self._long_name = None self.long_name = name self.date_format = '' @@ -108,14 +109,15 @@ class Job(object): self.status = status self.old_status = self.status - self.new_status=status + self.new_status = status self.priority = priority self._parents = set() self._children = set() self.fail_count = 0 self.expid = name.split('_')[0] self.parameters = dict() - self._tmp_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, self.expid, BasicConfig.LOCAL_TMP_DIR) + self._tmp_path = os.path.join( + BasicConfig.LOCAL_ROOT_DIR, self.expid, BasicConfig.LOCAL_TMP_DIR) self.write_start = False self._platform = None self.check = 'true' @@ -123,7 +125,6 @@ class Job(object): self.packed = False self.hold = False - def __getstate__(self): odict = self.__dict__ if '_platform' in odict: @@ -131,6 +132,9 @@ class Job(object): del odict['_platform'] # remove filehandle entry return 
odict + # def __str__(self): + # return self.name + def print_job(self): """ Prints debug information about the job @@ -499,7 +503,7 @@ class Job(object): return retrials_list @threaded - def retrieve_logfiles(self,copy_remote_logs,local_logs,remote_logs,expid,platform_name): + def retrieve_logfiles(self, copy_remote_logs, local_logs, remote_logs, expid, platform_name): as_conf = AutosubmitConfig(expid, BasicConfig, ConfigParserFactory()) as_conf.reload() submitter = self._get_submitter(as_conf) @@ -512,8 +516,10 @@ class Job(object): sleeptime = 5 i = 0 while (not out_exist or not err_exist) and i < retries: - out_exist = platform.check_file_exists(remote_logs[0]) # will do 5 retries - err_exist = platform.check_file_exists(remote_logs[1]) # will do 5 retries + out_exist = platform.check_file_exists( + remote_logs[0]) # will do 5 retries + err_exist = platform.check_file_exists( + remote_logs[1]) # will do 5 retries if not out_exist or not err_exist: sleeptime = sleeptime + 5 i = i + 1 @@ -521,15 +527,18 @@ class Job(object): if out_exist and err_exist: if copy_remote_logs: if local_logs != remote_logs: - self.synchronize_logs(platform,remote_logs,local_logs) # unifying names for log files + # unifying names for log files + self.synchronize_logs(platform, remote_logs, local_logs) remote_logs = local_logs platform.get_logs_files(self.expid, remote_logs) # Update the logs with Autosubmit Job Id Brand for local_log in local_logs: - platform.write_jobid(self.id,os.path.join(self._tmp_path, 'LOG_' + str(self.expid), local_log)) + platform.write_jobid(self.id, os.path.join( + self._tmp_path, 'LOG_' + str(self.expid), local_log)) platform.closeConnection() sleep(2) return + def update_status(self, copy_remote_logs=False): """ Updates job status, checking COMPLETED file if needed @@ -556,16 +565,19 @@ class Job(object): elif self.status == Status.COMPLETED: Log.result("Job {0} is COMPLETED", self.name) elif self.status == Status.FAILED: - Log.user_warning("Job {0} is 
FAILED. Checking completed files to confirm the failure...", self.name) + Log.user_warning( + "Job {0} is FAILED. Checking completed files to confirm the failure...", self.name) self.platform.get_completed_files(self.name) self.check_completion() if self.status == Status.COMPLETED: - Log.warning('Job {0} seems to have failed but there is a COMPLETED file', self.name) + Log.warning( + 'Job {0} seems to have failed but there is a COMPLETED file', self.name) Log.result("Job {0} is COMPLETED", self.name) else: self.update_children_status() elif self.status == Status.UNKNOWN: - Log.debug("Job {0} in UNKNOWN status. Checking completed files...", self.name) + Log.debug( + "Job {0} in UNKNOWN status. Checking completed files...", self.name) self.platform.get_completed_files(self.name) self.check_completion(Status.UNKNOWN) if self.status == Status.UNKNOWN: @@ -574,22 +586,25 @@ class Job(object): Log.result("Job {0} is COMPLETED", self.name) elif self.status == Status.SUBMITTED: # after checking the jobs , no job should have the status "submitted" - Log.warning('Job {0} in SUBMITTED status after checking.', self.name) + Log.warning( + 'Job {0} in SUBMITTED status after checking.', self.name) if previous_status != Status.RUNNING and self.status in [Status.COMPLETED, Status.FAILED, Status.UNKNOWN, Status.RUNNING]: self.write_start_time() - # Updating logs - if self.status in [Status.COMPLETED, Status.FAILED, Status.UNKNOWN]: + # Updating logs + if self.status in [Status.COMPLETED, Status.FAILED, Status.UNKNOWN]: self.write_end_time(self.status == Status.COMPLETED) - #New thread, check if file exist + # New thread, check if file exist expid = copy.deepcopy(self.expid) platform_name = copy.deepcopy(self.platform_name.lower()) local_logs = copy.deepcopy(self.local_logs) remote_logs = copy.deepcopy(self.remote_logs) - self.retrieve_logfiles(copy_remote_logs,local_logs,remote_logs,expid,platform_name) + self.retrieve_logfiles( + copy_remote_logs, local_logs, remote_logs, expid, 
platform_name) return self.status + @staticmethod def _get_submitter(as_conf): """ @@ -603,8 +618,10 @@ class Job(object): return ParamikoSubmitter() # communications library not known - Log.error('You have defined a not valid communications library on the configuration file') + Log.error( + 'You have defined a not valid communications library on the configuration file') raise Exception('Communications library not known') + def update_children_status(self): children = list(self.children) for child in children: @@ -623,12 +640,13 @@ class Job(object): if os.path.exists(log_name): self.status = Status.COMPLETED else: - Log.warning("Job {0} completion check failed. There is no COMPLETED file", self.name) + Log.warning( + "Job {0} completion check failed. There is no COMPLETED file", self.name) self.status = default_status def update_parameters(self, as_conf, parameters, default_parameters={'d': '%d%', 'd_': '%d_%', 'Y': '%Y%', 'Y_': '%Y_%', - 'M' : '%M%', 'M_' : '%M_%', 'm' : '%m%', 'm_' : '%m_%'}): + 'M': '%M%', 'M_': '%M_%', 'm': '%m%', 'm_': '%m_%'}): """ Refresh parameters value @@ -666,22 +684,29 @@ class Job(object): chunk_length = int(parameters['CHUNKSIZE']) chunk_unit = parameters['CHUNKSIZEUNIT'].lower() cal = parameters['CALENDAR'].lower() - chunk_start = chunk_start_date(self.date, chunk, chunk_length, chunk_unit, cal) - chunk_end = chunk_end_date(chunk_start, chunk_length, chunk_unit, cal) + chunk_start = chunk_start_date( + self.date, chunk, chunk_length, chunk_unit, cal) + chunk_end = chunk_end_date( + chunk_start, chunk_length, chunk_unit, cal) chunk_end_1 = previous_day(chunk_end, cal) - parameters['DAY_BEFORE'] = date2str(previous_day(self.date, cal), self.date_format) + parameters['DAY_BEFORE'] = date2str( + previous_day(self.date, cal), self.date_format) - parameters['RUN_DAYS'] = str(subs_dates(chunk_start, chunk_end, cal)) - parameters['Chunk_End_IN_DAYS'] = str(subs_dates(self.date, chunk_end, cal)) + parameters['RUN_DAYS'] = str( + 
subs_dates(chunk_start, chunk_end, cal)) + parameters['Chunk_End_IN_DAYS'] = str( + subs_dates(self.date, chunk_end, cal)) - parameters['Chunk_START_DATE'] = date2str(chunk_start, self.date_format) + parameters['Chunk_START_DATE'] = date2str( + chunk_start, self.date_format) parameters['Chunk_START_YEAR'] = str(chunk_start.year) parameters['Chunk_START_MONTH'] = str(chunk_start.month).zfill(2) parameters['Chunk_START_DAY'] = str(chunk_start.day).zfill(2) parameters['Chunk_START_HOUR'] = str(chunk_start.hour).zfill(2) - parameters['Chunk_END_DATE'] = date2str(chunk_end_1, self.date_format) + parameters['Chunk_END_DATE'] = date2str( + chunk_end_1, self.date_format) parameters['Chunk_END_YEAR'] = str(chunk_end_1.year) parameters['Chunk_END_MONTH'] = str(chunk_end_1.month).zfill(2) parameters['Chunk_END_DAY'] = str(chunk_end_1.day).zfill(2) @@ -714,9 +739,11 @@ class Job(object): self.scratch_free_space = job_platform.scratch_free_space self.custom_directives = as_conf.get_custom_directives(self.section) if self.custom_directives != '': - self.custom_directives = json.loads(as_conf.get_custom_directives(self.section)) + self.custom_directives = json.loads( + as_conf.get_custom_directives(self.section)) if job_platform.custom_directives: - self.custom_directives = self.custom_directives + json.loads(job_platform.custom_directives) + self.custom_directives = self.custom_directives + \ + json.loads(job_platform.custom_directives) elif job_platform.custom_directives: self.custom_directives = json.loads(job_platform.custom_directives) elif self.custom_directives == '': @@ -747,7 +774,8 @@ class Job(object): parameters['CURRENT_ROOTDIR'] = job_platform.root_dir parameters['CURRENT_LOGDIR'] = job_platform.get_files_path() - parameters['ROOTDIR'] = os.path.join(BasicConfig.LOCAL_ROOT_DIR, self.expid) + parameters['ROOTDIR'] = os.path.join( + BasicConfig.LOCAL_ROOT_DIR, self.expid) parameters['PROJDIR'] = as_conf.get_project_dir() parameters['NUMMEMBERS'] = 
len(as_conf.get_member_list()) @@ -767,7 +795,8 @@ class Job(object): :rtype: str """ if self.parameters['PROJECT_TYPE'].lower() != "none": - template_file = open(os.path.join(as_conf.get_project_dir(), self.file), 'r') + template_file = open(os.path.join( + as_conf.get_project_dir(), self.file), 'r') template = template_file.read() else: if self.type == Type.BASH: @@ -788,14 +817,17 @@ class Job(object): else: raise Exception('Job type {0} not supported'.format(self.type)) - template_content = self._get_template_content(as_conf, snippet, template) + template_content = self._get_template_content( + as_conf, snippet, template) return template_content def get_wrapped_content(self, as_conf): snippet = StatisticsSnippetEmpty - template = 'python $SCRATCH/{1}/LOG_{1}/{0}.cmd'.format(self.name, self.expid) - template_content = self._get_template_content(as_conf, snippet, template) + template = 'python $SCRATCH/{1}/LOG_{1}/{0}.cmd'.format( + self.name, self.expid) + template_content = self._get_template_content( + as_conf, snippet, template) return template_content def _get_template_content(self, as_conf, snippet, template): @@ -824,11 +856,11 @@ class Job(object): if len(reason.split('(', 1)) > 1: reason = reason.split('(', 1)[1].split(')')[0] if 'Invalid' in reason or reason in ['AssociationJobLimit', 'AssociationResourceLimit', 'AssociationTimeLimit', - 'BadConstraints', 'QOSMaxCpuMinutesPerJobLimit', 'QOSMaxWallDurationPerJobLimit', - 'QOSMaxNodePerJobLimit', 'DependencyNeverSatisfied', 'QOSMaxMemoryPerJob', - 'QOSMaxMemoryPerNode', 'QOSMaxMemoryMinutesPerJob', 'QOSMaxNodeMinutesPerJob', - 'InactiveLimit', 'JobLaunchFailure', 'NonZeroExitCode', 'PartitionNodeLimit', - 'PartitionTimeLimit', 'SystemFailure', 'TimeLimit', 'QOSUsageThreshold']: + 'BadConstraints', 'QOSMaxCpuMinutesPerJobLimit', 'QOSMaxWallDurationPerJobLimit', + 'QOSMaxNodePerJobLimit', 'DependencyNeverSatisfied', 'QOSMaxMemoryPerJob', + 'QOSMaxMemoryPerNode', 'QOSMaxMemoryMinutesPerJob', 
'QOSMaxNodeMinutesPerJob', + 'InactiveLimit', 'JobLaunchFailure', 'NonZeroExitCode', 'PartitionNodeLimit', + 'PartitionTimeLimit', 'SystemFailure', 'TimeLimit', 'QOSUsageThreshold']: return True return False except: @@ -853,13 +885,16 @@ class Job(object): parameters = self.parameters template_content = self.update_content(as_conf) for key, value in parameters.items(): - template_content = re.sub('%(? 1: reason = reason.split('(', 1)[1].split(')')[0] if 'Invalid' in reason or reason in ['AssociationJobLimit', 'AssociationResourceLimit', 'AssociationTimeLimit', - 'BadConstraints', 'QOSMaxCpuMinutesPerJobLimit', 'QOSMaxWallDurationPerJobLimit', - 'QOSMaxNodePerJobLimit', 'DependencyNeverSatisfied', 'QOSMaxMemoryPerJob', - 'QOSMaxMemoryPerNode', 'QOSMaxMemoryMinutesPerJob', 'QOSMaxNodeMinutesPerJob', - 'InactiveLimit', 'JobLaunchFailure', 'NonZeroExitCode', 'PartitionNodeLimit', - 'PartitionTimeLimit', 'SystemFailure', 'TimeLimit', 'QOSUsageThreshold']: + 'BadConstraints', 'QOSMaxCpuMinutesPerJobLimit', 'QOSMaxWallDurationPerJobLimit', + 'QOSMaxNodePerJobLimit', 'DependencyNeverSatisfied', 'QOSMaxMemoryPerJob', + 'QOSMaxMemoryPerNode', 'QOSMaxMemoryMinutesPerJob', 'QOSMaxNodeMinutesPerJob', + 'InactiveLimit', 'JobLaunchFailure', 'NonZeroExitCode', 'PartitionNodeLimit', + 'PartitionTimeLimit', 'SystemFailure', 'TimeLimit', 'QOSUsageThreshold']: return True return False except: @@ -1092,24 +1130,29 @@ class WrapperJob(Job): prev_status = self.status self.status = status Log.debug('Checking inner jobs status') - if self.status in [ Status.HELD, Status.QUEUING ]: # If WRAPPER is QUEUED OR HELD - self._check_inner_jobs_queue(prev_status) # This will update the inner jobs to QUEUE or HELD (normal behaviour) or WAITING ( if they fails to be held) - elif self.status == Status.RUNNING: # If wrapper is running - if prev_status in [ Status.SUBMITTED ]: # This will update the status from submitted or hold to running (if safety timer is high enough or queue is fast enough) + if 
self.status in [Status.HELD, Status.QUEUING]: # If WRAPPER is QUEUED OR HELD + # This will update the inner jobs to QUEUE or HELD (normal behaviour) or WAITING ( if they fails to be held) + self._check_inner_jobs_queue(prev_status) + elif self.status == Status.RUNNING: # If wrapper is running + # This will update the status from submitted or hold to running (if safety timer is high enough or queue is fast enough) + if prev_status in [Status.SUBMITTED]: for job in self.job_list: job.status = Status.QUEUING - self._check_running_jobs() #Check and update inner_jobs status that are elegible + self._check_running_jobs() # Check and update inner_jobs status that are elegible - elif self.status == Status.COMPLETED: # Completed wrapper will always come from check function. + # Completed wrapper will always come from check function. + elif self.status == Status.COMPLETED: self.check_inner_jobs_completed(self.job_list) - if self.status in [ Status.FAILED, Status.UNKNOWN ]: # Fail can come from check function or running/completed checkers. + # Fail can come from check function or running/completed checkers. 
+ if self.status in [Status.FAILED, Status.UNKNOWN]: self.status = Status.FAILED self.cancel_failed_wrapper_job() self.update_failed_jobs() - + def check_inner_jobs_completed(self, jobs): - not_completed_jobs = [job for job in jobs if job.status != Status.COMPLETED] + not_completed_jobs = [ + job for job in jobs if job.status != Status.COMPLETED] not_completed_job_names = [job.name for job in not_completed_jobs] job_names = ' '.join(not_completed_job_names) if job_names: @@ -1119,20 +1162,24 @@ class WrapperJob(Job): if completed_files and len(completed_files) > 0: if job.name in completed_files: completed_jobs.append(job) - job.new_status=Status.COMPLETED - job.update_status(self.as_config.get_copy_remote_logs() == 'true') + job.new_status = Status.COMPLETED + job.update_status( + self.as_config.get_copy_remote_logs() == 'true') for job in completed_jobs: self.running_jobs_start.pop(job, None) - not_completed_jobs = list(set(not_completed_jobs) - set(completed_jobs)) + not_completed_jobs = list( + set(not_completed_jobs) - set(completed_jobs)) for job in not_completed_jobs: self._check_finished_job(job) - def _check_inner_jobs_queue(self,prev_status): + def _check_inner_jobs_queue(self, prev_status): reason = str() if self.platform.type == 'slurm': - self.platform.send_command(self.platform.get_queue_status_cmd(self.id)) - reason = self.platform.parse_queue_reason(self.platform._ssh_output,self.id) + self.platform.send_command( + self.platform.get_queue_status_cmd(self.id)) + reason = self.platform.parse_queue_reason( + self.platform._ssh_output, self.id) if self._queuing_reason_cancel(reason): Log.error("Job {0} will be cancelled and set to FAILED as it was queuing due to {1}", self.name, reason) @@ -1141,7 +1188,9 @@ class WrapperJob(Job): return if reason == '(JobHeldUser)': if self.hold is False: - self.platform.send_command("scontrol release " + "{0}".format(self.id)) # SHOULD BE MORE CLASS (GET_scontrol realease but not sure if this can be implemented on 
others PLATFORMS + # SHOULD BE MORE CLASS (GET_scontrol realease but not sure if this can be implemented on others PLATFORMS + self.platform.send_command( + "scontrol release " + "{0}".format(self.id)) self.status = Status.QUEUING for job in self.job_list: job.hold = self.hold @@ -1151,8 +1200,10 @@ class WrapperJob(Job): self.status = Status.HELD Log.info("Job {0} is HELD", self.name) elif reason == '(JobHeldAdmin)': - Log.debug("Job {0} Failed to be HELD, canceling... ", self.name) - self.platform.send_command(self.platform.cancel_cmd + " {0}".format(self.id)) + Log.debug( + "Job {0} Failed to be HELD, canceling... ", self.name) + self.platform.send_command( + self.platform.cancel_cmd + " {0}".format(self.id)) self.status = Status.WAITING else: Log.info("Job {0} is QUEUING {1}", self.name, reason) @@ -1167,8 +1218,9 @@ class WrapperJob(Job): def _check_inner_job_wallclock(self, job): start_time = self.running_jobs_start[job] if self._is_over_wallclock(start_time, job.wallclock): - #if self.as_config.get_wrapper_type() in ['vertical', 'horizontal']: - Log.error("Job {0} inside wrapper {1} is running for longer than it's wallclock! Cancelling...".format(job.name, self.name)) + # if self.as_config.get_wrapper_type() in ['vertical', 'horizontal']: + Log.error("Job {0} inside wrapper {1} is running for longer than it's wallclock! 
Cancelling...".format( + job.name, self.name)) job.new_status = Status.FAILED job.update_status(self.as_config.get_copy_remote_logs() == 'true') return True @@ -1176,12 +1228,14 @@ class WrapperJob(Job): def _check_running_jobs(self): not_finished_jobs_dict = OrderedDict() - not_finished_jobs = [ job for job in self.job_list if job.status not in [ Status.COMPLETED, Status.FAILED ] ] + not_finished_jobs = [job for job in self.job_list if job.status not in [ + Status.COMPLETED, Status.FAILED]] for job in not_finished_jobs: - tmp = [parent for parent in job.parents if parent.status == Status.COMPLETED or self.status == Status.COMPLETED] + tmp = [parent for parent in job.parents if parent.status == + Status.COMPLETED or self.status == Status.COMPLETED] if job.parents is None or len(tmp) == len(job.parents): not_finished_jobs_dict[job.name] = job - if len(not_finished_jobs_dict.keys()) > 0: # Only running jobs will enter there + if len(not_finished_jobs_dict.keys()) > 0: # Only running jobs will enter there not_finished_jobs_names = ' '.join(not_finished_jobs_dict.keys()) remote_log_dir = self.platform.get_remote_log_dir() # PREPARE SCRIPT TO SEND @@ -1196,23 +1250,26 @@ do echo ${{job}} fi done -""").format(str(not_finished_jobs_names),str(remote_log_dir), '\n'.ljust(13)) +""").format(str(not_finished_jobs_names), str(remote_log_dir), '\n'.ljust(13)) - log_dir = os.path.join(self._tmp_path, 'LOG_{0}'.format(self.expid)) - multiple_checker_inner_jobs = os.path.join(log_dir, "inner_jobs_checker.sh") + log_dir = os.path.join( + self._tmp_path, 'LOG_{0}'.format(self.expid)) + multiple_checker_inner_jobs = os.path.join( + log_dir, "inner_jobs_checker.sh") open(multiple_checker_inner_jobs, 'w+').write(command) os.chmod(multiple_checker_inner_jobs, 0o770) self._platform.send_file(multiple_checker_inner_jobs, False) - command = os.path.join(self._platform.get_files_path(), "inner_jobs_checker.sh") + command = os.path.join( + self._platform.get_files_path(), 
"inner_jobs_checker.sh") # wait = 2 retries = 5 over_wallclock = False content = '' while content == '' and retries > 0: - self._platform.send_command(command,False) + self._platform.send_command(command, False) content = self.platform._ssh_output.split('\n') - #content.reverse() + # content.reverse() for line in content[:-1]: out = line.split() if out: @@ -1221,23 +1278,28 @@ done if len(out) > 1: if job not in self.running_jobs_start: start_time = self._check_time(out, 1) - Log.info("Job {0} started at {1}".format(jobname, str(parse_date(start_time)))) + Log.info("Job {0} started at {1}".format( + jobname, str(parse_date(start_time)))) self.running_jobs_start[job] = start_time - job.new_status=Status.RUNNING - job.update_status(self.as_config.get_copy_remote_logs() == 'true') + job.new_status = Status.RUNNING + job.update_status( + self.as_config.get_copy_remote_logs() == 'true') if len(out) == 2: Log.info("Job {0} is RUNNING".format(jobname)) - over_wallclock = self._check_inner_job_wallclock(job) + over_wallclock = self._check_inner_job_wallclock( + job) if over_wallclock: - Log.error("Job {0} is FAILED".format(jobname)) + Log.error( + "Job {0} is FAILED".format(jobname)) elif len(out) == 3: end_time = self._check_time(out, 2) self._check_finished_job(job) - Log.info("Job {0} finished at {1}".format(jobname, str(parse_date(end_time)))) + Log.info("Job {0} finished at {1}".format( + jobname, str(parse_date(end_time)))) if content == '': sleep(wait) - retries= retries -1 + retries = retries - 1 if retries == 0 or over_wallclock: self.status = Status.FAILED @@ -1250,24 +1312,27 @@ done output = self.platform.check_completed_files(job.name) if output is None or output == '': sleep(wait) - retries=retries-1 + retries = retries-1 if output is not None and output != '' and 'COMPLETED' in output: - job.new_status=Status.COMPLETED + job.new_status = Status.COMPLETED job.update_status(self.as_config.get_copy_remote_logs() == 'true') else: - Log.info("No completed filed 
found, setting {0} to FAILED...".format(job.name)) - job.new_status=Status.FAILED + Log.info( + "No completed filed found, setting {0} to FAILED...".format(job.name)) + job.new_status = Status.FAILED job.update_status(self.as_config.get_copy_remote_logs() == 'true') self.running_jobs_start.pop(job, None) def update_failed_jobs(self): - not_finished_jobs = [job for job in self.job_list if job.status not in [Status.FAILED, Status.COMPLETED]] + not_finished_jobs = [job for job in self.job_list if job.status not in [ + Status.FAILED, Status.COMPLETED]] for job in not_finished_jobs: self._check_finished_job(job) def cancel_failed_wrapper_job(self): Log.error("Cancelling job with id {0}".format(self.id)) - self.platform.send_command(self.platform.cancel_cmd + " " + str(self.id)) + self.platform.send_command( + self.platform.cancel_cmd + " " + str(self.id)) def _update_completed_jobs(self): for job in self.job_list: @@ -1275,7 +1340,8 @@ done self.running_jobs_start.pop(job, None) Log.debug('Setting job {0} to COMPLETED'.format(job.name)) job.new_status = Status.COMPLETED - job.update_status(self.as_config.get_copy_remote_logs() == 'true') + job.update_status( + self.as_config.get_copy_remote_logs() == 'true') def _is_over_wallclock(self, start_time, wallclock): elapsed = datetime.datetime.now() - parse_date(start_time) diff --git a/autosubmit/job/job_packager.py b/autosubmit/job/job_packager.py index 556b9ce0ac816a209f83b6f5ba9cf87ed33262a4..d35af91d97d50a57ec58d7545a4e550e77f9b79d 100644 --- a/autosubmit/job/job_packager.py +++ b/autosubmit/job/job_packager.py @@ -38,7 +38,7 @@ class JobPackager(object): :type jobs_list: JobList object. 
""" - def __init__(self, as_config, platform, jobs_list,hold=False): + def __init__(self, as_config, platform, jobs_list, hold=False): self._as_config = as_config self._platform = platform self._jobs_list = jobs_list @@ -62,21 +62,24 @@ class JobPackager(object): # .total_jobs is defined in each section of platforms_.conf, if not from there, it comes form autosubmit_.conf # .total_jobs Maximum number of jobs at the same time self._max_jobs_to_submit = platform.total_jobs - queuing_jobs_len - self.max_jobs = min(self._max_wait_jobs_to_submit, self._max_jobs_to_submit) + self.max_jobs = min(self._max_wait_jobs_to_submit, + self._max_jobs_to_submit) # These are defined in the [wrapper] section of autosubmit_,conf self.wrapper_type = self._as_config.get_wrapper_type() self.wrapper_method = self._as_config.get_wrapper_method().lower() # True or False self.jobs_in_wrapper = self._as_config.get_wrapper_jobs() - Log.debug("Number of jobs ready: {0}", len(jobs_list.get_ready(platform,hold=self.hold))) - Log.debug("Number of jobs available: {0}", self._max_wait_jobs_to_submit) - if len(jobs_list.get_ready(platform,hold=self.hold)) > 0: - Log.info("Jobs ready for {0}: {1}", self._platform.name, len(jobs_list.get_ready(platform,hold=self.hold))) + Log.debug("Number of jobs ready: {0}", len( + jobs_list.get_ready(platform, hold=self.hold))) + Log.debug( + "Number of jobs available: {0}", self._max_wait_jobs_to_submit) + if len(jobs_list.get_ready(platform, hold=self.hold)) > 0: + Log.info("Jobs ready for {0}: {1}", self._platform.name, len( + jobs_list.get_ready(platform, hold=self.hold))) self._maxTotalProcessors = 0 - - def build_packages(self,only_generate=False, jobs_filtered=[]): + def build_packages(self, only_generate=False, jobs_filtered=[]): """ Returns the list of the built packages to be submitted @@ -88,10 +91,11 @@ class JobPackager(object): if only_generate: jobs_to_submit = jobs_filtered else: - jobs_ready = self._jobs_list.get_ready(self._platform,self.hold) + 
jobs_ready = self._jobs_list.get_ready(self._platform, self.hold) - if self.hold and len(jobs_ready) > 0 : - jobs_in_held_status = self._jobs_list.get_held_jobs() + self._jobs_list.get_submitted(self._platform,hold=self.hold) + if self.hold and len(jobs_ready) > 0: + jobs_in_held_status = self._jobs_list.get_held_jobs( + ) + self._jobs_list.get_submitted(self._platform, hold=self.hold) held_by_id = dict() for held_job in jobs_in_held_status: held_by_id[held_job.id] = held_job @@ -113,30 +117,37 @@ class JobPackager(object): return packages_to_submit # Sort by 6 first digits of date - available_sorted = sorted(jobs_ready, key=lambda k: k.long_name.split('_')[1][:6]) + available_sorted = sorted( + jobs_ready, key=lambda k: k.long_name.split('_')[1][:6]) # Sort by Priority, highest first - list_of_available = sorted(available_sorted, key=lambda k: k.priority, reverse=True) - num_jobs_to_submit = min(self._max_wait_jobs_to_submit, len(jobs_ready), self._max_jobs_to_submit) + list_of_available = sorted( + available_sorted, key=lambda k: k.priority, reverse=True) + num_jobs_to_submit = min(self._max_wait_jobs_to_submit, len( + jobs_ready), self._max_jobs_to_submit) # Take the first num_jobs_to_submit from the list of available jobs_to_submit = list_of_available[0:num_jobs_to_submit] # print(len(jobs_to_submit)) - jobs_to_submit_by_section = self._divide_list_by_section(jobs_to_submit) + jobs_to_submit_by_section = self._divide_list_by_section( + jobs_to_submit) for section in jobs_to_submit_by_section: # Only if platform allows wrappers, wrapper type has been correctly defined, and job names for wrappers have been correctly defined # ('None' is a default value) or the correct section is included in the corresponding sections in [wrappers] if self._platform.allow_wrappers and self.wrapper_type in ['horizontal', 'vertical', 'vertical-mixed', 'vertical-horizontal', 'horizontal-vertical'] \ - and (self.jobs_in_wrapper == 'None' or section in self.jobs_in_wrapper): + and 
(self.jobs_in_wrapper == 'None' or section in self.jobs_in_wrapper): # Trying to find the value in jobs_parser, if not, default to an autosubmit_.conf value (Looks first in [wrapper] section) - max_wrapped_jobs = int(self._as_config.jobs_parser.get_option(section, "MAX_WRAPPED", self._as_config.get_max_wrapped_jobs())) + max_wrapped_jobs = int(self._as_config.jobs_parser.get_option( + section, "MAX_WRAPPED", self._as_config.get_max_wrapped_jobs())) if '&' not in section: - dependencies_keys = self._as_config.jobs_parser.get(section, "DEPENDENCIES").split() + dependencies_keys = self._as_config.jobs_parser.get( + section, "DEPENDENCIES").split() else: multiple_sections = section.split('&') - dependencies_keys=[] + dependencies_keys = [] for sectionN in multiple_sections: - dependencies_keys += self._as_config.jobs_parser.get(sectionN, "DEPENDENCIES").split() + dependencies_keys += self._as_config.jobs_parser.get( + sectionN, "DEPENDENCIES").split() hard_limit_wrapper = max_wrapped_jobs for k in dependencies_keys: @@ -146,16 +157,18 @@ class JobPackager(object): number = int(k_divided[1].strip(" ")) if number < hard_limit_wrapper: hard_limit_wrapper = number - min_wrapped_jobs = min(self._as_config.jobs_parser.get_option(section, "MIN_WRAPPED",self._as_config.get_min_wrapped_jobs()),hard_limit_wrapper) + min_wrapped_jobs = min(self._as_config.jobs_parser.get_option( + section, "MIN_WRAPPED", self._as_config.get_min_wrapped_jobs()), hard_limit_wrapper) built_packages = [] if self.wrapper_type in ['vertical', 'vertical-mixed']: built_packages_tmp = self._build_vertical_packages(jobs_to_submit_by_section[section], - max_wrapped_jobs) + max_wrapped_jobs) for p in built_packages_tmp: for job in p.jobs: job.packed = True - if len(p.jobs) >= min_wrapped_jobs: # if the quantity is not enough, don't make the wrapper + # if the quantity is not enough, don't make the wrapper + if len(p.jobs) >= min_wrapped_jobs: built_packages.append(p) elif 
self._jobs_list._chunk_list.index(p.jobs[0].chunk) >= len(self._jobs_list._chunk_list) - ( len(self._jobs_list._chunk_list) % min_wrapped_jobs): # Last case, wrap remaining jobs @@ -165,25 +178,28 @@ class JobPackager(object): job.packed = False elif self.wrapper_type == 'horizontal': built_packages_tmp = self._build_horizontal_packages(jobs_to_submit_by_section[section], - max_wrapped_jobs, section) + max_wrapped_jobs, section) for p in built_packages_tmp: for job in p.jobs: job.packed = True - if len(p.jobs) >= self._as_config.jobs_parser.get_option(section, "MIN_WRAPPED",self._as_config.get_min_wrapped_jobs()): # if the quantity is not enough, don't make the wrapper + # if the quantity is not enough, don't make the wrapper + if len(p.jobs) >= self._as_config.jobs_parser.get_option(section, "MIN_WRAPPED", self._as_config.get_min_wrapped_jobs()): built_packages.append(p) elif self._jobs_list._member_list.index(p.jobs[0].member) >= len( - self._jobs_list._member_list) - (len(self._jobs_list._member_list) % min_wrapped_jobs): # Last case, wrap remaining jobs + self._jobs_list._member_list) - (len(self._jobs_list._member_list) % min_wrapped_jobs): # Last case, wrap remaining jobs built_packages.append(p) else: # If a package is discarded, allow to wrap their inner jobs again. 
for job in p.jobs: job.packed = False elif self.wrapper_type in ['vertical-horizontal', 'horizontal-vertical']: - built_packages_tmp =[] - built_packages_tmp.append(self._build_hybrid_package(jobs_to_submit_by_section[section], max_wrapped_jobs, section)) + built_packages_tmp = [] + built_packages_tmp.append(self._build_hybrid_package( + jobs_to_submit_by_section[section], max_wrapped_jobs, section)) for p in built_packages_tmp: for job in p.jobs: job.packed = True - if len(p.jobs) >= min_wrapped_jobs: # if the quantity is not enough, don't make the wrapper + # if the quantity is not enough, don't make the wrapper + if len(p.jobs) >= min_wrapped_jobs: built_packages.append(p) elif self._jobs_list._chunk_list.index(p.jobs[0].chunk) >= len(self._jobs_list._chunk_list) - ( len(self._jobs_list._chunk_list) % min_wrapped_jobs): # Last case, wrap remaining jobs @@ -191,10 +207,10 @@ class JobPackager(object): else: # If a package is discarded, allow to wrap their inner jobs again. for job in p.jobs: job.packed = False - built_packages=built_packages_tmp + built_packages = built_packages_tmp else: - built_packages=built_packages_tmp - self.max_jobs = self.max_jobs -1 + built_packages = built_packages_tmp + self.max_jobs = self.max_jobs - 1 packages_to_submit += built_packages else: @@ -224,7 +240,8 @@ class JobPackager(object): jobs_section = dict() for job in jobs_list: # This iterator will always return None if there is no '&' defined in the section name - section = next((s for s in sections_split if job.section in s and '&' in s), None) + section = next( + (s for s in sections_split if job.section in s and '&' in s), None) if section is None: section = job.section if section not in jobs_section: @@ -247,11 +264,10 @@ class JobPackager(object): if machinefile_function == 'COMPONENTS': jobs_resources = horizontal_packager.components_dict jobs_resources['MACHINEFILES'] = machinefile_function - current_package = JobPackageHorizontal(package_jobs, 
jobs_resources=jobs_resources,method=self.wrapper_method,configuration=self._as_config) + current_package = JobPackageHorizontal( + package_jobs, jobs_resources=jobs_resources, method=self.wrapper_method, configuration=self._as_config) packages.append(current_package) - - return packages def _build_vertical_packages(self, section_list, max_wrapped_jobs): @@ -276,17 +292,19 @@ class JobPackager(object): if self.wrapper_type == 'vertical-mixed': dict_jobs = self._jobs_list.get_ordered_jobs_by_date_member() job_vertical_packager = JobPackagerVerticalMixed(dict_jobs, job, [job], job.wallclock, self.max_jobs, - max_wrapped_jobs, self._platform.max_wallclock) + max_wrapped_jobs, self._platform.max_wallclock) else: job_vertical_packager = JobPackagerVerticalSimple([job], job.wallclock, self.max_jobs, - max_wrapped_jobs, self._platform.max_wallclock) + max_wrapped_jobs, self._platform.max_wallclock) - jobs_list = job_vertical_packager.build_vertical_package(job) + jobs_list = job_vertical_packager.build_vertical_package( + job) # update max_jobs, potential_dependency is None - #self.max_jobs -= len(jobs_list) + # self.max_jobs -= len(jobs_list) if job.status is Status.READY: - packages.append(JobPackageVertical(jobs_list,configuration=self._as_config)) - else: + packages.append(JobPackageVertical( + jobs_list, configuration=self._as_config)) + else: package = JobPackageVertical(jobs_list, None) packages.append(package) @@ -311,15 +329,18 @@ class JobPackager(object): total_wallclock = '00:00' horizontal_package = horizontal_packager.build_horizontal_package() horizontal_packager.create_sections_order(section) - horizontal_packager.add_sectioncombo_processors(horizontal_packager.total_processors) - horizontal_package.sort(key=lambda job: horizontal_packager.sort_by_expression(job.name)) + horizontal_packager.add_sectioncombo_processors( + horizontal_packager.total_processors) + horizontal_package.sort( + key=lambda job: horizontal_packager.sort_by_expression(job.name)) job 
= max(horizontal_package, key=attrgetter('total_wallclock')) wallclock = job.wallclock current_package = [horizontal_package] #current_package = [] ## Get the next horizontal packages ## - max_procs =horizontal_packager.total_processors - new_package=horizontal_packager.get_next_packages(section, max_wallclock=self._platform.max_wallclock,horizontal_vertical=True,max_procs=max_procs) + max_procs = horizontal_packager.total_processors + new_package = horizontal_packager.get_next_packages( + section, max_wallclock=self._platform.max_wallclock, horizontal_vertical=True, max_procs=max_procs) if new_package is not None: current_package += new_package @@ -327,7 +348,7 @@ class JobPackager(object): total_wallclock = sum_str_hours(total_wallclock, wallclock) return JobPackageHorizontalVertical(current_package, max_procs, total_wallclock, - jobs_resources=jobs_resources,configuration=self._as_config) + jobs_resources=jobs_resources, configuration=self._as_config) def _build_vertical_horizontal_package(self, horizontal_packager, max_wrapped_jobs, jobs_resources): total_wallclock = '00:00' @@ -347,7 +368,7 @@ class JobPackager(object): total_wallclock = sum_str_hours(total_wallclock, job.wallclock) return JobPackageVerticalHorizontal(current_package, total_processors, total_wallclock, - jobs_resources=jobs_resources,method=self.wrapper_method,configuration=self._as_config) + jobs_resources=jobs_resources, method=self.wrapper_method, configuration=self._as_config) class JobPackagerVertical(object): @@ -392,7 +413,8 @@ class JobPackagerVertical(object): # If not None, it is wrappable if child is not None: # Calculate total wallclock per possible wrapper - self.total_wallclock = sum_str_hours(self.total_wallclock, child.wallclock) + self.total_wallclock = sum_str_hours( + self.total_wallclock, child.wallclock) # Testing against max from platform if self.total_wallclock <= self.max_wallclock: # Marking, this is later tested in the main loop @@ -427,7 +449,8 @@ class 
JobPackagerVerticalSimple(JobPackagerVertical): """ def __init__(self, jobs_list, total_wallclock, max_jobs, max_wrapped_jobs, max_wallclock): - super(JobPackagerVerticalSimple, self).__init__(jobs_list, total_wallclock, max_jobs, max_wrapped_jobs, max_wallclock) + super(JobPackagerVerticalSimple, self).__init__( + jobs_list, total_wallclock, max_jobs, max_wrapped_jobs, max_wallclock) def get_wrappable_child(self, job): """ @@ -439,7 +462,7 @@ class JobPackagerVerticalSimple(JobPackagerVertical): :rtype: Job Object """ for child in job.children: - if child.status in [Status.WAITING, Status.READY] and self._is_wrappable(child, job) : + if child.status in [Status.WAITING, Status.READY] and self._is_wrappable(child, job): return child continue return None @@ -485,8 +508,10 @@ class JobPackagerVerticalMixed(JobPackagerVertical): :param max_wallclock: Value from Platform. \n :type max_wallclock: String \n """ + def __init__(self, dict_jobs, ready_job, jobs_list, total_wallclock, max_jobs, max_wrapped_jobs, max_wallclock): - super(JobPackagerVerticalMixed, self).__init__(jobs_list, total_wallclock, max_jobs, max_wrapped_jobs, max_wallclock) + super(JobPackagerVerticalMixed, self).__init__( + jobs_list, total_wallclock, max_jobs, max_wrapped_jobs, max_wallclock) self.ready_job = ready_job self.dict_jobs = dict_jobs # Last date from the ordering @@ -558,13 +583,14 @@ class JobPackagerHorizontal(object): self._maxTotalProcessors = 0 self._sectionList = list() self._package_sections = dict() - def build_horizontal_package(self,horizontal_vertical=False): + + def build_horizontal_package(self, horizontal_vertical=False): current_package = [] if horizontal_vertical: self._current_processors = 0 for job in self.job_list: if self.max_jobs > 0 and len(current_package) < self.max_wrapped_jobs: - #self.max_jobs -= 1 + # self.max_jobs -= 1 if int(job.tasks) != 0 and int(job.tasks) != int(self.processors_node) and \ int(job.tasks) < job.total_processors: nodes = 
int(ceil(job.total_processors / float(job.tasks))) @@ -582,15 +608,14 @@ class JobPackagerHorizontal(object): self.create_components_dict() - return current_package def create_sections_order(self, jobs_sections): for i, section in enumerate(jobs_sections.split('&')): self._sort_order_dict[section] = i - #EXIT FALSE IF A SECTION EXIST AND HAVE LESS PROCESSORS - def add_sectioncombo_processors(self,total_processors_section): + # EXIT FALSE IF A SECTION EXIST AND HAVE LESS PROCESSORS + def add_sectioncombo_processors(self, total_processors_section): keySection = "" self._sectionList.sort() @@ -601,15 +626,15 @@ class JobPackagerHorizontal(object): return False else: self._package_sections[keySection] = total_processors_section - self._maxTotalProcessors=max(max(self._package_sections.values()),self._maxTotalProcessors) + self._maxTotalProcessors = max( + max(self._package_sections.values()), self._maxTotalProcessors) return True - def sort_by_expression(self, jobname): jobname = jobname.split('_')[-1] return self._sort_order_dict[jobname] - def get_next_packages(self, jobs_sections, max_wallclock=None, potential_dependency=None, packages_remote_dependencies=list(),horizontal_vertical=False,max_procs=0): + def get_next_packages(self, jobs_sections, max_wallclock=None, potential_dependency=None, packages_remote_dependencies=list(), horizontal_vertical=False, max_procs=0): packages = [] job = max(self.job_list, key=attrgetter('total_wallclock')) wallclock = job.wallclock @@ -628,13 +653,14 @@ class JobPackagerHorizontal(object): if wrappable and child not in next_section_list: next_section_list.append(child) - next_section_list.sort(key=lambda job: self.sort_by_expression(job.name)) + next_section_list.sort( + key=lambda job: self.sort_by_expression(job.name)) self.job_list = next_section_list package_jobs = self.build_horizontal_package(horizontal_vertical) if package_jobs: - #if not self.add_sectioncombo_processors(self.total_processors) and horizontal_vertical: - if 
self._current_processors != max_procs: + # if not self.add_sectioncombo_processors(self.total_processors) and horizontal_vertical: + if self._current_processors != max_procs: return packages if max_wallclock: total_wallclock = sum_str_hours(total_wallclock, wallclock) @@ -656,7 +682,7 @@ class JobPackagerHorizontal(object): return self._components_dict def create_components_dict(self): - self._sectionList=[] + self._sectionList = [] for job in self.job_list: if job.section not in self._sectionList: self._sectionList.append(job.section) @@ -664,4 +690,4 @@ class JobPackagerHorizontal(object): self._components_dict[job.section] = dict() self._components_dict[job.section]['COMPONENTS'] = {parameter: job.parameters[parameter] for parameter in job.parameters.keys() - if '_NUMPROC' in parameter } + if '_NUMPROC' in parameter} diff --git a/test/run_unit_suite.py b/test/run_unit_suite.py index 8df064a1cd98352a881738ad98d6fa317555a4ad..426ad141a1b5c54ca27f5eefb5e5d7b23db8a1e4 100644 --- a/test/run_unit_suite.py +++ b/test/run_unit_suite.py @@ -1,4 +1,4 @@ import os -#os.system("nosetests --with-coverage --cover-erase --cover-package=autosubmit --cover-html test/unit") -os.system("nosetests --exclude=regression --with-coverage --cover-package=autosubmit --cover-inclusive --cover-xml --cover-xml-file=test/coverage.xml test") \ No newline at end of file +os.system("nosetests --with-coverage --cover-erase --cover-package=autosubmit --cover-html test/unit") +# os.system("nosetests --exclude=regression --with-coverage --cover-package=autosubmit --cover-inclusive --cover-xml --cover-xml-file=test/coverage.xml test") diff --git a/test/unit/test_job.py b/test/unit/test_job.py index 93e7e4c0265a1aee246ec8b96cfeaa48fe547d35..61967ae8b0b534d181047d6c4819ee3928e50f23 100644 --- a/test/unit/test_job.py +++ b/test/unit/test_job.py @@ -1,293 +1,293 @@ -# from unittest import TestCase -# import os -# import sys -# from autosubmit.config.config_common import AutosubmitConfig -# from 
autosubmit.job.job_common import Status -# from autosubmit.job.job import Job -# from autosubmit.platforms.platform import Platform -# from mock import Mock, MagicMock -# from mock import patch -# -# # compatibility with both versions (2 & 3) -# from sys import version_info -# -# if version_info.major == 2: -# import __builtin__ as builtins -# else: -# import builtins -# -# -# class TestJob(TestCase): -# def setUp(self): -# self.experiment_id = 'random-id' -# self.job_name = 'random-name' -# self.job_id = 999 -# self.job_priority = 0 -# -# self.job = Job(self.job_name, self.job_id, Status.WAITING, self.job_priority) -# self.job.processors = 2 -# -# def test_when_the_job_has_more_than_one_processor_returns_the_parallel_platform(self): -# platform = Platform(self.experiment_id, 'parallel-platform', FakeBasicConfig) -# platform.serial_platform = 'serial-platform' -# -# self.job._platform = platform -# self.job.processors = 999 -# -# returned_platform = self.job.platform -# -# self.assertEquals(platform, returned_platform) -# -# def test_when_the_job_has_only_one_processor_returns_the_serial_platform(self): -# platform = Platform(self.experiment_id, 'parallel-platform', FakeBasicConfig) -# platform.serial_platform = 'serial-platform' -# -# self.job._platform = platform -# self.job.processors = '1' -# -# returned_platform = self.job.platform -# -# self.assertEquals('serial-platform', returned_platform) -# -# def test_set_platform(self): -# dummy_platform = Platform('whatever', 'rand-name', FakeBasicConfig) -# self.assertNotEquals(dummy_platform, self.job._platform) -# -# self.job.platform = dummy_platform -# -# self.assertEquals(dummy_platform, self.job.platform) -# -# def test_when_the_job_has_a_queue_returns_that_queue(self): -# dummy_queue = 'whatever' -# self.job._queue = dummy_queue -# -# returned_queue = self.job.queue -# -# self.assertEquals(dummy_queue, returned_queue) -# -# def 
test_when_the_job_has_not_a_queue_and_some_processors_returns_the_queue_of_the_platform(self): -# dummy_queue = 'whatever-parallel' -# dummy_platform = Platform('whatever', 'rand-name', FakeBasicConfig) -# dummy_platform.queue = dummy_queue -# self.job.platform = dummy_platform -# -# self.assertIsNone(self.job._queue) -# -# returned_queue = self.job.queue -# -# self.assertIsNotNone(returned_queue) -# self.assertEquals(dummy_queue, returned_queue) -# -# def test_when_the_job_has_not_a_queue_and_one_processor_returns_the_queue_of_the_serial_platform(self): -# serial_queue = 'whatever-serial' -# parallel_queue = 'whatever-parallel' -# -# dummy_serial_platform = Platform('whatever', 'serial', FakeBasicConfig) -# dummy_serial_platform.serial_queue = serial_queue -# -# dummy_platform = Platform('whatever', 'parallel', FakeBasicConfig) -# dummy_platform.serial_platform = dummy_serial_platform -# dummy_platform.queue = parallel_queue -# -# self.job.platform = dummy_platform -# self.job.processors = '1' -# -# self.assertIsNone(self.job._queue) -# -# returned_queue = self.job.queue -# -# self.assertIsNotNone(returned_queue) -# self.assertEquals(serial_queue, returned_queue) -# self.assertNotEquals(parallel_queue, returned_queue) -# -# def test_set_queue(self): -# dummy_queue = 'whatever' -# self.assertNotEquals(dummy_queue, self.job._queue) -# -# self.job.queue = dummy_queue -# -# self.assertEquals(dummy_queue, self.job.queue) -# -# def test_that_the_increment_fails_count_only_adds_one(self): -# initial_fail_count = self.job.fail_count -# self.job.inc_fail_count() -# incremented_fail_count = self.job.fail_count -# -# self.assertEquals(initial_fail_count + 1, incremented_fail_count) -# -# def test_parents_and_children_management(self): -# random_job1 = Job('dummy-name', 111, Status.WAITING, 0) -# random_job2 = Job('dummy-name2', 222, Status.WAITING, 0) -# random_job3 = Job('dummy-name3', 333, Status.WAITING, 0) -# -# self.job.add_parent(random_job1, -# random_job2, -# 
random_job3) -# -# # assert added -# self.assertEquals(3, len(self.job.parents)) -# self.assertEquals(1, len(random_job1.children)) -# self.assertEquals(1, len(random_job2.children)) -# self.assertEquals(1, len(random_job3.children)) -# -# # assert contains -# self.assertTrue(self.job.parents.__contains__(random_job1)) -# self.assertTrue(self.job.parents.__contains__(random_job2)) -# self.assertTrue(self.job.parents.__contains__(random_job3)) -# -# self.assertTrue(random_job1.children.__contains__(self.job)) -# self.assertTrue(random_job2.children.__contains__(self.job)) -# self.assertTrue(random_job3.children.__contains__(self.job)) -# -# # assert has -# self.assertFalse(self.job.has_children()) -# self.assertTrue(self.job.has_parents()) -# -# # assert deletions -# self.job.delete_parent(random_job3) -# self.assertEquals(2, len(self.job.parents)) -# -# random_job1.delete_child(self.job) -# self.assertEquals(0, len(random_job1.children)) -# -# def test_create_script(self): -# # arrange -# self.job.parameters = dict() -# self.job.parameters['NUMPROC'] = 999 -# self.job.parameters['NUMTHREADS'] = 777 -# self.job.parameters['NUMTASK'] = 666 -# -# self.job._tmp_path = '/dummy/tmp/path' -# -# update_content_mock = Mock(return_value='some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK% %% %%') -# self.job.update_content = update_content_mock -# -# config = Mock(spec=AutosubmitConfig) -# config.get_project_dir = Mock(return_value='/project/dir') -# -# chmod_mock = Mock() -# sys.modules['os'].chmod = chmod_mock -# -# write_mock = Mock().write = Mock() -# open_mock = Mock(return_value=write_mock) -# with patch.object(builtins, "open", open_mock): -# # act -# self.job.create_script(config) -# -# # assert -# update_content_mock.assert_called_with(config) -# open_mock.assert_called_with(os.path.join(self.job._tmp_path, self.job.name + '.cmd'), 'w') -# write_mock.write.assert_called_with('some-content: 999, 777, 666 % %') -# 
chmod_mock.assert_called_with(os.path.join(self.job._tmp_path, self.job.name + '.cmd'), 0o755) -# -# def test_that_check_script_returns_false_when_there_is_an_unbound_template_variable(self): -# # arrange -# update_content_mock = Mock(return_value='some-content: %UNBOUND%') -# self.job.update_content = update_content_mock -# -# update_parameters_mock = Mock(return_value=self.job.parameters) -# self.job.update_parameters = update_parameters_mock -# -# config = Mock(spec=AutosubmitConfig) -# config.get_project_dir = Mock(return_value='/project/dir') -# -# # act -# checked = self.job.check_script(config, self.job.parameters) -# -# # assert -# update_parameters_mock.assert_called_with(config, self.job.parameters) -# update_content_mock.assert_called_with(config) -# self.assertFalse(checked) -# -# def test_check_script(self): -# # arrange -# self.job.parameters = dict() -# self.job.parameters['NUMPROC'] = 999 -# self.job.parameters['NUMTHREADS'] = 777 -# self.job.parameters['NUMTASK'] = 666 -# -# update_content_mock = Mock(return_value='some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK%') -# self.job.update_content = update_content_mock -# -# update_parameters_mock = Mock(return_value=self.job.parameters) -# self.job.update_parameters = update_parameters_mock -# -# config = Mock(spec=AutosubmitConfig) -# config.get_project_dir = Mock(return_value='/project/dir') -# -# # act -# checked = self.job.check_script(config, self.job.parameters) -# -# # assert -# update_parameters_mock.assert_called_with(config, self.job.parameters) -# update_content_mock.assert_called_with(config) -# self.assertTrue(checked) -# -# def test_exists_completed_file_then_sets_status_to_completed(self): -# # arrange -# exists_mock = Mock(return_value=True) -# sys.modules['os'].path.exists = exists_mock -# -# # act -# self.job.check_completion() -# -# # assert -# exists_mock.assert_called_once_with(os.path.join(self.job._tmp_path, self.job.name + '_COMPLETED')) -# self.assertEquals(Status.COMPLETED, 
self.job.status) -# -# def test_completed_file_not_exists_then_sets_status_to_failed(self): -# # arrange -# exists_mock = Mock(return_value=False) -# sys.modules['os'].path.exists = exists_mock -# -# # act -# self.job.check_completion() -# -# # assert -# exists_mock.assert_called_once_with(os.path.join(self.job._tmp_path, self.job.name + '_COMPLETED')) -# self.assertEquals(Status.FAILED, self.job.status) -# -# def test_job_script_checking_contains_the_right_default_variables(self): -# # This test (and feature) was implemented in order to avoid -# # false positives on the checking process with auto-ecearth3 -# # Arrange -# as_conf = Mock() -# as_conf.get_processors = Mock(return_value=80) -# as_conf.get_threads = Mock(return_value=1) -# as_conf.get_tasks = Mock(return_value=16) -# as_conf.get_memory = Mock(return_value=80) -# as_conf.get_wallclock = Mock(return_value='00:30') -# as_conf.get_member_list = Mock(return_value=[]) -# as_conf.get_custom_directives = Mock(return_value='["whatever"]') -# -# dummy_serial_platform = Mock() -# dummy_serial_platform.name = 'serial' -# dummy_platform = Mock() -# dummy_platform.serial_platform = dummy_serial_platform -# dummy_platform.custom_directives = '["whatever"]' -# self.job._platform = dummy_platform -# # Act -# parameters = self.job.update_parameters(as_conf, dict()) -# # Assert -# self.assertTrue('d' in parameters) -# self.assertTrue('d_' in parameters) -# self.assertTrue('Y' in parameters) -# self.assertTrue('Y_' in parameters) -# self.assertEquals('%d%', parameters['d']) -# self.assertEquals('%d_%', parameters['d_']) -# self.assertEquals('%Y%', parameters['Y']) -# self.assertEquals('%Y_%', parameters['Y_']) -# -# -# class FakeBasicConfig: -# DB_DIR = '/dummy/db/dir' -# DB_FILE = '/dummy/db/file' -# DB_PATH = '/dummy/db/path' -# LOCAL_ROOT_DIR = '/dummy/local/root/dir' -# LOCAL_TMP_DIR = '/dummy/local/temp/dir' -# LOCAL_PROJ_DIR = '/dummy/local/proj/dir' -# DEFAULT_PLATFORMS_CONF = '' -# DEFAULT_JOBS_CONF = '' -# -# -# 
"""Unit tests for autosubmit.job.job.Job (platform/queue resolution, graph management, script handling)."""
from unittest import TestCase
import os
import sys
from autosubmit.config.config_common import AutosubmitConfig
from autosubmit.job.job_common import Status
from autosubmit.job.job import Job
from autosubmit.platforms.platform import Platform
from mock import Mock, MagicMock
from mock import patch

# compatibility with both versions (2 & 3)
from sys import version_info

if version_info.major == 2:
    import __builtin__ as builtins
else:
    import builtins


class TestJob(TestCase):
    """Exercises Job's platform/queue selection, parent/child wiring and script creation/checking."""

    def setUp(self):
        self.experiment_id = 'random-id'
        self.job_name = 'random-name'
        self.job_id = 999
        self.job_priority = 0

        self.job = Job(self.job_name, self.job_id, Status.WAITING, self.job_priority)
        self.job.processors = 2

    def test_when_the_job_has_more_than_one_processor_returns_the_parallel_platform(self):
        platform = Platform(self.experiment_id, 'parallel-platform', FakeBasicConfig)
        platform.serial_platform = 'serial-platform'

        self.job._platform = platform
        self.job.processors = 999

        returned_platform = self.job.platform

        self.assertEqual(platform, returned_platform)

    def test_when_the_job_has_only_one_processor_returns_the_serial_platform(self):
        platform = Platform(self.experiment_id, 'parallel-platform', FakeBasicConfig)
        platform.serial_platform = 'serial-platform'

        self.job._platform = platform
        self.job.processors = '1'

        returned_platform = self.job.platform

        self.assertEqual('serial-platform', returned_platform)

    def test_set_platform(self):
        dummy_platform = Platform('whatever', 'rand-name', FakeBasicConfig)
        self.assertNotEqual(dummy_platform, self.job._platform)

        self.job.platform = dummy_platform

        self.assertEqual(dummy_platform, self.job.platform)

    def test_when_the_job_has_a_queue_returns_that_queue(self):
        dummy_queue = 'whatever'
        self.job._queue = dummy_queue

        returned_queue = self.job.queue

        self.assertEqual(dummy_queue, returned_queue)

    def test_when_the_job_has_not_a_queue_and_some_processors_returns_the_queue_of_the_platform(self):
        dummy_queue = 'whatever-parallel'
        dummy_platform = Platform('whatever', 'rand-name', FakeBasicConfig)
        dummy_platform.queue = dummy_queue
        self.job.platform = dummy_platform

        self.assertIsNone(self.job._queue)

        returned_queue = self.job.queue

        self.assertIsNotNone(returned_queue)
        self.assertEqual(dummy_queue, returned_queue)

    def test_when_the_job_has_not_a_queue_and_one_processor_returns_the_queue_of_the_serial_platform(self):
        serial_queue = 'whatever-serial'
        parallel_queue = 'whatever-parallel'

        dummy_serial_platform = Platform('whatever', 'serial', FakeBasicConfig)
        dummy_serial_platform.serial_queue = serial_queue

        dummy_platform = Platform('whatever', 'parallel', FakeBasicConfig)
        dummy_platform.serial_platform = dummy_serial_platform
        dummy_platform.queue = parallel_queue

        self.job.platform = dummy_platform
        self.job.processors = '1'

        self.assertIsNone(self.job._queue)

        returned_queue = self.job.queue

        self.assertIsNotNone(returned_queue)
        self.assertEqual(serial_queue, returned_queue)
        self.assertNotEqual(parallel_queue, returned_queue)

    def test_set_queue(self):
        dummy_queue = 'whatever'
        self.assertNotEqual(dummy_queue, self.job._queue)

        self.job.queue = dummy_queue

        self.assertEqual(dummy_queue, self.job.queue)

    def test_that_the_increment_fails_count_only_adds_one(self):
        initial_fail_count = self.job.fail_count
        self.job.inc_fail_count()
        incremented_fail_count = self.job.fail_count

        self.assertEqual(initial_fail_count + 1, incremented_fail_count)

    def test_parents_and_children_management(self):
        random_job1 = Job('dummy-name', 111, Status.WAITING, 0)
        random_job2 = Job('dummy-name2', 222, Status.WAITING, 0)
        random_job3 = Job('dummy-name3', 333, Status.WAITING, 0)

        self.job.add_parent(random_job1,
                            random_job2,
                            random_job3)

        # assert added
        self.assertEqual(3, len(self.job.parents))
        self.assertEqual(1, len(random_job1.children))
        self.assertEqual(1, len(random_job2.children))
        self.assertEqual(1, len(random_job3.children))

        # assert contains (assertIn replaces the explicit __contains__ calls)
        self.assertIn(random_job1, self.job.parents)
        self.assertIn(random_job2, self.job.parents)
        self.assertIn(random_job3, self.job.parents)

        self.assertIn(self.job, random_job1.children)
        self.assertIn(self.job, random_job2.children)
        self.assertIn(self.job, random_job3.children)

        # assert has
        self.assertFalse(self.job.has_children())
        self.assertTrue(self.job.has_parents())

        # assert deletions
        self.job.delete_parent(random_job3)
        self.assertEqual(2, len(self.job.parents))

        random_job1.delete_child(self.job)
        self.assertEqual(0, len(random_job1.children))

    def test_create_script(self):
        # arrange
        self.job.parameters = dict()
        self.job.parameters['NUMPROC'] = 999
        self.job.parameters['NUMTHREADS'] = 777
        self.job.parameters['NUMTASK'] = 666

        self.job._tmp_path = '/dummy/tmp/path'

        update_content_mock = Mock(return_value='some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK% %% %%')
        self.job.update_content = update_content_mock

        config = Mock(spec=AutosubmitConfig)
        config.get_project_dir = Mock(return_value='/project/dir')

        # NOTE(review): patches os.chmod globally and never restores it — test pollution; confirm before tightening.
        chmod_mock = Mock()
        sys.modules['os'].chmod = chmod_mock

        # write_mock doubles as the fake file object returned by open();
        # its auto-created .write child records the written script content.
        write_mock = Mock().write = Mock()
        open_mock = Mock(return_value=write_mock)
        with patch.object(builtins, "open", open_mock):
            # act
            self.job.create_script(config)

        # assert
        update_content_mock.assert_called_with(config)
        open_mock.assert_called_with(os.path.join(self.job._tmp_path, self.job.name + '.cmd'), 'w')
        write_mock.write.assert_called_with('some-content: 999, 777, 666 % %')
        chmod_mock.assert_called_with(os.path.join(self.job._tmp_path, self.job.name + '.cmd'), 0o755)

    def test_that_check_script_returns_false_when_there_is_an_unbound_template_variable(self):
        # arrange
        update_content_mock = Mock(return_value='some-content: %UNBOUND%')
        self.job.update_content = update_content_mock

        update_parameters_mock = Mock(return_value=self.job.parameters)
        self.job.update_parameters = update_parameters_mock

        config = Mock(spec=AutosubmitConfig)
        config.get_project_dir = Mock(return_value='/project/dir')

        # act
        checked = self.job.check_script(config, self.job.parameters)

        # assert
        update_parameters_mock.assert_called_with(config, self.job.parameters)
        update_content_mock.assert_called_with(config)
        self.assertFalse(checked)

    def test_check_script(self):
        # arrange
        self.job.parameters = dict()
        self.job.parameters['NUMPROC'] = 999
        self.job.parameters['NUMTHREADS'] = 777
        self.job.parameters['NUMTASK'] = 666

        update_content_mock = Mock(return_value='some-content: %NUMPROC%, %NUMTHREADS%, %NUMTASK%')
        self.job.update_content = update_content_mock

        update_parameters_mock = Mock(return_value=self.job.parameters)
        self.job.update_parameters = update_parameters_mock

        config = Mock(spec=AutosubmitConfig)
        config.get_project_dir = Mock(return_value='/project/dir')

        # act
        checked = self.job.check_script(config, self.job.parameters)

        # assert
        update_parameters_mock.assert_called_with(config, self.job.parameters)
        update_content_mock.assert_called_with(config)
        self.assertTrue(checked)

    def test_exists_completed_file_then_sets_status_to_completed(self):
        # arrange
        exists_mock = Mock(return_value=True)
        sys.modules['os'].path.exists = exists_mock

        # act
        self.job.check_completion()

        # assert
        exists_mock.assert_called_once_with(os.path.join(self.job._tmp_path, self.job.name + '_COMPLETED'))
        self.assertEqual(Status.COMPLETED, self.job.status)

    def test_completed_file_not_exists_then_sets_status_to_failed(self):
        # arrange
        exists_mock = Mock(return_value=False)
        sys.modules['os'].path.exists = exists_mock

        # act
        self.job.check_completion()

        # assert
        exists_mock.assert_called_once_with(os.path.join(self.job._tmp_path, self.job.name + '_COMPLETED'))
        self.assertEqual(Status.FAILED, self.job.status)

    def test_job_script_checking_contains_the_right_default_variables(self):
        # This test (and feature) was implemented in order to avoid
        # false positives on the checking process with auto-ecearth3
        # Arrange
        as_conf = Mock()
        as_conf.get_processors = Mock(return_value=80)
        as_conf.get_threads = Mock(return_value=1)
        as_conf.get_tasks = Mock(return_value=16)
        as_conf.get_memory = Mock(return_value=80)
        as_conf.get_wallclock = Mock(return_value='00:30')
        as_conf.get_member_list = Mock(return_value=[])
        as_conf.get_custom_directives = Mock(return_value='["whatever"]')

        dummy_serial_platform = Mock()
        dummy_serial_platform.name = 'serial'
        dummy_platform = Mock()
        dummy_platform.serial_platform = dummy_serial_platform
        dummy_platform.custom_directives = '["whatever"]'
        self.job._platform = dummy_platform
        # Act
        parameters = self.job.update_parameters(as_conf, dict())
        # Assert: the date/year placeholders must survive untouched as literal tokens
        self.assertTrue('d' in parameters)
        self.assertTrue('d_' in parameters)
        self.assertTrue('Y' in parameters)
        self.assertTrue('Y_' in parameters)
        self.assertEqual('%d%', parameters['d'])
        self.assertEqual('%d_%', parameters['d_'])
        self.assertEqual('%Y%', parameters['Y'])
        self.assertEqual('%Y_%', parameters['Y_'])


class FakeBasicConfig:
    """Stand-in for BasicConfig providing the directory constants Platform expects."""
    DB_DIR = '/dummy/db/dir'
    DB_FILE = '/dummy/db/file'
    DB_PATH = '/dummy/db/path'
    LOCAL_ROOT_DIR = '/dummy/local/root/dir'
    LOCAL_TMP_DIR = '/dummy/local/temp/dir'
    LOCAL_PROJ_DIR = '/dummy/local/proj/dir'
    DEFAULT_PLATFORMS_CONF = ''
    DEFAULT_JOBS_CONF = ''
"""Unit tests for the Status constants in autosubmit.job.job_common."""
from unittest import TestCase

from autosubmit.job.job_common import Status


class TestJobCommon(TestCase):
    """
    This test is intended to prevent wrong changes on the Status class definition
    """

    def test_value_to_key_has_the_same_values_as_status_constants(self):
        # Each Status constant must round-trip through VALUE_TO_KEY to its own name.
        self.assertEqual('SUSPENDED', Status.VALUE_TO_KEY[Status.SUSPENDED])
        self.assertEqual('UNKNOWN', Status.VALUE_TO_KEY[Status.UNKNOWN])
        self.assertEqual('FAILED', Status.VALUE_TO_KEY[Status.FAILED])
        self.assertEqual('WAITING', Status.VALUE_TO_KEY[Status.WAITING])
        self.assertEqual('READY', Status.VALUE_TO_KEY[Status.READY])
        self.assertEqual('SUBMITTED', Status.VALUE_TO_KEY[Status.SUBMITTED])
        self.assertEqual('HELD', Status.VALUE_TO_KEY[Status.HELD])
        self.assertEqual('QUEUING', Status.VALUE_TO_KEY[Status.QUEUING])
        self.assertEqual('RUNNING', Status.VALUE_TO_KEY[Status.RUNNING])
        self.assertEqual('COMPLETED', Status.VALUE_TO_KEY[Status.COMPLETED])
self.jobs: -# job.update_parameters.assert_called_once_with('fake-config', 'fake-params') -# self.job_package._create_scripts.is_called_once_with() -# self.job_package._send_files.is_called_once_with() -# self.job_package._do_submission.is_called_once_with() +from unittest import TestCase + +import os +from mock import Mock +from mock import patch + +from autosubmit.job.job_packages import JobPackageSimple +from autosubmit.job.job import Job +from autosubmit.job.job_common import Status + + +class TestJobPackage(TestCase): + + def setUp(self): + self.platform = Mock() + self.jobs = [Job('dummy1', 0, Status.READY, 0), + Job('dummy2', 0, Status.READY, 0)] + self.jobs[0].platform = self.jobs[1].platform = self.platform + self.job_package = JobPackageSimple(self.jobs) + + def test_job_package_default_init(self): + with self.assertRaises(Exception): + JobPackageSimple([]) + + def test_job_package_different_platforms_init(self): + self.jobs[0].platform = Mock() + self.jobs[1].platform = Mock() + with self.assertRaises(Exception): + JobPackageSimple(self.jobs) + + def test_job_package_none_platforms_init(self): + self.jobs[0].platform = None + self.jobs[1].platform = None + with self.assertRaises(Exception): + JobPackageSimple(self.jobs) + + def test_job_package_length(self): + self.assertEquals(2, len(self.job_package)) + + def test_job_package_jobs_getter(self): + self.assertEquals(self.jobs, self.job_package.jobs) + + def test_job_package_platform_getter(self): + self.assertEquals(self.platform.serial_platform, self.job_package.platform) + + def test_job_package_submission(self): + # arrange + self.job_package._create_scripts = Mock() + self.job_package._send_files = Mock() + self.job_package._do_submission = Mock() + for job in self.jobs: + job.update_parameters = Mock() + # act + self.job_package.submit('fake-config', 'fake-params') + # assert + for job in self.jobs: + job.update_parameters.assert_called_once_with('fake-config', 'fake-params') + 
self.job_package._create_scripts.is_called_once_with() + self.job_package._send_files.is_called_once_with() + self.job_package._do_submission.is_called_once_with() diff --git a/test/unit/test_saga_platform.py b/test/unit/test_saga_platform.py index cf1ddddfbd8108fa713954bce38dba1a16d2fef7..65ab4e2d71dbc9063c98577615ead11e59f2b23f 100644 --- a/test/unit/test_saga_platform.py +++ b/test/unit/test_saga_platform.py @@ -1,206 +1,220 @@ -# import subprocess -# import sys -# from unittest import TestCase -# -# import os -# import re -# -# from mock import Mock -# from mock import patch -# -# from autosubmit.job.job_common import Status -# from autosubmit.job.job_common import Type -# -# ############################################### -# # Special SAGA import to prevent logging/atfork errors -# -# -# os.environ['RADICAL_UTILS_NOATFORK'] = 'True' -# import saga -# from autosubmit.platforms.saga_platform import SagaPlatform -# -# -# ############################################### -# -# class TestSagaPlatform(TestCase): -# def setUp(self): -# self.experiment_id = 'random-id' -# self.platform = SagaPlatform(self.experiment_id, 'test', FakeBasicConfig) -# self.platform.service = Mock() -# self.platform.service.session = Mock() -# -# def test_check_status_returns_completed_if_job_id_not_exists(self): -# # arrange -# self.platform.service = FakeService([]) -# # act -# status = self.platform.check_job('any-id') -# # assert -# self.assertEquals(Status.COMPLETED, status) -# -# def test_check_status_returns_the_right_states(self): -# # arrange -# self.platform.service = FakeService(['any-id']) -# self.platform.service.get_job = Mock(side_effect=[FakeJob('any-name', saga.job.UNKNOWN), -# FakeJob('any-name', saga.job.PENDING), -# FakeJob('any-name', saga.job.FAILED), -# FakeJob('any-name', saga.job.CANCELED), -# FakeJob('any-name', saga.job.DONE), -# FakeJob('any-name', saga.job.RUNNING), -# FakeJob('any-name', saga.job.SUSPENDED)]) -# # act -# should_be_unknown = 
self.platform.check_job('any-id') -# should_be_queuing = self.platform.check_job('any-id') -# should_be_failed = self.platform.check_job('any-id') -# should_be_failed2 = self.platform.check_job('any-id') -# should_be_completed = self.platform.check_job('any-id') -# should_be_running = self.platform.check_job('any-id') -# should_be_suspended = self.platform.check_job('any-id') -# -# # assert -# self.assertEquals(Status.UNKNOWN, should_be_unknown) -# self.assertEquals(Status.QUEUING, should_be_queuing) -# self.assertEquals(Status.FAILED, should_be_failed) -# self.assertEquals(Status.FAILED, should_be_failed2) -# self.assertEquals(Status.COMPLETED, should_be_completed) -# self.assertEquals(Status.RUNNING, should_be_running) -# self.assertEquals(Status.SUSPENDED, should_be_suspended) -# -# def test_creates_a_saga_job_correctly(self): -# parameters = {'WALLCLOCK': '', -# 'CURRENT_QUEUE': 'queue', -# 'CURRENT_BUDG': 'project', -# 'NUMPROC': 666, -# 'NUMTASK': 777, -# 'NUMTHREADS': 888, -# 'MEMORY': 999, -# 'CURRENT_RESERVATION': 'dummy', -# 'CURRENT_EXCLUSIVITY': 'true'} -# job = FakeJob('any-name', saga.job.RUNNING, Type.BASH, parameters) -# jd = FakeJobDescription() -# sys.modules['saga'].job.Description = Mock(return_value=jd) -# self.platform.add_attribute = Mock() -# self.platform.service = FakeService([]) -# self.platform.service.create_job = Mock(return_value='created-job') -# -# # act -# created_job = self.platform.create_saga_job(job, 'scriptname') -# -# # assert -# self.assertEquals('LOG_random-id/scriptname', jd.executable) -# self.assertEquals('LOG_random-id', jd.working_directory) -# self.assertIsNotNone(re.match('any-name.[0-9]*.out', jd.output)) -# self.assertIsNotNone(re.match('any-name.[0-9]*.err', jd.error)) -# self.platform.add_attribute.assert_any_call(jd, 'Name', job.name) -# self.platform.add_attribute.assert_any_call(jd, 'WallTimeLimit', 0) -# self.platform.add_attribute.assert_any_call(jd, 'Queue', parameters["CURRENT_QUEUE"]) -# 
self.platform.add_attribute.assert_any_call(jd, 'Project', parameters["CURRENT_BUDG"] + ':' + parameters[ -# "CURRENT_RESERVATION"] + ':' + parameters["CURRENT_EXCLUSIVITY"]) -# self.platform.add_attribute.assert_any_call(jd, 'TotalCPUCount', parameters["NUMPROC"]) -# self.platform.add_attribute.assert_any_call(jd, 'ProcessesPerHost', parameters["NUMTASK"]) -# self.platform.add_attribute.assert_any_call(jd, 'ThreadsPerProcess', parameters["NUMTHREADS"]) -# self.platform.add_attribute.assert_any_call(jd, 'TotalPhysicalMemory', parameters["MEMORY"]) -# self.assertEquals('created-job', created_job) -# -# def test_deleting_file_returns_true_if_not_exists(self): -# self.platform.exists_file = Mock(return_value=False) -# deleted = self.platform.delete_file('filename') -# self.assertTrue(deleted) -# -# def test_deleting_file_on_ecaccess_platform_makes_the_right_call(self): -# self.platform.type = 'ecaccess' -# sys.modules['subprocess'].check_call = Mock() -# -# deleted = self.platform.delete_file('file/path') -# -# self.assertTrue(deleted) -# sys.modules['subprocess'].check_call.assert_called_once_with( -# ['ecaccess-file-delete', '{0}:{1}'.format(self.platform.host, os.path.join(self.platform.get_files_path(), -# 'file/path'))]) -# -# def test_deleting_file_on_ecaccess_platform_returns_true_on_error(self): -# self.platform.type = 'ecaccess' -# -# check_call_mock = Mock() -# check_call_mock.side_effect = subprocess.CalledProcessError -# sys.modules['subprocess'].check_call = check_call_mock -# -# deleted = self.platform.delete_file('file/path') -# self.assertTrue(deleted) -# -# def test_deleting_file_on_local_platform_makes_the_right_call(self): -# self.platform.type = 'local' -# self.platform.exists_file = Mock(return_value=True) -# out_mock = Mock() -# out_mock.remove = Mock() -# out_mock.close = Mock() -# sys.modules['saga'].filesystem.File = Mock(return_value=out_mock) -# -# deleted = self.platform.delete_file('file/path') -# -# self.assertTrue(deleted) -# 
sys.modules['saga'].filesystem.File.assert_called_once_with( -# "file://{0}".format(os.path.join(self.platform.tmp_path, 'LOG_' + self.platform.expid, -# 'file/path'))) -# out_mock.remove.assert_called_once_with() -# out_mock.close.assert_called_once_with() -# -# def test_deleting_file_on_non_local_platform_makes_the_right_call(self): -# self.platform.exists_file = Mock(return_value=True) -# out_mock = Mock() -# out_mock.remove = Mock() -# out_mock.close = Mock() -# sys.modules['saga'].filesystem.File = Mock(return_value=out_mock) -# -# deleted = self.platform.delete_file('file/path') -# -# self.assertTrue(deleted) -# sys.modules['saga'].filesystem.File.assert_called_once_with( -# "sftp://{0}{1}".format(self.platform.host, os.path.join(self.platform.get_files_path(),'file/path')), -# session=self.platform.service.session) -# out_mock.remove.assert_called_once_with() -# out_mock.close.assert_called_once_with() -# -# @patch('autosubmit.platforms.platform.sleep') -# def test_that_get_completed_makes_the_right_number_of_retries_when_not_found(self, mock_sleep): -# retries = 5 -# self.platform.get_file = Mock(return_value=False) -# -# found = self.platform.get_completed_files('any-name', retries) -# -# self.assertFalse(found) -# self.assertEquals(retries + 1, self.platform.get_file.call_count) -# -# -# class FakeService: -# def __init__(self, jobs): -# self.jobs = jobs -# -# -# class FakeJob: -# def __init__(self, name, state, type=None, parameters={}): -# self.name = name -# self.state = state -# self.type = type -# self.parameters = parameters -# -# -# class FakeJobDescription: -# def __init__(self): -# self.executable = None -# self.working_directory = None -# self.output = None -# self.error = None -# -# -# class FakeBasicConfig: -# def __init__(self): -# pass -# -# DB_DIR = '/dummy/db/dir' -# DB_FILE = '/dummy/db/file' -# DB_PATH = '/dummy/db/path' -# LOCAL_ROOT_DIR = '/dummy/local/root/dir' -# LOCAL_TMP_DIR = '/dummy/local/temp/dir' -# LOCAL_PROJ_DIR = 
'/dummy/local/proj/dir' -# DEFAULT_PLATFORMS_CONF = '' -# DEFAULT_JOBS_CONF = '' +import subprocess +import sys +from unittest import TestCase + +import os +import re + +from mock import Mock +from mock import patch + +from autosubmit.job.job_common import Status +from autosubmit.job.job_common import Type + +############################################### +# Special SAGA import to prevent logging/atfork errors + +os.environ['RADICAL_UTILS_NOATFORK'] = 'True' + +import saga +from autosubmit.platforms.saga_platform import SagaPlatform + + +############################################### + +class TestSagaPlatform(TestCase): + def setUp(self): + self.experiment_id = 'random-id' + self.platform = SagaPlatform( + self.experiment_id, 'test', FakeBasicConfig) + self.platform.service = Mock() + self.platform.service.session = Mock() + + def test_check_status_returns_completed_if_job_id_not_exists(self): + # arrange + self.platform.service = FakeService([]) + # act + status = self.platform.check_job('any-id') + # assert + self.assertEquals(Status.COMPLETED, status) + + def test_check_status_returns_the_right_states(self): + # arrange + self.platform.service = FakeService(['any-id']) + self.platform.service.get_job = Mock(side_effect=[FakeJob('any-name', saga.job.UNKNOWN), + FakeJob( + 'any-name', saga.job.PENDING), + FakeJob( + 'any-name', saga.job.FAILED), + FakeJob( + 'any-name', saga.job.CANCELED), + FakeJob( + 'any-name', saga.job.DONE), + FakeJob( + 'any-name', saga.job.RUNNING), + FakeJob('any-name', saga.job.SUSPENDED)]) + # act + should_be_unknown = self.platform.check_job('any-id') + should_be_queuing = self.platform.check_job('any-id') + should_be_failed = self.platform.check_job('any-id') + should_be_failed2 = self.platform.check_job('any-id') + should_be_completed = self.platform.check_job('any-id') + should_be_running = self.platform.check_job('any-id') + should_be_suspended = self.platform.check_job('any-id') + + # assert + self.assertEquals(Status.UNKNOWN, 
should_be_unknown) + self.assertEquals(Status.QUEUING, should_be_queuing) + self.assertEquals(Status.FAILED, should_be_failed) + self.assertEquals(Status.FAILED, should_be_failed2) + self.assertEquals(Status.COMPLETED, should_be_completed) + self.assertEquals(Status.RUNNING, should_be_running) + self.assertEquals(Status.SUSPENDED, should_be_suspended) + + def test_creates_a_saga_job_correctly(self): + parameters = {'WALLCLOCK': '', + 'CURRENT_QUEUE': 'queue', + 'CURRENT_BUDG': 'project', + 'NUMPROC': 666, + 'NUMTASK': 777, + 'NUMTHREADS': 888, + 'MEMORY': 999, + 'CURRENT_RESERVATION': 'dummy', + 'CURRENT_EXCLUSIVITY': 'true'} + job = FakeJob('any-name', saga.job.RUNNING, Type.BASH, parameters) + jd = FakeJobDescription() + sys.modules['saga'].job.Description = Mock(return_value=jd) + self.platform.add_attribute = Mock() + self.platform.service = FakeService([]) + self.platform.service.create_job = Mock(return_value='created-job') + + # act + created_job = self.platform.create_saga_job(job, 'scriptname') + + # assert + self.assertEquals('LOG_random-id/scriptname', jd.executable) + self.assertEquals('LOG_random-id', jd.working_directory) + self.assertIsNotNone(re.match('any-name.[0-9]*.out', jd.output)) + self.assertIsNotNone(re.match('any-name.[0-9]*.err', jd.error)) + self.platform.add_attribute.assert_any_call(jd, 'Name', job.name) + self.platform.add_attribute.assert_any_call(jd, 'WallTimeLimit', 0) + self.platform.add_attribute.assert_any_call( + jd, 'Queue', parameters["CURRENT_QUEUE"]) + self.platform.add_attribute.assert_any_call(jd, 'Project', parameters["CURRENT_BUDG"] + ':' + parameters[ + "CURRENT_RESERVATION"] + ':' + parameters["CURRENT_EXCLUSIVITY"]) + self.platform.add_attribute.assert_any_call( + jd, 'TotalCPUCount', parameters["NUMPROC"]) + self.platform.add_attribute.assert_any_call( + jd, 'ProcessesPerHost', parameters["NUMTASK"]) + self.platform.add_attribute.assert_any_call( + jd, 'ThreadsPerProcess', parameters["NUMTHREADS"]) + 
self.platform.add_attribute.assert_any_call( + jd, 'TotalPhysicalMemory', parameters["MEMORY"]) + self.assertEquals('created-job', created_job) + + def test_deleting_file_returns_true_if_not_exists(self): + self.platform.exists_file = Mock(return_value=False) + deleted = self.platform.delete_file('filename') + self.assertTrue(deleted) + + def test_deleting_file_on_ecaccess_platform_makes_the_right_call(self): + self.platform.type = 'ecaccess' + sys.modules['subprocess'].check_call = Mock() + + deleted = self.platform.delete_file('file/path') + + self.assertTrue(deleted) + sys.modules['subprocess'].check_call.assert_called_once_with( + ['ecaccess-file-delete', '{0}:{1}'.format(self.platform.host, os.path.join(self.platform.get_files_path(), + 'file/path'))]) + + def test_deleting_file_on_ecaccess_platform_returns_true_on_error(self): + self.platform.type = 'ecaccess' + + check_call_mock = Mock() + check_call_mock.side_effect = subprocess.CalledProcessError + sys.modules['subprocess'].check_call = check_call_mock + + deleted = self.platform.delete_file('file/path') + self.assertTrue(deleted) + + def test_deleting_file_on_local_platform_makes_the_right_call(self): + self.platform.type = 'local' + self.platform.exists_file = Mock(return_value=True) + out_mock = Mock() + out_mock.remove = Mock() + out_mock.close = Mock() + sys.modules['saga'].filesystem.File = Mock(return_value=out_mock) + + deleted = self.platform.delete_file('file/path') + + self.assertTrue(deleted) + sys.modules['saga'].filesystem.File.assert_called_once_with( + "file://{0}".format(os.path.join(self.platform.tmp_path, 'LOG_' + self.platform.expid, + 'file/path'))) + out_mock.remove.assert_called_once_with() + out_mock.close.assert_called_once_with() + + def test_deleting_file_on_non_local_platform_makes_the_right_call(self): + self.platform.exists_file = Mock(return_value=True) + out_mock = Mock() + out_mock.remove = Mock() + out_mock.close = Mock() + sys.modules['saga'].filesystem.File = 
Mock(return_value=out_mock) + + deleted = self.platform.delete_file('file/path') + + self.assertTrue(deleted) + sys.modules['saga'].filesystem.File.assert_called_once_with( + "sftp://{0}{1}".format(self.platform.host, + os.path.join(self.platform.get_files_path(), 'file/path')), + session=self.platform.service.session) + out_mock.remove.assert_called_once_with() + out_mock.close.assert_called_once_with() + + @patch('autosubmit.platforms.platform.sleep') + def test_that_get_completed_makes_the_right_number_of_retries_when_not_found(self, mock_sleep): + # Retries set to 0 to satisfy assert + + retries = 0 + self.platform.get_file = Mock(return_value=False) + + found = self.platform.get_completed_files('any-name', retries) + + self.assertFalse(found) + self.assertEquals(retries + 1, self.platform.get_file.call_count) + + +class FakeService: + def __init__(self, jobs): + self.jobs = jobs + + +class FakeJob: + def __init__(self, name, state, type=None, parameters={}): + self.name = name + self.state = state + self.type = type + self.parameters = parameters + + +class FakeJobDescription: + def __init__(self): + self.executable = None + self.working_directory = None + self.output = None + self.error = None + + +class FakeBasicConfig: + def __init__(self): + pass + + DB_DIR = '/dummy/db/dir' + DB_FILE = '/dummy/db/file' + DB_PATH = '/dummy/db/path' + LOCAL_ROOT_DIR = '/dummy/local/root/dir' + LOCAL_TMP_DIR = '/dummy/local/temp/dir' + LOCAL_PROJ_DIR = '/dummy/local/proj/dir' + DEFAULT_PLATFORMS_CONF = '' + DEFAULT_JOBS_CONF = '' diff --git a/test/unit/test_wrappers.py b/test/unit/test_wrappers.py index 0c63ae6251da9946fd3d9afb3040caddd849b381..5a991576fed0f11efe1c08b13d43d540ad64a9ec 100644 --- a/test/unit/test_wrappers.py +++ b/test/unit/test_wrappers.py @@ -1,1214 +1,1334 @@ -# from unittest import TestCase -# from mock import Mock -# from autosubmit.job.job_packager import JobPackager -# from autosubmit.job.job_packages import JobPackageVertical -# from 
autosubmit.job.job import Job -# from autosubmit.job.job_list import JobList -# from autosubmit.job.job_dict import DicJobs -# from autosubmit.job.job_utils import Dependency -# from bscearth.utils.config_parser import ConfigParserFactory -# from autosubmit.job.job_list_persistence import JobListPersistenceDb -# from autosubmit.job.job_common import Status -# from random import randrange -# from collections import OrderedDict -# -# class TestWrappers(TestCase): -# -# @classmethod -# def setUpClass(cls): -# #set up different workflows to be used in the test methods -# cls.workflows = dict() -# cls.workflows['basic'] = dict() -# cls.workflows['synchronize_date'] = dict() -# cls.workflows['synchronize_member'] = dict() -# cls.workflows['running_member'] = dict() -# cls.workflows['running_date'] = dict() -# cls.workflows['running_once'] = dict() -# -# cls.workflows['basic']['sections'] = OrderedDict() -# cls.workflows['basic']['sections']["s1"] = dict() -# cls.workflows['basic']['sections']["s1"]["RUNNING"] = "member" -# cls.workflows['basic']['sections']["s1"]["WALLCLOCK"] = '00:50' -# -# cls.workflows['basic']['sections']["s2"] = dict() -# cls.workflows['basic']['sections']["s2"]["RUNNING"] = "chunk" -# cls.workflows['basic']['sections']["s2"]["WALLCLOCK"] = '00:10' -# cls.workflows['basic']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" -# -# cls.workflows['basic']['sections']["s3"] = dict() -# cls.workflows['basic']['sections']["s3"]["RUNNING"] = "chunk" -# cls.workflows['basic']['sections']["s3"]["WALLCLOCK"] = '00:20' -# cls.workflows['basic']['sections']["s3"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['basic']['sections']["s4"] = dict() -# cls.workflows['basic']['sections']["s4"]["RUNNING"] = "chunk" -# cls.workflows['basic']['sections']["s4"]["WALLCLOCK"] = '00:30' -# cls.workflows['basic']['sections']["s4"]["DEPENDENCIES"] = "s3" -# -# cls.workflows['synchronize_date']['sections'] = OrderedDict() -# cls.workflows['synchronize_date']['sections']["s1"] = dict() 
-# cls.workflows['synchronize_date']['sections']["s1"]["RUNNING"] = "member" -# cls.workflows['synchronize_date']['sections']["s1"]["WALLCLOCK"] = '00:50' -# -# cls.workflows['synchronize_date']['sections']["s2"] = dict() -# cls.workflows['synchronize_date']['sections']["s2"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_date']['sections']["s2"]["WALLCLOCK"] = '00:10' -# cls.workflows['synchronize_date']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" -# -# cls.workflows['synchronize_date']['sections']["s3"] = dict() -# cls.workflows['synchronize_date']['sections']["s3"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_date']['sections']["s3"]["WALLCLOCK"] = '00:20' -# cls.workflows['synchronize_date']['sections']["s3"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['synchronize_date']['sections']["s4"] = dict() -# cls.workflows['synchronize_date']['sections']["s4"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_date']['sections']["s4"]["WALLCLOCK"] = '00:30' -# cls.workflows['synchronize_date']['sections']["s4"]["DEPENDENCIES"] = "s3" -# -# cls.workflows['synchronize_date']['sections']["s5"] = dict() -# cls.workflows['synchronize_date']['sections']["s5"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_date']['sections']["s5"]["SYNCHRONIZE"] = "date" -# cls.workflows['synchronize_date']['sections']["s5"]["WALLCLOCK"] = '00:30' -# cls.workflows['synchronize_date']['sections']["s5"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['synchronize_member']['sections'] = OrderedDict() -# cls.workflows['synchronize_member']['sections']["s1"] = dict() -# cls.workflows['synchronize_member']['sections']["s1"]["RUNNING"] = "member" -# cls.workflows['synchronize_member']['sections']["s1"]["WALLCLOCK"] = '00:50' -# -# cls.workflows['synchronize_member']['sections']["s2"] = dict() -# cls.workflows['synchronize_member']['sections']["s2"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_member']['sections']["s2"]["WALLCLOCK"] = '00:10' -# 
cls.workflows['synchronize_member']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" -# -# cls.workflows['synchronize_member']['sections']["s3"] = dict() -# cls.workflows['synchronize_member']['sections']["s3"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_member']['sections']["s3"]["WALLCLOCK"] = '00:20' -# cls.workflows['synchronize_member']['sections']["s3"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['synchronize_member']['sections']["s4"] = dict() -# cls.workflows['synchronize_member']['sections']["s4"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_member']['sections']["s4"]["WALLCLOCK"] = '00:30' -# cls.workflows['synchronize_member']['sections']["s4"]["DEPENDENCIES"] = "s3" -# -# cls.workflows['synchronize_member']['sections']["s5"] = dict() -# cls.workflows['synchronize_member']['sections']["s5"]["RUNNING"] = "chunk" -# cls.workflows['synchronize_member']['sections']["s5"]["SYNCHRONIZE"] = "member" -# cls.workflows['synchronize_member']['sections']["s5"]["WALLCLOCK"] = '00:30' -# cls.workflows['synchronize_member']['sections']["s5"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['running_date']['sections'] = OrderedDict() -# cls.workflows['running_date']['sections']["s1"] = dict() -# cls.workflows['running_date']['sections']["s1"]["RUNNING"] = "member" -# cls.workflows['running_date']['sections']["s1"]["WALLCLOCK"] = '00:50' -# -# cls.workflows['running_date']['sections']["s2"] = dict() -# cls.workflows['running_date']['sections']["s2"]["RUNNING"] = "chunk" -# cls.workflows['running_date']['sections']["s2"]["WALLCLOCK"] = '00:10' -# cls.workflows['running_date']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" -# -# cls.workflows['running_date']['sections']["s3"] = dict() -# cls.workflows['running_date']['sections']["s3"]["RUNNING"] = "chunk" -# cls.workflows['running_date']['sections']["s3"]["WALLCLOCK"] = '00:20' -# cls.workflows['running_date']['sections']["s3"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['running_date']['sections']["s4"] = dict() -# 
cls.workflows['running_date']['sections']["s4"]["RUNNING"] = "chunk" -# cls.workflows['running_date']['sections']["s4"]["WALLCLOCK"] = '00:30' -# cls.workflows['running_date']['sections']["s4"]["DEPENDENCIES"] = "s3" -# -# cls.workflows['running_date']['sections']["s5"] = dict() -# cls.workflows['running_date']['sections']["s5"]["RUNNING"] = "date" -# cls.workflows['running_date']['sections']["s5"]["WALLCLOCK"] = '00:30' -# cls.workflows['running_date']['sections']["s5"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['running_once']['sections'] = OrderedDict() -# cls.workflows['running_once']['sections']["s1"] = dict() -# cls.workflows['running_once']['sections']["s1"]["RUNNING"] = "member" -# cls.workflows['running_once']['sections']["s1"]["WALLCLOCK"] = '00:50' -# -# cls.workflows['running_once']['sections']["s2"] = dict() -# cls.workflows['running_once']['sections']["s2"]["RUNNING"] = "chunk" -# cls.workflows['running_once']['sections']["s2"]["WALLCLOCK"] = '00:10' -# cls.workflows['running_once']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" -# -# cls.workflows['running_once']['sections']["s3"] = dict() -# cls.workflows['running_once']['sections']["s3"]["RUNNING"] = "chunk" -# cls.workflows['running_once']['sections']["s3"]["WALLCLOCK"] = '00:20' -# cls.workflows['running_once']['sections']["s3"]["DEPENDENCIES"] = "s2" -# -# cls.workflows['running_once']['sections']["s4"] = dict() -# cls.workflows['running_once']['sections']["s4"]["RUNNING"] = "chunk" -# cls.workflows['running_once']['sections']["s4"]["WALLCLOCK"] = '00:30' -# cls.workflows['running_once']['sections']["s4"]["DEPENDENCIES"] = "s3" -# -# cls.workflows['running_once']['sections']["s5"] = dict() -# cls.workflows['running_once']['sections']["s5"]["RUNNING"] = "once" -# cls.workflows['running_once']['sections']["s5"]["WALLCLOCK"] = '00:30' -# cls.workflows['running_once']['sections']["s5"]["DEPENDENCIES"] = "s2" -# -# def setUp(self): -# self.experiment_id = 'random-id' -# self.config = FakeBasicConfig 
-# self.platform = Mock() -# self.job_list = JobList(self.experiment_id, self.config, ConfigParserFactory(), -# JobListPersistenceDb('.', '.')) -# self.parser_mock = Mock(spec='SafeConfigParser') -# -# self.platform.max_waiting_jobs = 100 -# self.platform.total_jobs = 100 -# self.config.get_wrapper_type = Mock(return_value='vertical') -# self.config.get_wrapper_crossdate = Mock(return_value=False) -# self.config.get_remote_dependencies = Mock(return_value=False) -# self.config.get_wrapper_jobs = Mock(return_value='None') -# self.config.get_wrapper_method = Mock(return_value='ASThread') -# self.config.get_wrapper_queue = Mock(return_value='debug') -# -# self.job_packager = JobPackager(self.config, self.platform, self.job_list) -# -# ### ONE SECTION WRAPPER ### -# def test_returned_packages(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# max_jobs = 20 -# max_wrapped_jobs = 20 -# max_wallclock = '10:00' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') -# d1_m1_6_s2 = self.job_list.get_job_by_name('expid_d1_m1_6_s2') -# d1_m1_7_s2 = self.job_list.get_job_by_name('expid_d1_m1_7_s2') -# d1_m1_8_s2 = self.job_list.get_job_by_name('expid_d1_m1_8_s2') -# d1_m1_9_s2 = self.job_list.get_job_by_name('expid_d1_m1_9_s2') -# d1_m1_10_s2 = 
self.job_list.get_job_by_name('expid_d1_m1_10_s2') -# -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') -# d1_m2_6_s2 = self.job_list.get_job_by_name('expid_d1_m2_6_s2') -# d1_m2_7_s2 = self.job_list.get_job_by_name('expid_d1_m2_7_s2') -# d1_m2_8_s2 = self.job_list.get_job_by_name('expid_d1_m2_8_s2') -# d1_m2_9_s2 = self.job_list.get_job_by_name('expid_d1_m2_9_s2') -# d1_m2_10_s2 = self.job_list.get_job_by_name('expid_d1_m2_10_s2') -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical' -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2, d1_m1_6_s2, d1_m1_7_s2, d1_m1_8_s2, -# d1_m1_9_s2, d1_m1_10_s2] -# package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2, d1_m2_6_s2, d1_m2_7_s2, d1_m2_8_s2, -# d1_m2_9_s2, d1_m2_10_s2] -# -# packages = [JobPackageVertical(package_m1_s2), JobPackageVertical(package_m2_s2)] -# -# #returned_packages = returned_packages[] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_max_jobs(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# 
self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# max_jobs = 12 -# max_wrapped_jobs = 10 -# max_wallclock = '10:00' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') -# d1_m1_6_s2 = self.job_list.get_job_by_name('expid_d1_m1_6_s2') -# d1_m1_7_s2 = self.job_list.get_job_by_name('expid_d1_m1_7_s2') -# d1_m1_8_s2 = self.job_list.get_job_by_name('expid_d1_m1_8_s2') -# d1_m1_9_s2 = self.job_list.get_job_by_name('expid_d1_m1_9_s2') -# d1_m1_10_s2 = self.job_list.get_job_by_name('expid_d1_m1_10_s2') -# -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical' -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2, d1_m1_6_s2, d1_m1_7_s2, d1_m1_8_s2, -# d1_m1_9_s2, d1_m1_10_s2] -# package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2] -# -# packages = [JobPackageVertical(package_m1_s2), JobPackageVertical(package_m2_s2)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_max_wrapped_jobs(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -# -# self._createDummyJobs(self.workflows['basic'], date_list, 
member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# max_jobs = 20 -# max_wrapped_jobs = 5 -# max_wallclock = '10:00' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') -# -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical' -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2] -# package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2] -# -# packages = [JobPackageVertical(package_m1_s2), JobPackageVertical(package_m2_s2)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_max_wallclock(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -# -# self._createDummyJobs(self.workflows['basic'], 
date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# max_jobs = 20 -# max_wrapped_jobs = 15 -# max_wallclock = '00:50' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') -# -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical' -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2] -# package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2] -# -# packages = [JobPackageVertical(package_m1_s2), JobPackageVertical(package_m2_s2)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_section_not_self_dependent(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -# -# 
self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s3').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s3').status = Status.READY -# -# max_jobs = 20 -# max_wrapped_jobs = 20 -# max_wallclock = '10:00' -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# -# section_list = [d1_m1_1_s3, d1_m2_1_s3] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical' -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2 = [d1_m1_1_s3] -# package_m2_s2 = [d1_m2_1_s3] -# -# packages = [JobPackageVertical(package_m1_s2), JobPackageVertical(package_m2_s2)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# ### MIXED WRAPPER ### -# def test_returned_packages_mixed_wrapper(self): -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# wrapper_expression = "s2 s3" -# max_jobs = 18 -# 
max_wrapped_jobs = 18 -# max_wallclock = '10:00' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.job_list._ordered_jobs_by_date_member["d1"] = dict() -# self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, -# d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, -# d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical-mixed' -# self.job_packager.jobs_in_wrapper = wrapper_expression -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, -# d1_m1_4_s3] -# 
package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, -# d1_m2_4_s3] -# -# packages = [JobPackageVertical(package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_parent_failed_mixed_wrapper(self): -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.FAILED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# -# wrapper_expression = "s2 s3" -# max_jobs = 18 -# max_wrapped_jobs = 18 -# max_wallclock = '10:00' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = 
self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.job_list._ordered_jobs_by_date_member["d1"] = dict() -# self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, -# d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, -# d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# section_list = [d1_m1_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical-mixed' -# self.job_packager.jobs_in_wrapper = wrapper_expression -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, -# d1_m1_4_s3] -# -# packages = [JobPackageVertical(package_m1_s2_s3)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_max_jobs_mixed_wrapper(self): -# wrapper_expression = "s2 s3" -# max_jobs = 10 -# max_wrapped_jobs = 10 -# max_wallclock = '10:00' -# -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = 
self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.job_list._ordered_jobs_by_date_member["d1"] = dict() -# self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, -# d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, -# d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical-mixed' -# self.job_packager.jobs_in_wrapper = wrapper_expression -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, -# d1_m1_4_s3] -# package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3] -# -# packages = [JobPackageVertical(package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# 
self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_max_wrapped_jobs_mixed_wrapper(self): -# wrapper_expression = "s2 s3" -# max_jobs = 15 -# max_wrapped_jobs = 5 -# max_wallclock = '10:00' -# -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.job_list._ordered_jobs_by_date_member["d1"] = dict() -# self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, -# d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, 
d1_m1_4_s3] -# -# self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, -# d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical-mixed' -# self.job_packager.jobs_in_wrapper = wrapper_expression -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2] -# package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2] -# -# packages = [JobPackageVertical(package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_max_wallclock_mixed_wrapper(self): -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# wrapper_expression = "s2 s3" -# max_jobs = 18 -# max_wrapped_jobs = 18 -# max_wallclock = '01:00' -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = 
self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.job_list._ordered_jobs_by_date_member["d1"] = dict() -# self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, -# d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, -# d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# section_list = [d1_m1_1_s2, d1_m2_1_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical-mixed' -# self.job_packager.jobs_in_wrapper = wrapper_expression -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3] -# package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3] -# -# packages = [JobPackageVertical(package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_returned_packages_first_chunks_completed_mixed_wrapper(self): -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# 
chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m1_2_s2').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m1_3_s2').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_2_s2').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m1_1_s3').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_1_s3').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_2_s3').status = Status.COMPLETED -# -# -# self.job_list.get_job_by_name('expid_d1_m1_4_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m2_3_s2').status = Status.READY -# self.job_list.get_job_by_name('expid_d1_m1_2_s3').status = Status.READY -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = 
self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.job_list._ordered_jobs_by_date_member["d1"] = dict() -# self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, -# d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, -# d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# wrapper_expression = "s2 s3" -# max_wrapped_jobs = 18 -# max_jobs = 18 -# max_wallclock = '10:00' -# -# section_list = [d1_m1_2_s3, d1_m1_4_s2, d1_m2_3_s2] -# -# self.job_packager.max_jobs = max_jobs -# self.job_packager._platform.max_wallclock = max_wallclock -# self.job_packager.wrapper_type = 'vertical-mixed' -# self.job_packager.jobs_in_wrapper = wrapper_expression -# -# returned_packages = self.job_packager._build_vertical_packages(section_list, max_wrapped_jobs) -# -# package_m1_s2_s3 = [d1_m1_2_s3, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# package_m2_s2_s3 = [d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# packages = [JobPackageVertical(package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] -# -# #returned_packages = returned_packages[0] -# for i in range(0, len(returned_packages)): -# self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) -# -# def test_ordered_dict_jobs_simple_workflow_mixed_wrapper(self): -# date_list = ["d1"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['basic'], date_list, member_list, chunk_list) -# -# self.job_list.get_job_by_name('expid_d1_m1_s1').status = Status.COMPLETED -# self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.COMPLETED -# -# self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY -# 
self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# self.parser_mock.has_option = Mock(return_value=True) -# self.parser_mock.get = Mock(return_value="chunk") -# self.job_list._get_date = Mock(return_value='d1') -# -# ordered_jobs_by_date_member = dict() -# ordered_jobs_by_date_member["d1"] = dict() -# ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, -# d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, -# d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# -# self.assertDictEqual(self.job_list._create_sorted_dict_jobs("s2 s3"), ordered_jobs_by_date_member) -# -# def test_ordered_dict_jobs_running_date_mixed_wrapper(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# 
self._createDummyJobs(self.workflows['running_date'], date_list, member_list, chunk_list) -# -# self.parser_mock.has_option = Mock(return_value=True) -# self.parser_mock.get = Mock(side_effect=["chunk", "chunk", "date"]) -# self.job_list._get_date = Mock(side_effect=['d1', 'd2']) -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# d1_s5 = self.job_list.get_job_by_name('expid_d1_s5') -# -# d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') -# d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') -# d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') -# d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') -# d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') -# d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') -# d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') -# d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') -# -# d2_m1_1_s3 = 
self.job_list.get_job_by_name('expid_d2_m1_1_s3') -# d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') -# d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') -# d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') -# d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') -# d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') -# d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') -# d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') -# -# d2_s5 = self.job_list.get_job_by_name('expid_d2_s5') -# -# ordered_jobs_by_date_member = dict() -# ordered_jobs_by_date_member["d1"] = dict() -# ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, -# d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, -# d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3, d1_s5] -# ordered_jobs_by_date_member["d2"] = dict() -# ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, -# d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] -# -# ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, d2_m2_2_s2, d2_m2_2_s3, d2_m2_3_s2, -# d2_m2_3_s3, d2_m2_4_s2, d2_m2_4_s3, d2_s5] -# -# self.assertDictEqual(self.job_list._create_sorted_dict_jobs("s2 s3 s5"), ordered_jobs_by_date_member) -# -# def test_ordered_dict_jobs_running_once_mixed_wrapper(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['running_once'], date_list, member_list, chunk_list) -# -# self.parser_mock.has_option = Mock(return_value=True) -# self.parser_mock.get = Mock(side_effect=["chunk", "chunk", "once"]) -# self.job_list._get_date = Mock(side_effect=['d2', 'd1', 'd2']) -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# 
d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') -# d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') -# d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') -# d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') -# d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') -# d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') -# d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') -# d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') -# -# d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') -# d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') -# d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') -# d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') -# d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') -# d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') -# d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') -# d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') -# -# s5 = 
self.job_list.get_job_by_name('expid_s5') -# -# ordered_jobs_by_date_member = dict() -# ordered_jobs_by_date_member["d1"] = dict() -# ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, -# d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, -# d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# ordered_jobs_by_date_member["d2"] = dict() -# ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, -# d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] -# -# ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, d2_m2_2_s2, d2_m2_2_s3, d2_m2_3_s2, -# d2_m2_3_s3, d2_m2_4_s2, d2_m2_4_s3, s5] -# -# self.assertDictEqual(self.job_list._create_sorted_dict_jobs("s2 s3 s5"), ordered_jobs_by_date_member) -# -# def test_ordered_dict_jobs_synchronize_date_mixed_wrapper(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['synchronize_date'], date_list, member_list, chunk_list) -# -# self.parser_mock.has_option = Mock(return_value=True) -# self.parser_mock.get = Mock(return_value="chunk") -# self.job_list._get_date = Mock(side_effect=['d2', 'd2', 'd2', 'd2', 'd1', 'd2']) -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# 
d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') -# d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') -# d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') -# d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') -# d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') -# d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') -# d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') -# d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') -# -# d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') -# d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') -# d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') -# d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') -# d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') -# d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') -# d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') -# d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') -# -# _1_s5 = self.job_list.get_job_by_name('expid_1_s5') -# _2_s5 = self.job_list.get_job_by_name('expid_2_s5') -# _3_s5 = self.job_list.get_job_by_name('expid_3_s5') -# _4_s5 = self.job_list.get_job_by_name('expid_4_s5') -# -# ordered_jobs_by_date_member = dict() -# ordered_jobs_by_date_member["d1"] = dict() -# ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, -# d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, 
d1_m2_2_s3, d1_m2_3_s2, -# d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] -# ordered_jobs_by_date_member["d2"] = dict() -# ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, -# d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] -# -# ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, _1_s5, d2_m2_2_s2, d2_m2_2_s3, _2_s5, d2_m2_3_s2, -# d2_m2_3_s3, _3_s5, d2_m2_4_s2, d2_m2_4_s3, _4_s5] -# -# self.assertDictEqual(self.job_list._create_sorted_dict_jobs("s2 s3 s5"), ordered_jobs_by_date_member) -# -# def test_ordered_dict_jobs_synchronize_member_mixed_wrapper(self): -# date_list = ["d1", "d2"] -# member_list = ["m1", "m2"] -# chunk_list = [1, 2, 3, 4] -# -# self._createDummyJobs(self.workflows['synchronize_member'], date_list, member_list, chunk_list) -# -# self.parser_mock.has_option = Mock(return_value=True) -# self.parser_mock.get = Mock(return_value="chunk") -# self.job_list._get_date = Mock(side_effect=['d1', 'd2']) -# -# d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') -# d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') -# d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') -# d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') -# d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') -# d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') -# d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') -# d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') -# -# d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') -# d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') -# d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') -# d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') -# d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') -# d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') -# d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') -# d1_m2_4_s3 = 
self.job_list.get_job_by_name('expid_d1_m2_4_s3') -# -# d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') -# d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') -# d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') -# d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') -# d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') -# d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') -# d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') -# d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') -# -# d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') -# d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') -# d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') -# d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') -# d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') -# d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') -# d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') -# d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') -# -# d1_1_s5 = self.job_list.get_job_by_name('expid_d1_1_s5') -# d1_2_s5 = self.job_list.get_job_by_name('expid_d1_2_s5') -# d1_3_s5 = self.job_list.get_job_by_name('expid_d1_3_s5') -# d1_4_s5 = self.job_list.get_job_by_name('expid_d1_4_s5') -# -# d2_1_s5 = self.job_list.get_job_by_name('expid_d2_1_s5') -# d2_2_s5 = self.job_list.get_job_by_name('expid_d2_2_s5') -# d2_3_s5 = self.job_list.get_job_by_name('expid_d2_3_s5') -# d2_4_s5 = self.job_list.get_job_by_name('expid_d2_4_s5') -# -# ordered_jobs_by_date_member = dict() -# ordered_jobs_by_date_member["d1"] = dict() -# ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, -# d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] -# -# ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_1_s5, d1_m2_2_s2, d1_m2_2_s3, d1_2_s5, d1_m2_3_s2, -# d1_m2_3_s3, d1_3_s5, d1_m2_4_s2, 
d1_m2_4_s3, d1_4_s5] -# ordered_jobs_by_date_member["d2"] = dict() -# ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, -# d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] -# -# ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, d2_1_s5, d2_m2_2_s2, d2_m2_2_s3, d2_2_s5, d2_m2_3_s2, -# d2_m2_3_s3, d2_3_s5, d2_m2_4_s2, d2_m2_4_s3, d2_4_s5] -# -# self.assertDictEqual(self.job_list._create_sorted_dict_jobs("s2 s3 s5"), ordered_jobs_by_date_member) -# -# def _createDummyJobs(self, sections_dict, date_list, member_list, chunk_list): -# for section, section_dict in sections_dict.get('sections').items(): -# running = section_dict['RUNNING'] -# wallclock = section_dict['WALLCLOCK'] -# -# if running == 'once': -# name = 'expid_' + section -# job = self._createDummyJob(name, wallclock, section) -# self.job_list._job_list.append(job) -# elif running == 'date': -# for date in date_list: -# name = 'expid_' + date + "_" + section -# job = self._createDummyJob(name, wallclock, section, date) -# self.job_list._job_list.append(job) -# elif running == 'member': -# for date in date_list: -# for member in member_list: -# name = 'expid_' + date + "_" + member + "_" + section -# job = self._createDummyJob(name, wallclock, section, date, member) -# self.job_list._job_list.append(job) -# elif running == 'chunk': -# synchronize_type = section_dict['SYNCHRONIZE'] if 'SYNCHRONIZE' in section_dict else None -# if synchronize_type == 'date': -# for chunk in chunk_list: -# name = 'expid_' + str(chunk) + "_" + section -# job = self._createDummyJob(name, wallclock, section, None, None, chunk) -# self.job_list._job_list.append(job) -# elif synchronize_type == 'member': -# for date in date_list: -# for chunk in chunk_list: -# name = 'expid_' + date + "_" + str(chunk) + "_" + section -# job = self._createDummyJob(name, wallclock, section, date, None, chunk) -# self.job_list._job_list.append(job) -# else: -# for date in date_list: -# for member 
in member_list: -# for chunk in chunk_list: -# name = 'expid_' + date + "_" + member + "_" + str(chunk) + "_" + section -# job = self._createDummyJob(name, wallclock, section, date, member, chunk) -# self.job_list._job_list.append(job) -# -# self.job_list._date_list = date_list -# self.job_list._member_list = member_list -# self.job_list._chunk_list = chunk_list -# -# self.job_list._dic_jobs = DicJobs(self.job_list, self.parser_mock, date_list, member_list, chunk_list, "", 0) -# self._manage_dependencies(sections_dict) -# -# def _manage_dependencies(self, sections_dict): -# for job in self.job_list.get_job_list(): -# section = job.section -# dependencies = sections_dict['sections'][section]['DEPENDENCIES'] if 'DEPENDENCIES' in sections_dict['sections'][section] else '' -# self._manage_job_dependencies(job, dependencies, sections_dict) -# -# def _manage_job_dependencies(self, job, dependencies, sections_dict): -# for key in dependencies.split(): -# if '-' not in key: -# dependency = Dependency(key) -# else: -# sign = '-' if '-' in key else '+' -# key_split = key.split(sign) -# section = key_split[0] -# distance = key_split[1] -# dependency_running_type = sections_dict['sections'][section]['RUNNING'] -# dependency = Dependency(section, int(distance), dependency_running_type, sign) -# -# skip, (chunk, member, date) = self.job_list._calculate_dependency_metadata(job.chunk, self.job_list.get_chunk_list(), -# job.member, self.job_list.get_member_list(), -# job.date, self.job_list.get_date_list(), -# dependency) -# if skip: -# continue -# -# for parent in self._filter_jobs(dependency.section, date, member, chunk): -# job.add_parent(parent) -# -# def _filter_jobs(self, section, date=None, member=None, chunk=None): -# # TODO: improve the efficiency -# jobs = filter(lambda job: job.section == section and job.date == date and job.member == member and job.chunk == chunk, -# self.job_list.get_job_list()) -# return jobs -# -# def _createDummyJob(self, name, total_wallclock, 
section, date=None, member=None, chunk=None): -# job_id = randrange(1, 999) -# job = Job(name, job_id, Status.WAITING, 0) -# job.type = randrange(0, 2) -# job.packed = False -# job.wallclock = total_wallclock -# job.platform = self.platform -# -# job.date = date -# job.member = member -# job.chunk = chunk -# job.section = section -# -# return job -# -# class FakeBasicConfig: -# def __init__(self): -# pass -# -# DB_DIR = '/dummy/db/dir' -# DB_FILE = '/dummy/db/file' -# DB_PATH = '/dummy/db/path' -# LOCAL_ROOT_DIR = '/dummy/local/root/dir' -# LOCAL_TMP_DIR = '/dummy/local/temp/dir' -# LOCAL_PROJ_DIR = '/dummy/local/proj/dir' -# DEFAULT_PLATFORMS_CONF = '' -# DEFAULT_JOBS_CONF = '' \ No newline at end of file +from unittest import TestCase +from mock import Mock +from autosubmit.job.job_packager import JobPackager +from autosubmit.job.job_packages import JobPackageVertical +from autosubmit.job.job import Job +from autosubmit.job.job_list import JobList +from autosubmit.job.job_dict import DicJobs +from autosubmit.job.job_utils import Dependency +from bscearth.utils.config_parser import ConfigParserFactory +from autosubmit.job.job_list_persistence import JobListPersistenceDb +from autosubmit.job.job_common import Status +from random import randrange +from collections import OrderedDict + + +class TestWrappers(TestCase): + + @classmethod + def setUpClass(cls): + # set up different workflows to be used in the test methods + cls.workflows = dict() + cls.workflows['basic'] = dict() + cls.workflows['synchronize_date'] = dict() + cls.workflows['synchronize_member'] = dict() + cls.workflows['running_member'] = dict() + cls.workflows['running_date'] = dict() + cls.workflows['running_once'] = dict() + + cls.workflows['basic']['sections'] = OrderedDict() + cls.workflows['basic']['sections']["s1"] = dict() + cls.workflows['basic']['sections']["s1"]["RUNNING"] = "member" + cls.workflows['basic']['sections']["s1"]["WALLCLOCK"] = '00:50' + + cls.workflows['basic']['sections']["s2"] 
= dict() + cls.workflows['basic']['sections']["s2"]["RUNNING"] = "chunk" + cls.workflows['basic']['sections']["s2"]["WALLCLOCK"] = '00:10' + cls.workflows['basic']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" + + cls.workflows['basic']['sections']["s3"] = dict() + cls.workflows['basic']['sections']["s3"]["RUNNING"] = "chunk" + cls.workflows['basic']['sections']["s3"]["WALLCLOCK"] = '00:20' + cls.workflows['basic']['sections']["s3"]["DEPENDENCIES"] = "s2" + + cls.workflows['basic']['sections']["s4"] = dict() + cls.workflows['basic']['sections']["s4"]["RUNNING"] = "chunk" + cls.workflows['basic']['sections']["s4"]["WALLCLOCK"] = '00:30' + cls.workflows['basic']['sections']["s4"]["DEPENDENCIES"] = "s3" + + cls.workflows['synchronize_date']['sections'] = OrderedDict() + cls.workflows['synchronize_date']['sections']["s1"] = dict() + cls.workflows['synchronize_date']['sections']["s1"]["RUNNING"] = "member" + cls.workflows['synchronize_date']['sections']["s1"]["WALLCLOCK"] = '00:50' + + cls.workflows['synchronize_date']['sections']["s2"] = dict() + cls.workflows['synchronize_date']['sections']["s2"]["RUNNING"] = "chunk" + cls.workflows['synchronize_date']['sections']["s2"]["WALLCLOCK"] = '00:10' + cls.workflows['synchronize_date']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" + + cls.workflows['synchronize_date']['sections']["s3"] = dict() + cls.workflows['synchronize_date']['sections']["s3"]["RUNNING"] = "chunk" + cls.workflows['synchronize_date']['sections']["s3"]["WALLCLOCK"] = '00:20' + cls.workflows['synchronize_date']['sections']["s3"]["DEPENDENCIES"] = "s2" + + cls.workflows['synchronize_date']['sections']["s4"] = dict() + cls.workflows['synchronize_date']['sections']["s4"]["RUNNING"] = "chunk" + cls.workflows['synchronize_date']['sections']["s4"]["WALLCLOCK"] = '00:30' + cls.workflows['synchronize_date']['sections']["s4"]["DEPENDENCIES"] = "s3" + + cls.workflows['synchronize_date']['sections']["s5"] = dict() + 
cls.workflows['synchronize_date']['sections']["s5"]["RUNNING"] = "chunk" + cls.workflows['synchronize_date']['sections']["s5"]["SYNCHRONIZE"] = "date" + cls.workflows['synchronize_date']['sections']["s5"]["WALLCLOCK"] = '00:30' + cls.workflows['synchronize_date']['sections']["s5"]["DEPENDENCIES"] = "s2" + + cls.workflows['synchronize_member']['sections'] = OrderedDict() + cls.workflows['synchronize_member']['sections']["s1"] = dict() + cls.workflows['synchronize_member']['sections']["s1"]["RUNNING"] = "member" + cls.workflows['synchronize_member']['sections']["s1"]["WALLCLOCK"] = '00:50' + + cls.workflows['synchronize_member']['sections']["s2"] = dict() + cls.workflows['synchronize_member']['sections']["s2"]["RUNNING"] = "chunk" + cls.workflows['synchronize_member']['sections']["s2"]["WALLCLOCK"] = '00:10' + cls.workflows['synchronize_member']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" + + cls.workflows['synchronize_member']['sections']["s3"] = dict() + cls.workflows['synchronize_member']['sections']["s3"]["RUNNING"] = "chunk" + cls.workflows['synchronize_member']['sections']["s3"]["WALLCLOCK"] = '00:20' + cls.workflows['synchronize_member']['sections']["s3"]["DEPENDENCIES"] = "s2" + + cls.workflows['synchronize_member']['sections']["s4"] = dict() + cls.workflows['synchronize_member']['sections']["s4"]["RUNNING"] = "chunk" + cls.workflows['synchronize_member']['sections']["s4"]["WALLCLOCK"] = '00:30' + cls.workflows['synchronize_member']['sections']["s4"]["DEPENDENCIES"] = "s3" + + cls.workflows['synchronize_member']['sections']["s5"] = dict() + cls.workflows['synchronize_member']['sections']["s5"]["RUNNING"] = "chunk" + cls.workflows['synchronize_member']['sections']["s5"]["SYNCHRONIZE"] = "member" + cls.workflows['synchronize_member']['sections']["s5"]["WALLCLOCK"] = '00:30' + cls.workflows['synchronize_member']['sections']["s5"]["DEPENDENCIES"] = "s2" + + cls.workflows['running_date']['sections'] = OrderedDict() + 
cls.workflows['running_date']['sections']["s1"] = dict() + cls.workflows['running_date']['sections']["s1"]["RUNNING"] = "member" + cls.workflows['running_date']['sections']["s1"]["WALLCLOCK"] = '00:50' + + cls.workflows['running_date']['sections']["s2"] = dict() + cls.workflows['running_date']['sections']["s2"]["RUNNING"] = "chunk" + cls.workflows['running_date']['sections']["s2"]["WALLCLOCK"] = '00:10' + cls.workflows['running_date']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" + + cls.workflows['running_date']['sections']["s3"] = dict() + cls.workflows['running_date']['sections']["s3"]["RUNNING"] = "chunk" + cls.workflows['running_date']['sections']["s3"]["WALLCLOCK"] = '00:20' + cls.workflows['running_date']['sections']["s3"]["DEPENDENCIES"] = "s2" + + cls.workflows['running_date']['sections']["s4"] = dict() + cls.workflows['running_date']['sections']["s4"]["RUNNING"] = "chunk" + cls.workflows['running_date']['sections']["s4"]["WALLCLOCK"] = '00:30' + cls.workflows['running_date']['sections']["s4"]["DEPENDENCIES"] = "s3" + + cls.workflows['running_date']['sections']["s5"] = dict() + cls.workflows['running_date']['sections']["s5"]["RUNNING"] = "date" + cls.workflows['running_date']['sections']["s5"]["WALLCLOCK"] = '00:30' + cls.workflows['running_date']['sections']["s5"]["DEPENDENCIES"] = "s2" + + cls.workflows['running_once']['sections'] = OrderedDict() + cls.workflows['running_once']['sections']["s1"] = dict() + cls.workflows['running_once']['sections']["s1"]["RUNNING"] = "member" + cls.workflows['running_once']['sections']["s1"]["WALLCLOCK"] = '00:50' + + cls.workflows['running_once']['sections']["s2"] = dict() + cls.workflows['running_once']['sections']["s2"]["RUNNING"] = "chunk" + cls.workflows['running_once']['sections']["s2"]["WALLCLOCK"] = '00:10' + cls.workflows['running_once']['sections']["s2"]["DEPENDENCIES"] = "s1 s2-1" + + cls.workflows['running_once']['sections']["s3"] = dict() + cls.workflows['running_once']['sections']["s3"]["RUNNING"] = "chunk" 
+ cls.workflows['running_once']['sections']["s3"]["WALLCLOCK"] = '00:20' + cls.workflows['running_once']['sections']["s3"]["DEPENDENCIES"] = "s2" + + cls.workflows['running_once']['sections']["s4"] = dict() + cls.workflows['running_once']['sections']["s4"]["RUNNING"] = "chunk" + cls.workflows['running_once']['sections']["s4"]["WALLCLOCK"] = '00:30' + cls.workflows['running_once']['sections']["s4"]["DEPENDENCIES"] = "s3" + + cls.workflows['running_once']['sections']["s5"] = dict() + cls.workflows['running_once']['sections']["s5"]["RUNNING"] = "once" + cls.workflows['running_once']['sections']["s5"]["WALLCLOCK"] = '00:30' + cls.workflows['running_once']['sections']["s5"]["DEPENDENCIES"] = "s2" + + def setUp(self): + self.experiment_id = 'random-id' + self.config = FakeBasicConfig + self.platform = Mock() + self.job_list = JobList(self.experiment_id, self.config, ConfigParserFactory(), + JobListPersistenceDb('.', '.')) + self.parser_mock = Mock(spec='SafeConfigParser') + + self.platform.max_waiting_jobs = 100 + self.platform.total_jobs = 100 + self.config.get_wrapper_type = Mock(return_value='vertical') + self.config.get_wrapper_crossdate = Mock(return_value=False) + self.config.get_remote_dependencies = Mock(return_value=False) + self.config.get_wrapper_jobs = Mock(return_value='None') + self.config.get_wrapper_method = Mock(return_value='ASThread') + self.config.get_wrapper_queue = Mock(return_value='debug') + + self.job_packager = JobPackager( + self.config, self.platform, self.job_list) + + ### ONE SECTION WRAPPER ### + def test_returned_packages(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = 
Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + max_jobs = 20 + max_wrapped_jobs = 20 + max_wallclock = '10:00' + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') + d1_m1_6_s2 = self.job_list.get_job_by_name('expid_d1_m1_6_s2') + d1_m1_7_s2 = self.job_list.get_job_by_name('expid_d1_m1_7_s2') + d1_m1_8_s2 = self.job_list.get_job_by_name('expid_d1_m1_8_s2') + d1_m1_9_s2 = self.job_list.get_job_by_name('expid_d1_m1_9_s2') + d1_m1_10_s2 = self.job_list.get_job_by_name('expid_d1_m1_10_s2') + + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') + d1_m2_6_s2 = self.job_list.get_job_by_name('expid_d1_m2_6_s2') + d1_m2_7_s2 = self.job_list.get_job_by_name('expid_d1_m2_7_s2') + d1_m2_8_s2 = self.job_list.get_job_by_name('expid_d1_m2_8_s2') + d1_m2_9_s2 = self.job_list.get_job_by_name('expid_d1_m2_9_s2') + d1_m2_10_s2 = self.job_list.get_job_by_name('expid_d1_m2_10_s2') + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical' + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2, d1_m1_6_s2, d1_m1_7_s2, d1_m1_8_s2, + d1_m1_9_s2, d1_m1_10_s2] + package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2, d1_m2_6_s2, 
d1_m2_7_s2, d1_m2_8_s2, + d1_m2_9_s2, d1_m2_10_s2] + + packages = [JobPackageVertical( + package_m1_s2), JobPackageVertical(package_m2_s2)] + + # returned_packages = returned_packages[] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_max_jobs(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + max_jobs = 12 + max_wrapped_jobs = 10 + max_wallclock = '10:00' + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') + d1_m1_6_s2 = self.job_list.get_job_by_name('expid_d1_m1_6_s2') + d1_m1_7_s2 = self.job_list.get_job_by_name('expid_d1_m1_7_s2') + d1_m1_8_s2 = self.job_list.get_job_by_name('expid_d1_m1_8_s2') + d1_m1_9_s2 = self.job_list.get_job_by_name('expid_d1_m1_9_s2') + d1_m1_10_s2 = self.job_list.get_job_by_name('expid_d1_m1_10_s2') + + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') + d1_m2_6_s2 = self.job_list.get_job_by_name('expid_d1_m2_6_s2') + d1_m2_7_s2 = 
self.job_list.get_job_by_name('expid_d1_m2_7_s2') + d1_m2_8_s2 = self.job_list.get_job_by_name('expid_d1_m2_8_s2') + d1_m2_9_s2 = self.job_list.get_job_by_name('expid_d1_m2_9_s2') + d1_m2_10_s2 = self.job_list.get_job_by_name('expid_d1_m2_10_s2') + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical' + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2, d1_m1_6_s2, d1_m1_7_s2, d1_m1_8_s2, + d1_m1_9_s2, d1_m1_10_s2] + package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2, d1_m2_6_s2, d1_m2_7_s2, d1_m2_8_s2, + d1_m2_9_s2, d1_m2_10_s2] + + packages = [JobPackageVertical( + package_m1_s2), JobPackageVertical(package_m2_s2)] + + #returned_packages = returned_packages[0] + #print("max jobs test") + for i in range(0, len(returned_packages)): + # print("Element " + str(i)) + # print("Returned from packager") + # for job in returned_packages[i]._jobs: + # print(job.name) + # print("Build for test") + # for _job in packages[i]._jobs: + # print(_job.name) + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_max_wrapped_jobs(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + max_jobs = 20 + max_wrapped_jobs = 5 + max_wallclock = '10:00' + + d1_m1_1_s2 = 
self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') + + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical' + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, + d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2] + package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, + d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2] + + packages = [JobPackageVertical( + package_m1_s2), JobPackageVertical(package_m2_s2)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_max_wallclock(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + max_jobs = 20 + max_wrapped_jobs = 15 + max_wallclock = '00:50' + + d1_m1_1_s2 = 
self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m1_5_s2 = self.job_list.get_job_by_name('expid_d1_m1_5_s2') + + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + d1_m2_5_s2 = self.job_list.get_job_by_name('expid_d1_m2_5_s2') + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical' + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2 = [d1_m1_1_s2, d1_m1_2_s2, + d1_m1_3_s2, d1_m1_4_s2, d1_m1_5_s2] + package_m2_s2 = [d1_m2_1_s2, d1_m2_2_s2, + d1_m2_3_s2, d1_m2_4_s2, d1_m2_5_s2] + + packages = [JobPackageVertical( + package_m1_s2), JobPackageVertical(package_m2_s2)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_section_not_self_dependent(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m1_1_s2').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_1_s2').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s3').status = 
Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s3').status = Status.READY + + max_jobs = 20 + max_wrapped_jobs = 20 + max_wallclock = '10:00' + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + + section_list = [d1_m1_1_s3, d1_m2_1_s3] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical' + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2 = [d1_m1_1_s3] + package_m2_s2 = [d1_m2_1_s3] + + packages = [JobPackageVertical( + package_m1_s2), JobPackageVertical(package_m2_s2)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + ### MIXED WRAPPER ### + def test_returned_packages_mixed_wrapper(self): + date_list = ["d1"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + wrapper_expression = "s2 s3" + max_jobs = 18 + max_wrapped_jobs = 18 + max_wallclock = '10:00' + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = 
self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.job_list._ordered_jobs_by_date_member["d1"] = dict() + self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, + d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, + d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical-mixed' + self.job_packager.jobs_in_wrapper = wrapper_expression + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, + d1_m1_4_s3] + package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, + d1_m2_4_s3] + + packages = [JobPackageVertical( + package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_parent_failed_mixed_wrapper(self): + date_list = ["d1"] + member_list = ["m1", "m2"] + 
chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name('expid_d1_m2_s1').status = Status.FAILED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + + wrapper_expression = "s2 s3" + max_jobs = 18 + max_wrapped_jobs = 18 + max_wallclock = '10:00' + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.job_list._ordered_jobs_by_date_member["d1"] = dict() + self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, + d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, + d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + section_list = [d1_m1_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = 
max_wallclock + self.job_packager.wrapper_type = 'vertical-mixed' + self.job_packager.jobs_in_wrapper = wrapper_expression + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, + d1_m1_4_s3] + + packages = [JobPackageVertical(package_m1_s2_s3)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_max_jobs_mixed_wrapper(self): + wrapper_expression = "s2 s3" + max_jobs = 10 + max_wrapped_jobs = 10 + max_wallclock = '10:00' + + date_list = ["d1"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = 
self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.job_list._ordered_jobs_by_date_member["d1"] = dict() + self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, + d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, + d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical-mixed' + self.job_packager.jobs_in_wrapper = wrapper_expression + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, + d1_m1_4_s3] + package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, + d1_m2_4_s3] + + packages = [JobPackageVertical( + package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] + + #returned_packages = returned_packages[0] + # print("test_returned_packages_max_jobs_mixed_wrapper") + for i in range(0, len(returned_packages)): + # print("Element " + str(i)) + # print("Returned from packager") + # for job in returned_packages[i]._jobs: + # print(job.name) + # print("Build for test") + # for _job in packages[i]._jobs: + # print(_job.name) + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_max_wrapped_jobs_mixed_wrapper(self): + wrapper_expression = "s2 s3" + max_jobs = 15 + max_wrapped_jobs = 5 + max_wallclock = '10:00' + + date_list = ["d1"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + 
+ self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.job_list._ordered_jobs_by_date_member["d1"] = dict() + self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, + d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, + d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + 
self.job_packager.wrapper_type = 'vertical-mixed' + self.job_packager.jobs_in_wrapper = wrapper_expression + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, + d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2] + package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, + d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2] + + packages = [JobPackageVertical( + package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_max_wallclock_mixed_wrapper(self): + date_list = ["d1"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + wrapper_expression = "s2 s3" + max_jobs = 18 + max_wrapped_jobs = 18 + max_wallclock = '01:00' + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + 
d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.job_list._ordered_jobs_by_date_member["d1"] = dict() + self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, + d1_m1_3_s2, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, + d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + section_list = [d1_m1_1_s2, d1_m2_1_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical-mixed' + self.job_packager.jobs_in_wrapper = wrapper_expression + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2_s3 = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3] + package_m2_s2_s3 = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3] + + packages = [JobPackageVertical( + package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_returned_packages_first_chunks_completed_mixed_wrapper(self): + date_list = ["d1"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name( + 'expid_d1_m1_1_s2').status = Status.COMPLETED + self.job_list.get_job_by_name( + 
'expid_d1_m1_2_s2').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m1_3_s2').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_1_s2').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_2_s2').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m1_1_s3').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_1_s3').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_2_s3').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_4_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_3_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m1_2_s3').status = Status.READY + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.job_list._ordered_jobs_by_date_member["d1"] = dict() + self.job_list._ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, + d1_m1_3_s3, 
d1_m1_4_s2, d1_m1_4_s3] + + self.job_list._ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, + d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + wrapper_expression = "s2 s3" + max_wrapped_jobs = 18 + max_jobs = 18 + max_wallclock = '10:00' + + section_list = [d1_m1_2_s3, d1_m1_4_s2, d1_m2_3_s2] + + self.job_packager.max_jobs = max_jobs + self.job_packager._platform.max_wallclock = max_wallclock + self.job_packager.wrapper_type = 'vertical-mixed' + self.job_packager.jobs_in_wrapper = wrapper_expression + + returned_packages = self.job_packager._build_vertical_packages( + section_list, max_wrapped_jobs) + + package_m1_s2_s3 = [d1_m1_2_s3, d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + package_m2_s2_s3 = [d1_m2_3_s2, d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + packages = [JobPackageVertical( + package_m1_s2_s3), JobPackageVertical(package_m2_s2_s3)] + + #returned_packages = returned_packages[0] + for i in range(0, len(returned_packages)): + self.assertListEqual(returned_packages[i]._jobs, packages[i]._jobs) + + def test_ordered_dict_jobs_simple_workflow_mixed_wrapper(self): + date_list = ["d1"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['basic'], date_list, member_list, chunk_list) + + self.job_list.get_job_by_name( + 'expid_d1_m1_s1').status = Status.COMPLETED + self.job_list.get_job_by_name( + 'expid_d1_m2_s1').status = Status.COMPLETED + + self.job_list.get_job_by_name('expid_d1_m1_1_s2').status = Status.READY + self.job_list.get_job_by_name('expid_d1_m2_1_s2').status = Status.READY + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + 
d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + self.parser_mock.has_option = Mock(return_value=True) + self.parser_mock.get = Mock(return_value="chunk") + self.job_list._get_date = Mock(return_value='d1') + + ordered_jobs_by_date_member = dict() + ordered_jobs_by_date_member["d1"] = dict() + ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, + d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, + d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + + self.assertDictEqual(self.job_list._create_sorted_dict_jobs( + "s2 s3"), ordered_jobs_by_date_member) + + def test_ordered_dict_jobs_running_date_mixed_wrapper(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['running_date'], date_list, member_list, chunk_list) + + self.parser_mock.has_option = Mock(return_value=True) + self.parser_mock.get = Mock(side_effect=["chunk", "chunk", "date"]) + self.job_list._get_date = Mock(side_effect=['d1', 'd2']) + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 
= self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + d1_s5 = self.job_list.get_job_by_name('expid_d1_s5') + + d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') + d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') + d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') + d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') + d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') + d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') + d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') + d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') + + d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') + d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') + d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') + d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') + d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') + d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') + d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') + d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') + + d2_s5 = self.job_list.get_job_by_name('expid_d2_s5') + + ordered_jobs_by_date_member = dict() + 
ordered_jobs_by_date_member["d1"] = dict() + ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, + d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, + d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3, d1_s5] + ordered_jobs_by_date_member["d2"] = dict() + ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, + d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] + + ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, d2_m2_2_s2, d2_m2_2_s3, d2_m2_3_s2, + d2_m2_3_s3, d2_m2_4_s2, d2_m2_4_s3, d2_s5] + + self.assertDictEqual(self.job_list._create_sorted_dict_jobs( + "s2 s3 s5"), ordered_jobs_by_date_member) + + def test_ordered_dict_jobs_running_once_mixed_wrapper(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['running_once'], date_list, member_list, chunk_list) + + self.parser_mock.has_option = Mock(return_value=True) + self.parser_mock.get = Mock(side_effect=["chunk", "chunk", "once"]) + self.job_list._get_date = Mock(side_effect=['d2', 'd1', 'd2']) + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = 
self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') + d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') + d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') + d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') + d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') + d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') + d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') + d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') + + d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') + d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') + d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') + d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') + d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') + d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') + d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') + d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') + + s5 = self.job_list.get_job_by_name('expid_s5') + + ordered_jobs_by_date_member = dict() + ordered_jobs_by_date_member["d1"] = dict() + ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, + d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, + d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + ordered_jobs_by_date_member["d2"] = dict() + ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, + d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] + + 
ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, d2_m2_2_s2, d2_m2_2_s3, d2_m2_3_s2, + d2_m2_3_s3, d2_m2_4_s2, d2_m2_4_s3, s5] + + self.assertDictEqual(self.job_list._create_sorted_dict_jobs( + "s2 s3 s5"), ordered_jobs_by_date_member) + + def test_ordered_dict_jobs_synchronize_date_mixed_wrapper(self): + date_list = ["d1", "d2"] + member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['synchronize_date'], date_list, member_list, chunk_list) + + self.parser_mock.has_option = Mock(return_value=True) + self.parser_mock.get = Mock(return_value="chunk") + self.job_list._get_date = Mock( + side_effect=['d2', 'd2', 'd2', 'd2', 'd1', 'd2']) + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') + d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') + d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') + d2_m1_4_s2 = 
self.job_list.get_job_by_name('expid_d2_m1_4_s2') + d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') + d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') + d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') + d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') + + d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') + d2_m1_2_s3 = self.job_list.get_job_by_name('expid_d2_m1_2_s3') + d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') + d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') + d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') + d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') + d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') + d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') + + _1_s5 = self.job_list.get_job_by_name('expid_1_s5') + _2_s5 = self.job_list.get_job_by_name('expid_2_s5') + _3_s5 = self.job_list.get_job_by_name('expid_3_s5') + _4_s5 = self.job_list.get_job_by_name('expid_4_s5') + + ordered_jobs_by_date_member = dict() + ordered_jobs_by_date_member["d1"] = dict() + ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, + d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_m2_2_s2, d1_m2_2_s3, d1_m2_3_s2, + d1_m2_3_s3, d1_m2_4_s2, d1_m2_4_s3] + ordered_jobs_by_date_member["d2"] = dict() + ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, + d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] + + ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, _1_s5, d2_m2_2_s2, d2_m2_2_s3, _2_s5, d2_m2_3_s2, + d2_m2_3_s3, _3_s5, d2_m2_4_s2, d2_m2_4_s3, _4_s5] + + self.assertDictEqual(self.job_list._create_sorted_dict_jobs( + "s2 s3 s5"), ordered_jobs_by_date_member) + + def test_ordered_dict_jobs_synchronize_member_mixed_wrapper(self): + date_list = ["d1", "d2"] + 
member_list = ["m1", "m2"] + chunk_list = [1, 2, 3, 4] + + self._createDummyJobs( + self.workflows['synchronize_member'], date_list, member_list, chunk_list) + + self.parser_mock.has_option = Mock(return_value=True) + self.parser_mock.get = Mock(return_value="chunk") + self.job_list._get_date = Mock(side_effect=['d1', 'd2']) + + d1_m1_1_s2 = self.job_list.get_job_by_name('expid_d1_m1_1_s2') + d1_m1_2_s2 = self.job_list.get_job_by_name('expid_d1_m1_2_s2') + d1_m1_3_s2 = self.job_list.get_job_by_name('expid_d1_m1_3_s2') + d1_m1_4_s2 = self.job_list.get_job_by_name('expid_d1_m1_4_s2') + d1_m2_1_s2 = self.job_list.get_job_by_name('expid_d1_m2_1_s2') + d1_m2_2_s2 = self.job_list.get_job_by_name('expid_d1_m2_2_s2') + d1_m2_3_s2 = self.job_list.get_job_by_name('expid_d1_m2_3_s2') + d1_m2_4_s2 = self.job_list.get_job_by_name('expid_d1_m2_4_s2') + + d1_m1_1_s3 = self.job_list.get_job_by_name('expid_d1_m1_1_s3') + d1_m1_2_s3 = self.job_list.get_job_by_name('expid_d1_m1_2_s3') + d1_m1_3_s3 = self.job_list.get_job_by_name('expid_d1_m1_3_s3') + d1_m1_4_s3 = self.job_list.get_job_by_name('expid_d1_m1_4_s3') + d1_m2_1_s3 = self.job_list.get_job_by_name('expid_d1_m2_1_s3') + d1_m2_2_s3 = self.job_list.get_job_by_name('expid_d1_m2_2_s3') + d1_m2_3_s3 = self.job_list.get_job_by_name('expid_d1_m2_3_s3') + d1_m2_4_s3 = self.job_list.get_job_by_name('expid_d1_m2_4_s3') + + d2_m1_1_s2 = self.job_list.get_job_by_name('expid_d2_m1_1_s2') + d2_m1_2_s2 = self.job_list.get_job_by_name('expid_d2_m1_2_s2') + d2_m1_3_s2 = self.job_list.get_job_by_name('expid_d2_m1_3_s2') + d2_m1_4_s2 = self.job_list.get_job_by_name('expid_d2_m1_4_s2') + d2_m2_1_s2 = self.job_list.get_job_by_name('expid_d2_m2_1_s2') + d2_m2_2_s2 = self.job_list.get_job_by_name('expid_d2_m2_2_s2') + d2_m2_3_s2 = self.job_list.get_job_by_name('expid_d2_m2_3_s2') + d2_m2_4_s2 = self.job_list.get_job_by_name('expid_d2_m2_4_s2') + + d2_m1_1_s3 = self.job_list.get_job_by_name('expid_d2_m1_1_s3') + d2_m1_2_s3 = 
self.job_list.get_job_by_name('expid_d2_m1_2_s3') + d2_m1_3_s3 = self.job_list.get_job_by_name('expid_d2_m1_3_s3') + d2_m1_4_s3 = self.job_list.get_job_by_name('expid_d2_m1_4_s3') + d2_m2_1_s3 = self.job_list.get_job_by_name('expid_d2_m2_1_s3') + d2_m2_2_s3 = self.job_list.get_job_by_name('expid_d2_m2_2_s3') + d2_m2_3_s3 = self.job_list.get_job_by_name('expid_d2_m2_3_s3') + d2_m2_4_s3 = self.job_list.get_job_by_name('expid_d2_m2_4_s3') + + d1_1_s5 = self.job_list.get_job_by_name('expid_d1_1_s5') + d1_2_s5 = self.job_list.get_job_by_name('expid_d1_2_s5') + d1_3_s5 = self.job_list.get_job_by_name('expid_d1_3_s5') + d1_4_s5 = self.job_list.get_job_by_name('expid_d1_4_s5') + + d2_1_s5 = self.job_list.get_job_by_name('expid_d2_1_s5') + d2_2_s5 = self.job_list.get_job_by_name('expid_d2_2_s5') + d2_3_s5 = self.job_list.get_job_by_name('expid_d2_3_s5') + d2_4_s5 = self.job_list.get_job_by_name('expid_d2_4_s5') + + ordered_jobs_by_date_member = dict() + ordered_jobs_by_date_member["d1"] = dict() + ordered_jobs_by_date_member["d1"]["m1"] = [d1_m1_1_s2, d1_m1_1_s3, d1_m1_2_s2, d1_m1_2_s3, d1_m1_3_s2, + d1_m1_3_s3, d1_m1_4_s2, d1_m1_4_s3] + + ordered_jobs_by_date_member["d1"]["m2"] = [d1_m2_1_s2, d1_m2_1_s3, d1_1_s5, d1_m2_2_s2, d1_m2_2_s3, d1_2_s5, d1_m2_3_s2, + d1_m2_3_s3, d1_3_s5, d1_m2_4_s2, d1_m2_4_s3, d1_4_s5] + ordered_jobs_by_date_member["d2"] = dict() + ordered_jobs_by_date_member["d2"]["m1"] = [d2_m1_1_s2, d2_m1_1_s3, d2_m1_2_s2, d2_m1_2_s3, d2_m1_3_s2, + d2_m1_3_s3, d2_m1_4_s2, d2_m1_4_s3] + + ordered_jobs_by_date_member["d2"]["m2"] = [d2_m2_1_s2, d2_m2_1_s3, d2_1_s5, d2_m2_2_s2, d2_m2_2_s3, d2_2_s5, d2_m2_3_s2, + d2_m2_3_s3, d2_3_s5, d2_m2_4_s2, d2_m2_4_s3, d2_4_s5] + + self.assertDictEqual(self.job_list._create_sorted_dict_jobs( + "s2 s3 s5"), ordered_jobs_by_date_member) + + def _createDummyJobs(self, sections_dict, date_list, member_list, chunk_list): + for section, section_dict in sections_dict.get('sections').items(): + running = section_dict['RUNNING'] + 
wallclock = section_dict['WALLCLOCK'] + + if running == 'once': + name = 'expid_' + section + job = self._createDummyJob(name, wallclock, section) + self.job_list._job_list.append(job) + elif running == 'date': + for date in date_list: + name = 'expid_' + date + "_" + section + job = self._createDummyJob(name, wallclock, section, date) + self.job_list._job_list.append(job) + elif running == 'member': + for date in date_list: + for member in member_list: + name = 'expid_' + date + "_" + member + "_" + section + job = self._createDummyJob( + name, wallclock, section, date, member) + self.job_list._job_list.append(job) + elif running == 'chunk': + synchronize_type = section_dict['SYNCHRONIZE'] if 'SYNCHRONIZE' in section_dict else None + if synchronize_type == 'date': + for chunk in chunk_list: + name = 'expid_' + str(chunk) + "_" + section + job = self._createDummyJob( + name, wallclock, section, None, None, chunk) + self.job_list._job_list.append(job) + elif synchronize_type == 'member': + for date in date_list: + for chunk in chunk_list: + name = 'expid_' + date + "_" + \ + str(chunk) + "_" + section + job = self._createDummyJob( + name, wallclock, section, date, None, chunk) + self.job_list._job_list.append(job) + else: + for date in date_list: + for member in member_list: + for chunk in chunk_list: + name = 'expid_' + date + "_" + member + \ + "_" + str(chunk) + "_" + section + job = self._createDummyJob( + name, wallclock, section, date, member, chunk) + self.job_list._job_list.append(job) + + self.job_list._date_list = date_list + self.job_list._member_list = member_list + self.job_list._chunk_list = chunk_list + + self.job_list._dic_jobs = DicJobs( + self.job_list, self.parser_mock, date_list, member_list, chunk_list, "", 0) + self._manage_dependencies(sections_dict) + + def _manage_dependencies(self, sections_dict): + for job in self.job_list.get_job_list(): + section = job.section + dependencies = sections_dict['sections'][section][ + 'DEPENDENCIES'] if 
'DEPENDENCIES' in sections_dict['sections'][section] else '' + self._manage_job_dependencies(job, dependencies, sections_dict) + + def _manage_job_dependencies(self, job, dependencies, sections_dict): + for key in dependencies.split(): + if '-' not in key: + dependency = Dependency(key) + else: + sign = '-' if '-' in key else '+' + key_split = key.split(sign) + section = key_split[0] + distance = key_split[1] + dependency_running_type = sections_dict['sections'][section]['RUNNING'] + dependency = Dependency(section, int( + distance), dependency_running_type, sign) + + skip, (chunk, member, date) = self.job_list._calculate_dependency_metadata(job.chunk, self.job_list.get_chunk_list(), + job.member, self.job_list.get_member_list(), + job.date, self.job_list.get_date_list(), + dependency) + if skip: + continue + + for parent in self._filter_jobs(dependency.section, date, member, chunk): + job.add_parent(parent) + + def _filter_jobs(self, section, date=None, member=None, chunk=None): + # TODO: improve the efficiency + jobs = filter(lambda job: job.section == section and job.date == date and job.member == member and job.chunk == chunk, + self.job_list.get_job_list()) + return jobs + + def _createDummyJob(self, name, total_wallclock, section, date=None, member=None, chunk=None): + job_id = randrange(1, 999) + job = Job(name, job_id, Status.WAITING, 0) + job.type = randrange(0, 2) + job.packed = False + job.hold = False + job.wallclock = total_wallclock + job.platform = self.platform + + job.date = date + job.member = member + job.chunk = chunk + job.section = section + + return job + + +class FakeBasicConfig: + def __init__(self): + pass + + DB_DIR = '/dummy/db/dir' + DB_FILE = '/dummy/db/file' + DB_PATH = '/dummy/db/path' + LOCAL_ROOT_DIR = '/dummy/local/root/dir' + LOCAL_TMP_DIR = '/dummy/local/temp/dir' + LOCAL_PROJ_DIR = '/dummy/local/proj/dir' + DEFAULT_PLATFORMS_CONF = '' + DEFAULT_JOBS_CONF = ''