Commit 93ccfc00 authored by sparonuz's avatar sparonuz
Browse files

[ExceptionManager]: Changed how exception manager inherits job info

parent 93fdac27
......@@ -127,8 +127,8 @@ class BinarySearchJob(Job):
variables = [self.vault.get_variable_by_id(_id) for _id in self.analysis_set]
routines = set([v.routine for v in variables])
modules = set([r.module for r in routines])
if len(modules) == 1:
return False
# if len(modules) == 1:
# return False
if len(self.analysis_set) >= 2:
_set1, _set2 = divide_set(self.analysis_set, self.vault)
self.child = []
......@@ -219,9 +219,9 @@ class BinarySearchJob(Job):
# If the set cannot be split, check whether it's an exception and handle it
else:
from AutoRPE.UtilsWorkflow.ExceptionManager import ExceptionManager
from AutoRPE.UtilsWorkflow.Errors import NoException
exception_manager = ExceptionManager(bsj=self, exception=self.kind_of_exception())
exception_manager.divide_and_force(queue)
exception_manager = ExceptionManager(self.kind_of_exception())
exception_manager.divide_and_force(self, queue)
# queue.seed_job.graph()
def check_child(self, queue):
# Check if all the child jobs have finished
......@@ -297,4 +297,12 @@ class BinarySearchJob(Job):
graph += "%s --> %s[*]\n" % (
fix_identifier(member.parent.identifier()), fix_identifier(member.identifier()))
graph += style(member)
import calendar;
import time;
ts = calendar.timegm(time.gmtime())
mermaid_graph_file = open("/home/Earth/sparonuz/Pictures/mermaid/mermaid_chart_"+str(ts)+".txt", "w")
mermaid_graph_file.write(graph)
mermaid_graph_file.close()
return graph
from AutoRPE.UtilsWorkflow.BinaryJob import BinarySearchJob
class ExceptionManager(BinarySearchJob):
def __init__(self, bsj, exception):
class ExceptionManager():
    """Handle the failure modes of a BinarySearchJob whose analysis set
    could not be split any further.

    Instances are created by BinarySearchJob with the kind of exception
    detected for the job (``ExceptionManager(self.kind_of_exception())``)
    and then asked to act on the job via ``divide_and_force`` /
    ``divide_or_fail``.
    """
    def __init__(self, exception):
        # Exception class identifying the failure kind (e.g. NoException);
        # divide_and_force compares against it to decide how to proceed.
        self.exception = exception
# Call the copy constructor
super().__init__(bsj=bsj)
def divide_and_force(self, queue):
from AutoRPE.UtilsWorkflow.Errors import NoException, QueueError
analized_job = [q_el for q_el in queue[self.status] if q_el.hash == self.hash]
if len(analized_job) > 1:
raise QueueError("More than one job found with the same hash")
if not len(analized_job):
print("Here")
analized_job = analized_job[0]
def divide_and_force(self, analized_job, queue):
from AutoRPE.UtilsWorkflow.Errors import NoException
if self.exception == NoException:
queue[analized_job.status].remove(analized_job)
analized_job.status = "FAILED"
......@@ -29,6 +19,7 @@ class ExceptionManager(BinarySearchJob):
analized_job.status = "SUSPENDED"
child_1, child_2 = analized_job.child
hash_1, hash_2 = child_1.hash, child_2.hash
# Make sure that the analysis variables of one child are the forced variables of the other one
if child_1.forced_variables is not None:
child_1.forced_variables.extend(child_2.analysis_set)
......@@ -36,6 +27,10 @@ class ExceptionManager(BinarySearchJob):
else:
child_1.forced_variables = child_2.analysis_set
child_2.forced_variables = child_1.analysis_set
# Avoid loops
if child_1.hash == hash_1 or child_2.hash == hash_2:
raise AssertionError
for ch in analized_job.child:
if ch in queue.all():
queue[ch.status].remove(ch)
......@@ -53,8 +48,8 @@ class ExceptionManager(BinarySearchJob):
queue.disinherited.append(job)
# Do not perform other action, the set is considered as FAILED
def divide_or_fail(self, queue):
    """Give up on this job: detach it from its current status bucket in
    *queue*, mark it FAILED, and archive it in ``queue.failed``.

    Parameters
    ----------
    queue : workflow queue, indexable by status name and exposing a
        ``failed`` list of terminated jobs.
    """
    # Pull the job out of whatever status list it currently sits in.
    current_bucket = queue[self.status]
    current_bucket.remove(self)
    # Flip the state, then record the job among the failures and log it.
    self.status = "FAILED"
    queue.failed.append(self)
    self.print_info_job("failed")
def divide_or_fail(self, analyzed_job, queue):
    """Mark *analyzed_job* as FAILED and archive it.

    The job is removed from the *queue* bucket matching its current
    status, its status is switched to ``"FAILED"``, and it is appended
    to ``queue.failed`` before the failure is logged.

    Parameters
    ----------
    analyzed_job : the job whose set could not be handled further.
    queue : workflow queue, indexable by status name and exposing a
        ``failed`` list of terminated jobs.
    """
    # Detach the job from its current status bucket.
    bucket = queue[analyzed_job.status]
    bucket.remove(analyzed_job)
    # Transition to the terminal state and record the failure.
    analyzed_job.status = "FAILED"
    queue.failed.append(analyzed_job)
    analyzed_job.print_info_job("failed")
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment