diff --git a/atdb/taskdatabase/models.py b/atdb/taskdatabase/models.py
index cd389229a0f03b98ec9a2ab2136753d7350d5dd8..16cd9d7468733d2f4e9a833ee710a507ba2e27a9 100644
--- a/atdb/taskdatabase/models.py
+++ b/atdb/taskdatabase/models.py
@@ -21,8 +21,9 @@ class Task(models.Model):
     task_type = models.CharField(max_length=20, default="task")
 
     # note: the apparent naming reversal is intentional. Predecessors are somebody elses successors.
-    predecessors = models.ForeignKey('self', related_name='task_successors', on_delete=models.SET_NULL, null=True,blank=True)
-    successors = models.ForeignKey('self', related_name='task_predecessors', on_delete=models.SET_NULL, null=True,blank=True)
+    desired_predecessor_id = models.CharField(max_length=12, blank=True, null=True)
+    predecessor = models.ForeignKey('self', related_name='task_successors', on_delete=models.SET_NULL, null=True,blank=True)
+    successor = models.ForeignKey('self', related_name='task_predecessors', on_delete=models.SET_NULL, null=True,blank=True)
 
     workflow = models.ForeignKey(Workflow, related_name='tasks', on_delete=models.SET_NULL, null=True)
 
diff --git a/atdb/taskdatabase/serializers.py b/atdb/taskdatabase/serializers.py
index 2ef09247763c5a407f5edf46807ac1a48d92844f..7454684574ebece930d23fed74690c62e933b2ce 100644
--- a/atdb/taskdatabase/serializers.py
+++ b/atdb/taskdatabase/serializers.py
@@ -12,10 +12,10 @@ class TaskSerializer(serializers.ModelSerializer):
 
     class Meta:
         model = Task
         fields = ('id','task_type','taskID',
-                  'predecessors','successors',
+                  'desired_predecessor_id','predecessor','successor',
                   'project','sas_id','priority','purge_policy','skip',
                   'desired_workflow_id','desired_workflow_uri','workflow',
-                  'status','desired_status','desired_workflow_uri',
+                  'status','desired_status',
                   'inputs','outputs','status_history')
 
diff --git a/atdb/taskdatabase/services/signals.py b/atdb/taskdatabase/services/signals.py
index 264e88f92b9b349ad150981da3084ef5ce7ff07b..03ce4475a2d896bceb94a9f90bdc55da32365365 100644
--- a/atdb/taskdatabase/services/signals.py
+++ b/atdb/taskdatabase/services/signals.py
@@ -34,6 +34,46 @@ def pre_save_task_handler(sender, **kwargs):
     handle_pre_save(sender, **kwargs)
 
 
+def add_workflow(myTaskObject):
+    # Attach the desired workflow to the task after it was posted (flat) through the REST API.
+    # It tries desired_workflow_id first and falls back to desired_workflow_uri.
+    desired_workflow_id = myTaskObject.desired_workflow_id
+    desired_workflow_uri = myTaskObject.desired_workflow_uri
+
+    # first try to find the workflow by its id
+    try:
+        desired_workflow = Workflow.objects.get(id=desired_workflow_id)
+    except (Workflow.DoesNotExist, ValueError, TypeError):
+        desired_workflow = None
+
+    if desired_workflow is None:
+        # then fall back to looking it up by workflow_uri
+        desired_workflow = Workflow.objects.get(workflow_uri=desired_workflow_uri)
+
+    # only touch the task when the workflow actually changes
+    if myTaskObject.workflow != desired_workflow:
+        myTaskObject.workflow = desired_workflow
+
+    return myTaskObject
+
+
+def add_predecessor(myTaskObject):
+    # Attach the desired predecessor task after it was posted (flat) through the REST API.
+    desired_predecessor_id = myTaskObject.desired_predecessor_id
+
+    # look up the predecessor by its task id;
+    # when it cannot be found (not set, or invalid) leave the task untouched
+    try:
+        desired_predecessor = Task.objects.get(id=desired_predecessor_id)
+    except (Task.DoesNotExist, ValueError, TypeError):
+        return myTaskObject
+
+    if myTaskObject.predecessor != desired_predecessor:
+        myTaskObject.predecessor = desired_predecessor
+
+    return myTaskObject
+
+
 def handle_pre_save(sender, **kwargs):
     """
     pre_save handler. Mainly to check status changes and dispatch jobs in needed.
@@ -61,19 +101,9 @@ def handle_pre_save(sender, **kwargs):
         myStatus.save()
 
     # connect the task to a workflow after posting a (flat) task through the REST API
-    desired_workflow_id = myTaskObject.desired_workflow_id
-    desired_workflow_uri = myTaskObject.desired_workflow_uri
+    myTaskObject = add_workflow(myTaskObject)
+    myTaskObject = add_predecessor(myTaskObject)
 
-    # first try to find the workflow by desired workflow_id
-    desired_workflow = Workflow.objects.get(id=desired_workflow_id)
-    if (desired_workflow == None):
-        # then try workflow_uri
-        desired_workflow = Workflow.objects.get(uri=desired_workflow_uri)
-
-    # first check if works needs to be done at all
-    if (myTaskObject.workflow != desired_workflow):
-        # set the new status
-        myTaskObject.workflow = desired_workflow
 
     # temporarily disconnect the post_save handler to save the dataproduct (again) and avoiding recursion.
    # I don't use pre_save, because then the 'created' key is not available, which is the most handy way to