Commit 96222768 authored by Nico Vermaas's avatar Nico Vermaas

refactoring signals

parent e4813003
[Image diffs: atdb/docs/ATDB Workflow Diagram - Imaging.png (432 KiB); atdb/docs/ATDB-LDV Workflow Diagram.png (133 KiB → 140 KiB)]
"""
Jobs contains the business logic for the different system jobs that have to be executed based on status changes
for Observations or DataProducts in ATDB.
"""
import logging;
logger = logging.getLogger(__name__)
def dispatchJob(myTaskObject, new_status):
"""
Adds a job to the jobs table (or executes it directly)
:param (in) myObject: Observation or Dataproduct that triggers the action
:param (in) status: The status that triggers the action
"""
# logger.info("*** dispatchJob(" + str(myTaskObject) + "," + str(new_status) + ") ***")
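The body of dispatchJob is elided in this diff. As a rough sketch of the dispatch-on-status-change pattern the docstring describes (the handler names and status values below are hypothetical, not taken from ATDB):

# Hypothetical sketch only; the real dispatchJob body is not shown in this diff.
def _dispatch_sketch(myTaskObject, new_status):
    # map illustrative status values to job handlers
    handlers = {
        'defined': lambda task: logger.info("queue ingest job for %s", task),
        'processed': lambda task: logger.info("queue validation job for %s", task),
    }
    handler = handlers.get(new_status)
    if handler is not None:
        handler(myTaskObject)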
@@ -6,7 +6,6 @@ from django.contrib.auth.models import User
from django.dispatch import receiver
from django.contrib.contenttypes.models import ContentType
from taskdatabase.models import Task, Workflow, LogEntry, Status
from . import jobs
"""
Signals sent from different parts of the backend are centrally defined and handled here.
......@@ -15,17 +14,6 @@ Signals sent from different parts of the backend are centrally defined and handl
logger = logging.getLogger(__name__)
#--- HTTP REQUEST signals-------------
@receiver(request_started)
def request_started_handler(sender, **kwargs):
    logger.debug("signal : request_started")

@receiver(request_finished)
def request_finished_handler(sender, **kwargs):
    logger.debug("signal : request_finished")
#--- Task signals-------------
@receiver(pre_save, sender=Task)
@@ -33,50 +21,6 @@ def pre_save_task_handler(sender, **kwargs):
    logger.info("SIGNAL : pre_save Task(" + str(kwargs.get('instance')) + ")")
    handle_pre_save(sender, **kwargs)
def add_workflow(myTaskObject):
    new_workflow_id = myTaskObject.new_workflow_id
    new_workflow_uri = myTaskObject.new_workflow_uri

    # first try to find the workflow by the desired workflow_id
    try:
        new_workflow = Workflow.objects.get(id=new_workflow_id)
    except Workflow.DoesNotExist:
        new_workflow = None

    if new_workflow is None:
        # then try the workflow_uri
        try:
            new_workflow = Workflow.objects.get(workflow_uri=new_workflow_uri)
        except Workflow.DoesNotExist:
            pass

    # first check if work needs to be done at all
    if myTaskObject.workflow != new_workflow:
        # set the new workflow
        myTaskObject.workflow = new_workflow

    return myTaskObject
def add_predecessor_obsolete(myTaskObject):
    # connect the task to its predecessor after posting a (flat) task through the REST API
    try:
        new_predecessor_id = myTaskObject.new_predecessor_id
        # try to find the predecessor task by the desired id
        new_predecessor = Task.objects.get(id=new_predecessor_id)
        if myTaskObject.predecessor != new_predecessor:
            # set the new predecessor
            myTaskObject.predecessor = new_predecessor
    except (Task.DoesNotExist, AttributeError):
        pass
    return myTaskObject
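The two-step lookup in add_workflow (first by id, then by workflow_uri) can be read as a small resolver. A minimal sketch of that pattern, assuming only the Workflow model already imported in this module; resolve_workflow is a hypothetical name, not part of this commit:

# Sketch of the id-first, URI-fallback lookup used by add_workflow (illustrative only).
def resolve_workflow(workflow_id, workflow_uri):
    try:
        return Workflow.objects.get(id=workflow_id)
    except Workflow.DoesNotExist:
        pass
    try:
        return Workflow.objects.get(workflow_uri=workflow_uri)
    except Workflow.DoesNotExist:
        return None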
def handle_pre_save(sender, **kwargs):
    """
    pre_save handler. Mainly to check status changes and dispatch jobs if needed.
    """
@@ -99,7 +43,7 @@ def handle_pre_save(sender, **kwargs):
    # set the new status
    myTaskObject.status = new_status

    # add the new status to the status history
    myStatus = Status(name=new_status, task=myTaskObject)
    myStatus.save()
@@ -111,54 +55,11 @@ def handle_pre_save(sender, **kwargs):
    myTaskObject.save()
    connect_signals()

    # dispatch a job if the status has changed.
    if (new_status is not None) and (status != new_status):
        jobs.dispatchJob(myTaskObject, new_status)
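Because every status change appends a Status row linked to its task, the history can be read back in order. A usage sketch, assuming Status keeps Django's default auto-increment primary key so id order equals insertion order; status_history is a hypothetical helper, not part of this commit:

# Hypothetical read-back of the status history written by handle_pre_save.
def status_history(task):
    return [s.name for s in Status.objects.filter(task=task).order_by('id')]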
@receiver(post_save, sender=Task)
def post_save_task_handler(sender, **kwargs):
    # logger.info("SIGNAL : post_save Task(" + str(kwargs.get('instance')) + ")")
    handle_post_save(sender, **kwargs)
def handle_post_save(sender, **kwargs):
    """
    post_save handler for Task. Creates and writes its initial status.
    :param (in) sender: The model class that sends the trigger
    :param (in) kwargs: The instance of the object that sends the trigger.
    """
    logger.info("handle_post_save(" + str(kwargs.get('instance')) + ")")
    myTaskObject = kwargs.get('instance')

    # Create new task
    if kwargs['created']:
        logger.info("save new " + str(myTaskObject.task_type))

        # set status
        myTaskObject.status = myTaskObject.new_status

        # add the new status to the status history
        myStatus = Status(name=myTaskObject.new_status, task=myTaskObject)
        myStatus.save()

        # connect the task to a workflow after posting a (flat) task through the REST API
        #myTaskObject = add_workflow(myTaskObject)
        #myTaskObject = add_predecessor(myTaskObject)

        # temporarily disconnect the post_save handler to save the task (again) while avoiding recursion.
        # I don't use pre_save, because the 'created' key is not available there, and that is the handiest
        # way to determine whether this task already exists. (I could also check the database, but this is easier.)
        disconnect_signals()
        myTaskObject.save()
        connect_signals()
def connect_signals():
    # logger.info("connect_signals")
    pre_save.connect(pre_save_task_handler, sender=Task)
    post_save.connect(post_save_task_handler, sender=Task)

def disconnect_signals():
    # logger.info("disconnect_signals")
    pre_save.disconnect(pre_save_task_handler, sender=Task)
    post_save.disconnect(post_save_task_handler, sender=Task)
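Both save handlers repeat the disconnect/save/connect dance. One possible refinement, sketched here and not part of this commit, is a context manager that guarantees the handlers are reconnected even if save() raises:

from contextlib import contextmanager

@contextmanager
def signals_disconnected():
    # hypothetical helper: reconnects the Task handlers even when save() raises
    disconnect_signals()
    try:
        yield
    finally:
        connect_signals()

# usage inside a handler would then be:
#   with signals_disconnected():
#       myTaskObject.save()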
@@ -45,7 +45,7 @@
</div>
{% include 'taskdatabase/pagination.html' %}
</div>
<p class="footer"> Version 1.0.0 (26 feb 2021 - 7:00)
<p class="footer"> Version 1.0.0 (1 mar 2021 - 11:00)
<script type="text/javascript">
(function(seconds) {
var refresh,
......