# signals.py
import logging
import datetime

from django.db.models.signals import pre_save, post_save
from django.core.signals import request_started, request_finished
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.contrib.contenttypes.models import ContentType

from taskdatabase.models import Task, Workflow, LogEntry, Status
from . import jobs

"""
Signals sent from different parts of the backend are centrally defined and handled here.
"""

logger = logging.getLogger(__name__)
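
# Note: the @receiver decorators below register the handlers as soon as this module is
# imported. A common way to guarantee that import happens is AppConfig.ready(); the
# sketch below is an assumption about how that could look (class and app names are
# illustrative, not taken from this repository):
#
#     # apps.py
#     from django.apps import AppConfig
#
#     class TaskDatabaseConfig(AppConfig):
#         name = 'taskdatabase'
#
#         def ready(self):
#             from . import signals  # noqa: F401  (importing hooks up the handlers)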

#--- HTTP REQUEST signals-------------

@receiver(request_started)
def request_started_handler(sender, **kwargs):
    logger.debug("signal : request_started")


@receiver(request_finished)
def request_finished_handler(sender, **kwargs):
    logger.debug("signal : request_finished")

#--- Task signals-------------

@receiver(pre_save, sender=Task)
def pre_save_task_handler(sender, **kwargs):
    logger.info("SIGNAL : pre_save Task(" + str(kwargs.get('instance')) + ")")
    handle_pre_save(sender, **kwargs)

def handle_pre_save(sender, **kwargs):
    """
    pre_save handler. Mainly used to check for status changes and to dispatch jobs if needed.
    :param (in) sender: The model class that sends the trigger
    :param (in) kwargs: The instance of the object that sends the trigger.
    """
    logger.info("handle_pre_save(" + str(kwargs.get('instance')) + ")")
    myTaskObject = kwargs.get('instance')

    # if this object does not exist yet, then abort and let handle_post_save handle it first (so it gets an id).
    if myTaskObject.id is None:
        return None

    # handle status change
    status = str(myTaskObject.status)
    desired_status = str(myTaskObject.desired_status)

    if (desired_status is not None) and (status != desired_status):
        # set the new status
        myTaskObject.status = desired_status

        # add the new status to the status history by brewing a Status object out of it
        myStatus = Status(name=desired_status, task=myTaskObject)
        myStatus.save()

    # connect the task to a workflow after posting a (flat) task through the REST API
    desired_workflow_id = myTaskObject.desired_workflow_id
    desired_workflow_uri = myTaskObject.desired_workflow_uri

    # first try to find the workflow by its id, then fall back to its uri.
    # (filter().first() returns None when there is no match, unlike get(), which would raise DoesNotExist)
    desired_workflow = Workflow.objects.filter(id=desired_workflow_id).first()
    if desired_workflow is None:
        desired_workflow = Workflow.objects.filter(uri=desired_workflow_uri).first()

    # first check if any work needs to be done at all
    if (desired_workflow is not None) and (myTaskObject.workflow != desired_workflow):
        # set the new workflow
        myTaskObject.workflow = desired_workflow

        # temporarily disconnect the signal handlers, so this extra save does not trigger them recursively.
        disconnect_signals()
        myTaskObject.save()
        connect_signals()

    # dispatch a job if the status has changed.
    if (desired_status is not None) and (status != desired_status):
        jobs.dispatchJob(myTaskObject, desired_status)
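
# Usage sketch of the flow above (illustrative only: the task id and the status value
# 'processed' are assumptions, not values defined by this module):
#
#     task = Task.objects.get(id=42)
#     task.desired_status = 'processed'
#     task.save()   # pre_save promotes the status, records a Status entry and dispatches a job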

@receiver(post_save, sender=Task)
def post_save_task_handler(sender, **kwargs):
    # logger.info("SIGNAL : post_save Task(" + str(kwargs.get('instance')) + ")")
    handle_post_save(sender, **kwargs)

def handle_post_save(sender, **kwargs):
    """
    post_save handler for Task. Creates and writes its initial status.
    :param (in) sender: The model class that sends the trigger
    :param (in) kwargs: The instance of the object that sends the trigger.
    """
    logger.info("handle_post_save(" + str(kwargs.get('instance')) + ")")
    myTaskObject = kwargs.get('instance')

    # a new task was created
    if kwargs['created']:
        logger.info("save new " + str(myTaskObject.task_type))

        # set the initial status
        myTaskObject.status = myTaskObject.desired_status

        # add the new status to the status history by brewing a Status object out of it
        myStatus = Status(name=myTaskObject.desired_status, task=myTaskObject)
        myStatus.save()

        # temporarily disconnect the signal handlers, so the extra save below does not trigger them recursively.
        # post_save is used here (rather than pre_save) because the 'created' flag is the easiest way to
        # determine whether this task is new. (Checking the database would also work, but this is easier.)
        disconnect_signals()
        myTaskObject.save()
        connect_signals()
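
# Usage sketch of task creation (field values are illustrative; Task may require more
# fields than shown here):
#
#     task = Task(task_type='regular', desired_status='defined', workflow=my_workflow)
#     task.save()   # post_save stores the initial status and its Status history entry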

def connect_signals():
    # logger.info("connect_signals")
    pre_save.connect(pre_save_task_handler, sender=Task)
    post_save.connect(post_save_task_handler, sender=Task)


def disconnect_signals():
    # logger.info("disconnect_signals")
    pre_save.disconnect(pre_save_task_handler, sender=Task)
    post_save.disconnect(post_save_task_handler, sender=Task)
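
# The disconnect/save/connect pattern used by the handlers above can also be expressed as a
# context manager, which guarantees that the handlers are reconnected even if save() raises.
# This is only a sketch of that idea: it is not used elsewhere in this module, and the name
# 'task_signals_disabled' is an assumption, not something defined in this repository.
from contextlib import contextmanager

@contextmanager
def task_signals_disabled():
    """Temporarily disconnect the Task signal handlers, e.g. to avoid recursive saves."""
    disconnect_signals()
    try:
        yield
    finally:
        connect_signals()

# usage sketch:
#   with task_signals_disabled():
#       my_task.save()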