"""
This file contains the database models
"""

from django.db.models import ForeignKey, CharField, DateTimeField, BooleanField, IntegerField, BigIntegerField, \
    ManyToManyField, CASCADE, SET_NULL, PROTECT
from django.contrib.postgres.fields import ArrayField, JSONField
from django.contrib.auth.models import User
from .specification import AbstractChoice, BasicCommon, Template, NamedCommon # , TaskBlueprint
from rest_framework.serializers import HyperlinkedRelatedField
from django.dispatch import receiver

from lofar.sas.tmss.tmss.exceptions import *

import json
import jsonschema
from enum import Enum

    """
    Represents the relation between input and output of the Subtasks. Some of these relations implement the Task
    Relations. An input is tied to an output of another Subtask, and allows a filter to be specified.
    """
    role = ForeignKey('Role', null=False, on_delete=PROTECT)
    datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT)
    dataformats = ManyToManyField('Dataformat', blank=True)
    output_of = ForeignKey('SubtaskTemplate', related_name='inputs', blank=True, on_delete=PROTECT)
    input_of = ForeignKey('SubtaskTemplate', related_name='outputs', blank=True, on_delete=PROTECT)
class SubtaskState(AbstractChoice):
    """Defines the model and predefined list of possible SubtaskState's for Subtask.
    The items in the Choices class below are automagically populated into the database via a data migration."""
    class Choices(Enum):
        SCHEDULING = "scheduling"
        SCHEDULED = "scheduled"
        QUEUEING = "queueing"
        QUEUED = "queued"
        STARTING = "starting"
        STARTED = "started"
        FINISHING = "finishing"
        FINISHED = "finished"
        CANCELLING = "cancelling"
        CANCELLED = "cancelled"
        ERROR = "error"


class SubtaskType(AbstractChoice):
    """Defines the model and predefined list of possible SubtaskType's for Subtask.
    The items in the Choices class below are automagically populated into the database via a data migration."""
    class Choices(Enum):
        OBSERVATION = "observation"
        PIPELINE = "pipeline"
        COPY = "copy"
        INSPECTION = "inspection"
        DELETION = "deletion"
class StationType(AbstractChoice):
        """Defines the model and predefined list of possible StationType's for AntennaSet.
        The items in the Choices class below are automagically populated into the database via a data migration."""

        class Choices(Enum):
            CORE = "core"
            REMOTE = "remote"
            INTERNATIONAL = "international"


class Algorithm(AbstractChoice):
    """Defines the model and predefined list of possible Algorithm's for DataproductHash.
    The items in the Choices class below are automagically populated into the database via a data migration."""

    class Choices(Enum):
        MD5 = 'md5'
        AES256 = 'aes256'


class ScheduleMethod(AbstractChoice):
    """Defines the model and predefined list of possible ScheduleMethod's for Subtask.
    The items in the Choices class below are automagically populated into the database via a data migration."""

    class Choices(Enum):
        MANUAL = 'manual'
        BATCH = 'batch'
        DYNAMIC = 'dynamic'


class SubtaskTemplate(Template):
    type = ForeignKey('SubtaskType', null=False, on_delete=PROTECT)
    queue = BooleanField(default=False)
    realtime = BooleanField(default=False)


class DefaultSubtaskTemplate(BasicCommon):
    name = CharField(max_length=128, unique=True)
    template = ForeignKey('SubtaskTemplate', on_delete=PROTECT)


class DataproductSpecificationsTemplate(Template):
    pass


class DefaultDataproductSpecificationsTemplate(BasicCommon):
    name = CharField(max_length=128, unique=True)
    template = ForeignKey('DataproductSpecificationsTemplate', on_delete=PROTECT)


class SubtaskInputSelectionTemplate(Template):
    pass


class DataproductFeedbackTemplate(Template):
    pass

# todo: do we need to specify a default?

#
# Mix-Ins
#
class JSONValidatorMixin:
    """Validates a model's specifications_doc against the JSON schema of its specifications_template on every save."""
    def validate_specification_against_schema(self):
        if self.specifications_doc is None or self.specifications_template_id is None:
            return

        try:
            # ensure the specification and schema are both valid json in the first place
            spec = json.loads(self.specifications_doc) if isinstance(self.specifications_doc, str) else self.specifications_doc
            schema = json.loads(self.specifications_template.schema) if isinstance(self.specifications_template.schema, str) else self.specifications_template.schema
        except json.decoder.JSONDecodeError as e:
            raise SpecificationException("Invalid JSON: %s" % str(e))

        try:
            jsonschema.validate(spec, schema)
        except jsonschema.ValidationError as e:
            raise SpecificationException(str(e))

    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        '''override of normal save method, doing a validation of the specification against the schema first
        :raises SpecificationException in case the specification does not validate against the schema'''
        self.validate_specification_against_schema()
        super().save(force_insert, force_update, using, update_fields)
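
# Illustrative use of the mixin (a sketch, not part of the models; assumes a saved
# Subtask whose template schema requires, say, a 'stations' property):
#
#   subtask.specifications_doc = {"stations": ["CS001"]}
#   subtask.save()                      # validates against the schema, then persists
#
#   subtask.specifications_doc = "{not valid json"
#   subtask.save()                      # raises SpecificationException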

class Subtask(JSONValidatorMixin, BasicCommon):
    """
    Represents a low-level task, which is an atomic unit of execution, such as running an observation, running
    inspection plots on the observed data, etc. Each task has a specific configuration, will have resources allocated
    to it, and represents a single run.
    """
    start_time = DateTimeField(null=True, help_text='Start this subtask at the specified time (NULLable).')
    stop_time = DateTimeField(null=True, help_text='Stop this subtask at the specified time (NULLable).')
    state = ForeignKey('SubtaskState', null=False, on_delete=PROTECT, related_name='task_states', help_text='Subtask state (see Subtask State Machine).')
    specifications_doc = JSONField(help_text='Final specifications, as input for the controller.')
    task_blueprint = ForeignKey('TaskBlueprint', related_name='subtasks', null=True, on_delete=SET_NULL, help_text='Task Blueprint to which this Subtask belongs.')
    specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.')
    do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')
    priority = IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')
    schedule_method = ForeignKey('ScheduleMethod', null=False, on_delete=PROTECT, help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).')
    cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).')
    scheduler_input_doc = JSONField(help_text='Partial specifications, as input for the scheduler.')
    # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work?
    created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.')

    def __init__(self, *args, **kwargs):
        super(Subtask, self).__init__(*args, **kwargs)
        self.__original_state = self.state

    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        creating = self._state.adding  # True on create, False on update
        super().save(force_insert, force_update, using, update_fields)
        # log if either state update or new entry:
        if self.state != self.__original_state or creating:
            if self.created_or_updated_by_user is None:
                identifier = None
            else:
                identifier = self.created_or_updated_by_user.email
            log_entry = SubtaskStateLog(subtask=self, old_state=self.__original_state, new_state=self.state,
                                        user=self.created_or_updated_by_user, user_identifier=identifier)
            log_entry.save()
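
# Illustrative effect of the save() override above (a sketch; assumes AbstractChoice
# instances are looked up by their 'value'):
#
#   subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
#   subtask.save()  # -> one new SubtaskStateLog row (old_state=<previous>, new_state='scheduled')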


class SubtaskStateLog(BasicCommon):
    """
    History of state changes on subtasks.
    This is a rather specific solution, based on what SOS communicated they are regularly interested in.
    Maybe one or two additional log tables for other models are beneficial and should be added at some point.

    Note that we could of course also log on the db level, and there is also a variety of audit middlewares for Django
    available to keep track of changes more generally: https://djangopackages.org/grids/g/model-audit/
    That seems a bit overkill at the moment, and we would have to manage access to those logs etc.; this needs to be determined.
    """
    user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who changed the state of the subtask.')
    user_identifier = CharField(null=True, editable=False, max_length=128, help_text='The ID of the user who changed the state of the subtask.')
    subtask = ForeignKey('Subtask', null=False, editable=False, on_delete=CASCADE, help_text='Subtask to which this state change refers.')
    old_state = ForeignKey('SubtaskState', null=True, editable=False, on_delete=PROTECT, related_name='is_old_state_of', help_text='Subtask state before update (see Subtask State Machine).')
    new_state = ForeignKey('SubtaskState', null=False, editable=False, on_delete=PROTECT, related_name='is_new_state_of', help_text='Subtask state after update (see Subtask State Machine).')


class SubtaskInput(BasicCommon):
    subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, help_text='Subtask to which this input specification refers.')
    task_relation_blueprint = ForeignKey('TaskRelationBlueprint', null=True, on_delete=SET_NULL, help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).')
    connector = ForeignKey('SubtaskConnector', null=True, on_delete=SET_NULL, help_text='Which connector this Task Input implements.')
    producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, help_text='The Subtask Output providing the input dataproducts.')
    dataproducts = ManyToManyField('Dataproduct', help_text='The Dataproducts resulting from application of the filter at the time of scheduling. Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. We thus store the result of this filtering directly, to retain which input was specified for the task.')
    selection_doc = JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')
    selection_template = ForeignKey('SubtaskInputSelectionTemplate', on_delete=PROTECT, help_text='Schema used for selection_doc.')
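
# Illustrative selection_doc (a sketch; the real structure is dictated by the
# SubtaskInputSelectionTemplate schema, and these keys are hypothetical):
#
#   {"sap": [0], "subbands": [0, 1, 2]}
#
# i.e. a filter applied to the producer's dataproducts at scheduling time.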


class SubtaskOutput(BasicCommon):
    subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, help_text='Subtask to which this output specification refers.')
    connector = ForeignKey('SubtaskConnector', null=True, on_delete=SET_NULL, help_text='Which connector this Subtask Output implements.')
    """
    A data product represents an atomic dataset, produced and consumed by subtasks. The consumed dataproducts are those
    resulting from interpreting the Subtask Connector filters of the inputs. These links are explicitly saved, should
    the interpretation of the filter change over time. The produced dataproducts enumerate everything produced by a
    Subtask.
    """
    filename = CharField(max_length=128, help_text='Name of the file (or top-level directory) of the dataproduct. Adheres to a naming convention, but is not meant for parsing.')
    directory = CharField(max_length=1024, help_text='Directory where this dataproduct is (to be) stored.')
    dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT)
    deleted_since = DateTimeField(null=True, help_text='When this dataproduct was removed from disk, or NULL if not deleted (NULLable).')
    pinned_since = DateTimeField(null=True, help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).')
    specifications_doc = JSONField(help_text='Dataproduct properties (e.g. beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.')
    specifications_template = ForeignKey('DataproductSpecificationsTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.')
    producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, help_text='Subtask Output which generates this dataproduct.')
    do_cancel = DateTimeField(null=True, help_text='When this dataproduct was cancelled (NULLable).  Cancelling a dataproduct triggers cleanup if necessary.')
    expected_size = BigIntegerField(null=True, help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).')
    size = BigIntegerField(null=True, help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).')
    feedback_doc = JSONField(help_text='Dataproduct properties, as reported by the producing process.')
    feedback_template = ForeignKey('DataproductFeedbackTemplate', on_delete=PROTECT, help_text='Schema used for feedback_doc.')


class AntennaSet(NamedCommon):
    station_type = ForeignKey('StationType', null=False, on_delete=PROTECT)
    rcus = ArrayField(IntegerField(), size=128, blank=False)
    inputs = ArrayField(CharField(max_length=128), size=128, blank=True)
    """
    Each output dataproduct of a Subtask is linked to the input dataproducts that are used to produce it.
    These transforms encode the provenance information needed when tracking dependencies between dataproducts.
    """
    input = ForeignKey('Dataproduct', related_name='inputs', on_delete=PROTECT, help_text='A dataproduct that was the input of a transformation.')
    output = ForeignKey('Dataproduct',  related_name='outputs', on_delete=PROTECT, help_text='A dataproduct that was produced from the input dataproduct.')
    identity = BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. Allows for efficient reasoning about data duplication.')
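
# Illustrative provenance lookup (a sketch, using the fields defined above):
#
#   transforms = DataproductTransform.objects.filter(output=some_dataproduct)
#   direct_inputs = [t.input for t in transforms]
#
# Recursing on each input's own transforms walks the full ancestry of a dataproduct.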


class Filesystem(NamedCommon):
    capacity = BigIntegerField(help_text='Capacity in bytes')
    cluster = ForeignKey('Cluster', on_delete=PROTECT, help_text='Cluster hosting this filesystem.')


class Cluster(NamedCommon):
    location = CharField(max_length=128, help_text='Human-readable location of the cluster.')


class DataproductArchiveInfo(BasicCommon):
    dataproduct = ForeignKey('Dataproduct', on_delete=PROTECT, help_text='A dataproduct residing in the archive.')
    storage_ticket = CharField(max_length=128, help_text='Archive-system identifier.')
    public_since = DateTimeField(null=True, help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).')
    corrupted_since = DateTimeField(null=True, help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).')


class DataproductHash(BasicCommon):
    dataproduct = ForeignKey('Dataproduct', on_delete=PROTECT, help_text='The dataproduct to which this hash refers.')
    algorithm = ForeignKey('Algorithm', null=False, on_delete=PROTECT, help_text='Algorithm used (MD5, AES256).')
    hash = CharField(max_length=128, help_text='Hash value.')