diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py
index 98fc0174fa93a45b9bc645e9085fb4d680c3ea37..4dcf57e8dae0e610460d2c71b365d00470132266 100644
--- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py
+++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py
@@ -138,6 +138,12 @@ class TMSSEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, TMSSEventMess
         TMSSEventMessageHandler.start_handling(self)
         self.tmss_client.open()
 
+        # enqueue all runnable ingest subtasks upon startup. (should be none in normal circumstances)
+        # this may result in dataproducts being submitted to ingestjobmanager multiple times.
+        # That's ok. Ingest deals with that.
+        self.queue_all_runnable_ingest_subtasks()
+
+
     def stop_handling(self):
         TMSSEventMessageHandler.stop_handling(self)
         UsingToBusMixin.stop_handling(self)
@@ -155,45 +161,69 @@ class TMSSEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, TMSSEventMess
         super().onSubTaskStatusChanged(id, status)
 
         if status == 'scheduled':
-            subtask = self.tmss_client.get_subtask(id)
-            if subtask['subtask_type'] == 'ingest':
-                input_dataproducts = self.tmss_client.get_url_as_json_object(subtask['url'] + '/input_dataproducts')
-                logger.info("TMSS Ingest subtask id=%s was scheduled. Creating and enqueing ingest jobs for all %s dataproducts...", id, len(input_dataproducts))
+            self.queue_ingest_subtask(id)
+
+    def queue_ingest_subtask(self, subtask_id: int):
+        subtask = self.tmss_client.get_subtask(subtask_id)
+        if subtask['subtask_type'] == 'ingest':
+            input_dataproducts = self.tmss_client.get_url_as_json_object(subtask['url'] + '/input_dataproducts')
+            logger.info("TMSS Ingest subtask id=%s was scheduled. Creating and enqueueing ingest jobs for all %s dataproducts...", subtask_id, len(input_dataproducts))
+            if subtask['state_value'] != 'queueing':
                 self.tmss_client.set_subtask_status(subtask['id'], 'queueing')
 
-                # gather all relevant and needed info...
-                task_blueprint = self.tmss_client.get_url_as_json_object(subtask['task_blueprints'][0])
-                task_draft = self.tmss_client.get_url_as_json_object(task_blueprint['draft'])
-                scheduling_unit_draft = self.tmss_client.get_url_as_json_object(task_draft['scheduling_unit_draft'])
-                scheduling_set = self.tmss_client.get_url_as_json_object(scheduling_unit_draft['scheduling_set'])
-                project = self.tmss_client.get_url_as_json_object(scheduling_set['project'])
-
-                # create an ingest xml job for each input dataproduct
-                # store the jobs in a list, and submit them in one go to the queue
-                jobs = []
-                for input_dp in input_dataproducts:
-                    dp_global_identifier = self.tmss_client.get_url_as_json_object(input_dp['global_identifier'])
+            # gather all relevant and needed info...
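+            # (the project name needed for each ingest job is not a direct attribute of the subtask;
+            #  it is resolved by walking the REST references: task_blueprint -> draft -> scheduling_unit_draft -> scheduling_set -> project)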
+            task_blueprint = self.tmss_client.get_url_as_json_object(subtask['task_blueprint'])
+            task_draft = self.tmss_client.get_url_as_json_object(task_blueprint['draft'])
+            scheduling_unit_draft = self.tmss_client.get_url_as_json_object(task_draft['scheduling_unit_draft'])
+            scheduling_set = self.tmss_client.get_url_as_json_object(scheduling_unit_draft['scheduling_set'])
+            project = self.tmss_client.get_url_as_json_object(scheduling_set['project'])
+
+            # create an ingest xml job for each input dataproduct
+            # store the jobs in a list, and submit them in one go to the queue
+            jobs = []
+
+            # use cache for properties that are the same for many dataproducts (saves many tmss rest calls)
+            producer_cache = {}
+            producing_subtask_cache = {}
+
+            for input_dp in input_dataproducts:
+                try:
+                    producer = producer_cache[input_dp['producer_id']]
+                except KeyError:
                     producer = self.tmss_client.get_url_as_json_object(input_dp['producer'])
+                    producer_cache[input_dp['producer_id']] = producer
+
+                try:
+                    producing_subtask = producing_subtask_cache[producer['subtask_id']]
+                except KeyError:
                     producing_subtask = self.tmss_client.get_url_as_json_object(producer['subtask'])
+                    producing_subtask_cache[producer['subtask_id']] = producing_subtask
+
+                job = createJobXml(project_name=project['name'],
+                                   obs_id=producing_subtask['id'], # the name 'obs_id' is somewhat misleading, but that's a legacy name 'forced' by MoM/OTDB. TODO: refactor when removing MoM/OTDB.
+                                   dataproduct_name=input_dp['filename'],
+                                   archive_id=input_dp['global_identifier_id'],
+                                   location=subtask['cluster_name'] + ':' + input_dp['filepath'],
+                                   tmss_ingest_subtask_id=subtask['id'],
+                                   tmss_input_dataproduct_id=input_dp['id'])
+                jobs.append(job)
+
+            # submit all jobs in one go to the ingest-incoming-job-queue
+            for job in jobs:
+                msg = CommandMessage(content=job, subject=DEFAULT_INGEST_INCOMING_JOB_SUBJECT)
+                logger.info('submitting job %s to exchange %s with subject %s at broker %s',
+                            parseJobXml(job)['JobId'], self._tobus.exchange, msg.subject, self._tobus.broker)
+                self.send(msg)
+
+            self.tmss_client.set_subtask_status(subtask['id'], 'queued')
+            logger.info("Created and enqueued ingest jobs for all dataproducts in TMSS Ingest subtask id=%s", subtask_id)
+
+    def queue_all_runnable_ingest_subtasks(self):
+        runnable_states = ['queueing', 'scheduled']
+        for state in runnable_states:
+            for ingest_subtask in self.tmss_client.get_subtasks(state=state, subtask_type='ingest'):
+                self.queue_ingest_subtask(ingest_subtask['id'])
 
-                    job = createJobXml(project_name=project['name'],
-                                       obs_id=producing_subtask['id'], # the name 'obs_id' is somewhat misleading, but that's a legacy name 'forced' by MoM/OTDB. TODO: refactor when removing MoM/OTDB.
-                                       dataproduct_name=input_dp['filename'],
-                                       archive_id=dp_global_identifier['unique_identifier'],
-                                       location=subtask['cluster_name']+':'+os.path.join(input_dp['directory'], input_dp['filename']),
-                                       tmss_ingest_subtask_id=subtask['id'],
-                                       tmss_input_dataproduct_id=input_dp['id'])
-                    jobs.append(job)
-
-                # submit all jobs to the in one go to ingest-incoming-job-queue
-                for job in jobs:
-                    msg = CommandMessage(content=job, subject=DEFAULT_INGEST_INCOMING_JOB_SUBJECT)
-                    logger.info('submitting job %s to exchange %s with subject %s at broker %s',
-                                parseJobXml(job)['JobId'], self._tobus.exchange, msg.subject, self._tobus.broker)
-                    self.send(msg)
-
-                self.tmss_client.set_subtask_status(subtask['id'], 'queued')
-                logger.info("Created and enqueued ingest jobs for all dataproducts in TMSS Ingest subtask id=%s", id)
 
 
 class IngestTMSSAdapter:
diff --git a/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py
index f107a47b8ebb97185997b4def131ce7a53f9e4e1..6921f03f0dda72667dff9138b79305be8655c530 100644
--- a/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py
+++ b/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py
@@ -191,11 +191,11 @@ def do_dynamic_schedule() -> models.SchedulingUnitBlueprint:
 
     # determine next possible start time for remaining scheduling_units
     if scheduled_unit:
-        lower_bound_start_time = scheduled_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP
+        lower_bound_start_time = scheduled_unit.scheduled_on_sky_stop_time + DEFAULT_INTER_OBSERVATION_GAP
     else:
         try:
             scheduled_units = get_scheduled_scheduling_units(datetime.utcnow(), datetime.utcnow())
-            lower_bound_start_time = max([s.stop_time for s in scheduled_units if s.stop_time is not None]) + DEFAULT_INTER_OBSERVATION_GAP
+            lower_bound_start_time = max([s.scheduled_on_sky_stop_time for s in scheduled_units if s.scheduled_on_sky_stop_time is not None]) + DEFAULT_INTER_OBSERVATION_GAP
         except:
             lower_bound_start_time = datetime.utcnow()
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0010_subtaskinputoutput.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0010_subtaskinputoutput.py
new file mode 100644
index 0000000000000000000000000000000000000000..bff0eb7b9cba54bbdf20d99585286e7432faccfc
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0010_subtaskinputoutput.py
@@ -0,0 +1,45 @@
+# Generated by Django 3.0.9 on 2021-10-28 07:20
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('tmssapp', '0009_populate_subtask_allowed_state_transitions_extra'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='subtaskoutput',
+            name='output_role',
+            field=models.ForeignKey(help_text='Output connector type (what kind of data is taken from the producer).', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subtask_outputs', to='tmssapp.TaskConnectorType'),
+        ),
+        migrations.AlterField(
+            model_name='dataproducttransform',
+            name='input',
+            field=models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='consumer_transforms', to='tmssapp.Dataproduct'),
+        ),
+        migrations.AlterField(
+            model_name='dataproducttransform',
+            name='output',
+            field=models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='producer_transforms',
to='tmssapp.Dataproduct'), + ), + migrations.AlterField( + model_name='subtaskinput', + name='dataproducts', + field=models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. We thus store the result of this filtering directly to retain which input was specified for the task..', related_name='consumers', to='tmssapp.Dataproduct'), + ), + migrations.RemoveField( + model_name='subtaskinput', + name='task_relation_blueprint', + ), + migrations.AddField( + model_name='subtaskinput', + name='input_role', + field=models.ForeignKey(help_text='Input connector type (what kind of data can be consumed).', null=True, + on_delete=django.db.models.deletion.CASCADE, related_name='subtask_inputs', + to='tmssapp.TaskConnectorType'), + ), + ] diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py index ba5e610b7522f3bf6499ddd44da78a0a6f8f7998..71e988bfb99cfaccc7c11ea19b42533424ec2263 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py @@ -12,7 +12,7 @@ from django.db.models import Model, ForeignKey, OneToOneField, CharField, DateTi ManyToManyField, CASCADE, SET_NULL, PROTECT, QuerySet, BigAutoField, UniqueConstraint from django.contrib.postgres.fields import ArrayField, JSONField from .permissions import TMSSUser as User -from .common import AbstractChoice, BasicCommon, Template, NamedCommon, TemplateSchemaMixin, ProjectPropertyMixin +from .common import AbstractChoice, BasicCommon, Template, NamedCommon, TemplateSchemaMixin, ProjectPropertyMixin, RefreshFromDbInvalidatesCachedPropertiesMixin from enum import Enum from django.db.models.expressions import RawSQL from django.core.exceptions import ValidationError @@ -22,6 +22,7 @@ from django.conf import settings from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC import uuid from django.db.models.functions import Coalesce +from django.utils.functional import cached_property # # I/O @@ -195,7 +196,11 @@ class Subtask(BasicCommon, ProjectPropertyMixin, TemplateSchemaMixin): if self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value: # pipelines usually do not have a specified duration, so make a guess (half the obs duration?). # In case we have several associated tasks: this guess is probably in no way accurate anyway, so we assume it does not really matter which task blueprint we refer to here - return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2)) + try: + return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2)) + except ValueError: + # no predecessors (should not happen). Return a default guess. + return timedelta(minutes=1) # other subtasktypes usually depend on cpu/data/network etc. 
So, make a guess (for now) return timedelta(minutes=5) @@ -234,7 +239,7 @@ class Subtask(BasicCommon, ProjectPropertyMixin, TemplateSchemaMixin): '''return the input dataproducts(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) If you want the result, add .all() like so: my_subtask.input_dataproducts.all() ''' - return Dataproduct.objects.filter(subtaskinput__subtask_id=self.id) + return Dataproduct.objects.filter(consumers__subtask_id=self.id) @property def output_dataproducts(self) -> QuerySet: @@ -252,11 +257,11 @@ class Subtask(BasicCommon, ProjectPropertyMixin, TemplateSchemaMixin): def get_transformed_input_dataproduct(self, output_dataproduct_id: int) -> 'Dataproduct': '''return the transformed input dataproduct for the given output_dataproduct_id.''' - return self.input_dataproducts.get(consumers__output_id=output_dataproduct_id) + return self.input_dataproducts.get(consumer_transforms__output_id=output_dataproduct_id) def get_transformed_output_dataproduct(self, input_dataproduct_id: int) -> 'Dataproduct': '''return the transformed output dataproduct for the given input_dataproduct_id.''' - return self.output_dataproducts.get(producers__input_id=input_dataproduct_id) + return self.output_dataproducts.get(producer_transforms__input_id=input_dataproduct_id) @property def is_feedback_complete(self) -> bool: @@ -418,9 +423,9 @@ class SubtaskStateLog(BasicCommon): class SubtaskInput(BasicCommon, TemplateSchemaMixin): subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='inputs', help_text='Subtask to which this input specification refers.') - task_relation_blueprint = ForeignKey('TaskRelationBlueprint', null=True, on_delete=SET_NULL, help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).') + input_role = ForeignKey('TaskConnectorType', null=True, related_name='subtask_inputs', on_delete=CASCADE, help_text='Input connector type (what kind of data can be consumed).') producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name='consumers', help_text='The SubtaskOutput producing the input dataproducts for this SubtaskInput.') - dataproducts = ManyToManyField('Dataproduct', help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. We thus store the result of this filtering directly to retain which input was specified for the task..') + dataproducts = ManyToManyField('Dataproduct', related_name='consumers', help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. 
We thus store the result of this filtering directly to retain which input was specified for the task..') selection_doc = JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.') selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=PROTECT, help_text='Schema used for selection_doc.') @@ -431,6 +436,7 @@ class SubtaskInput(BasicCommon, TemplateSchemaMixin): class SubtaskOutput(BasicCommon): subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='outputs', help_text='Subtask to which this output specification refers.') + output_role = ForeignKey('TaskConnectorType', null=True, related_name='subtask_outputs', on_delete=CASCADE, help_text='Output connector type (what kind of data is taken from the producer).') class SAP(BasicCommon, TemplateSchemaMixin): @@ -445,7 +451,7 @@ class SAP(BasicCommon, TemplateSchemaMixin): super().save(force_insert, force_update, using, update_fields) -class Dataproduct(BasicCommon, TemplateSchemaMixin): +class Dataproduct(TemplateSchemaMixin, RefreshFromDbInvalidatesCachedPropertiesMixin, BasicCommon): """ A data product represents an atomic dataset, produced and consumed by subtasks. The consumed dataproducts are those resulting from interpreting the Subtask Connector filters of the inputs. These links are explicitly saved, should @@ -476,7 +482,7 @@ class Dataproduct(BasicCommon, TemplateSchemaMixin): SIPidentifier.assign_new_id_to_model(self) super().save(force_insert, force_update, using, update_fields) - @property + @cached_property def filepath(self): '''return the full path of the dataproduct''' return os.path.join(self.directory, self.filename) @@ -492,8 +498,8 @@ class DataproductTransform(BasicCommon): Each output dataproduct of a Subtask is linked to the input dataproducts that are used to produce it. These transforms encode the provenance information needed when tracking dependencies between dataproducts. """ - input = ForeignKey('Dataproduct', related_name='consumers', on_delete=PROTECT, help_text='A dataproduct that was the input of a transformation.') - output = ForeignKey('Dataproduct', related_name='producers', on_delete=PROTECT, help_text='A dataproduct that was produced from the input dataproduct.') + input = ForeignKey('Dataproduct', related_name='consumer_transforms', on_delete=PROTECT, help_text='A dataproduct that was the input of a transformation.') + output = ForeignKey('Dataproduct', related_name='producer_transforms', on_delete=PROTECT, help_text='A dataproduct that was produced from the input dataproduct.') identity = BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. 
Allows for efficient reasoning about data duplication.') diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py index 621c1f82ff7faf396782bd636b1d2f578e181701..bda5dca95884460dd655f123642fab5070b5f177 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py @@ -291,11 +291,23 @@ class SchedulingUnitObservingStrategyTemplate(BaseStrategyTemplate): cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE) # add the default constraints given the scheduling_constraints_template - if 'scheduling_constraints_template' in template_doc: - constraints_template = SchedulingConstraintsTemplate.objects.get(name=template_doc.get('scheduling_constraints_template')) - constraints_doc = add_defaults_to_json_object_for_schema(template_doc.get('scheduling_constraints_doc', {}), constraints_template.schema, - cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE) - template_doc['scheduling_constraints_doc'] = constraints_doc + if 'scheduling_constraints_template' not in template_doc: + try: + # try to use a known default SchedulingConstraintsTemplate... + constraints_template = SchedulingConstraintsTemplate.objects.get(name='constraints', version=1) + except SchedulingConstraintsTemplate.DoesNotExist: + # or use the latest if the default does not exist + constraints_template = SchedulingConstraintsTemplate.objects.order_by('-updated_at').first() + logger.info("SchedulingUnitObservingStrategyTemplate.template_doc_complete_with_defaults id=%s using SchedulingConstraintsTemplate name='%s' version=%s as default because none was specified.", self.id, constraints_template.name, constraints_template.version) + + # inject name/version into template_doc + template_doc['scheduling_constraints_template'] = {"name": constraints_template.name, "version": constraints_template.version} + + constraints_template = SchedulingConstraintsTemplate.objects.get(name=template_doc['scheduling_constraints_template']['name'], + version=template_doc['scheduling_constraints_template'].get('version',1)) + constraints_doc = add_defaults_to_json_object_for_schema(template_doc.get('scheduling_constraints_doc', {}), constraints_template.schema, + cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE) + template_doc['scheduling_constraints_doc'] = constraints_doc return template_doc @@ -620,8 +632,9 @@ class SchedulingUnitDraft(NamedCommon, TemplateSchemaMixin, ProjectPropertyMixin # copy the scheduling_constraints if self.scheduling_constraints_template: - specifications_doc['scheduling_constraints_template'] = self.scheduling_constraints_template.name - specifications_doc['scheduling_constraints_doc'] = self.scheduling_constraints_doc or {} + specifications_doc['scheduling_constraints_template'] = {'name': self.scheduling_constraints_template.name, + 'version': self.scheduling_constraints_template.version} + specifications_doc['scheduling_constraints_doc'] = self.scheduling_constraints_doc or self.scheduling_constraints_template.get_default_json_document_for_schema() # if this scheduling unit was created from an observation_strategy_template, then copy the parameters list if self.observation_strategy_template and self.observation_strategy_template.template: @@ -956,8 +969,9 @@ class SchedulingUnitBlueprint(ProjectPropertyMixin, TemplateSchemaMixin, NamedCo if 
self.scheduling_constraints_template: # copy the scheduling_constraints - specifications_doc['scheduling_constraints_doc'] = self.scheduling_constraints_doc - specifications_doc['scheduling_constraints_template'] = self.scheduling_constraints_template.name + specifications_doc['scheduling_constraints_template'] = {'name': self.scheduling_constraints_template.name, + 'version': self.scheduling_constraints_template.version} + specifications_doc['scheduling_constraints_doc'] = self.scheduling_constraints_doc or self.scheduling_constraints_template.get_default_json_document_for_schema() # if this scheduling unit was created from an observation_strategy_template, then copy the parameters list if self.draft.observation_strategy_template and self.draft.observation_strategy_template.template: diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Beamforming_(Complex_Voltages)_Observation-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Beamforming_(Complex_Voltages)_Observation-1.json index 6baf4aff5978b78d4c0e68ee77c7bf7c8af8c01f..0c5454ac2fc6d5b680c4d45942a94c00bbe465ec 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Beamforming_(Complex_Voltages)_Observation-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Beamforming_(Complex_Voltages)_Observation-1.json @@ -70,7 +70,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [], "task_scheduling_relations": [], "tasks": { diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/HBA_single_beam_imaging-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/HBA_single_beam_imaging-1.json index 13072a37756baab29c06a1394446c57a651b965b..934c2548121450208826f0dda6c611488a6ab1ca 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/HBA_single_beam_imaging-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/HBA_single_beam_imaging-1.json @@ -88,7 +88,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [ { "consumer": "Calibrator Pipeline 1", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/IM_LBA_Survey_Strategy_-_3_Beams-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/IM_LBA_Survey_Strategy_-_3_Beams-1.json index cfbc92e551fe5e000d3d57556ed7d963582dd7ee..fc962334e52588de1989d026f938009364398b81 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/IM_LBA_Survey_Strategy_-_3_Beams-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/IM_LBA_Survey_Strategy_-_3_Beams-1.json @@ -7,6 +7,12 @@ }, "template": { "parameters": [ + { + "name": "Scheduling Constraints", + "refs": [ + "#/scheduling_constraints_doc" + ] + }, { "name": "Target Pointing 1", "refs": [ diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/LoTSS_Observing_strategy-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/LoTSS_Observing_strategy-1.json index 
99a21f92089d612bef4f40b2b2ec6e3e5d3cea61..7b0babfde967c59a8ae7f149fa75ae67c0d32be8 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/LoTSS_Observing_strategy-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/LoTSS_Observing_strategy-1.json @@ -82,7 +82,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [ { "consumer": "Calibrator Pipeline 1", @@ -133,7 +133,7 @@ "producer": "Target Observation", "selection_doc": { "sap": [ - "target1" + "sap1" ] }, "selection_template": "SAP", @@ -154,7 +154,7 @@ "producer": "Target Observation", "selection_doc": { "sap": [ - "target2" + "sap2" ] }, "selection_template": "SAP", @@ -412,7 +412,7 @@ "direction_type": "J2000", "target": "PXXX+YY" }, - "name": "target1", + "name": "sap1", "subbands": [ 104, 105, @@ -666,7 +666,7 @@ "direction_type": "J2000", "target": "PXXX+YY" }, - "name": "target2", + "name": "sap2", "subbands": [ 104, 105, diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Pulsar_timing-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Pulsar_timing-1.json index 375899c8addc7320295ae531de381b0d6b0b707c..68fae2c12fce302e097b55ea1c1f7a00ee95183d 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Pulsar_timing-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Pulsar_timing-1.json @@ -52,7 +52,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [ { "consumer": "Pipeline", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Responsive_Telescope_HBA_LoTSS-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Responsive_Telescope_HBA_LoTSS-1.json index f6d8ca5fe91f4834fe9508b6c7b8d90a2d7e31a6..a037d704e927a8452ed3a4840c9bbd730e487e69 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Responsive_Telescope_HBA_LoTSS-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Responsive_Telescope_HBA_LoTSS-1.json @@ -6,581 +6,572 @@ "version": 1 }, "template": { - "description": "This observation strategy template defines a similar observation strategy as for LoTSS, but then with a single Calibrator at the end so that the Target Observation can start immediately once the trigger is submitted.", - "name": "Responsive Telescope HBA LoTSS", - "scheduling_unit_template": { - "name": "scheduling unit", - "version": 1 + "parameters": [ + { + "name": "Scheduling Constraints", + "refs": [ + "#/scheduling_constraints_doc/time" + ] + }, + { + "name": "Target Name", + "refs": [ + "#/tasks/Target Observation/specifications_doc/SAPs/0/name" + ] + }, + { + "name": "Target Pointing", + "refs": [ + "#/tasks/Target Observation/specifications_doc/SAPs/0/digital_pointing" + ] + }, + { + "name": "Subbands", + "refs": [ + "#/tasks/Target Observation/specifications_doc/SAPs/0/subbands" + ] + }, + { + "name": "Tile Beam", + "refs": [ + "#/tasks/Target Observation/specifications_doc/tile_beam" + ] + }, + { + "name": "Target Duration", + "refs": [ + "#/tasks/Target Observation/specifications_doc/duration" + ] + }, + { + "name": "Calibrator 
Name", + "refs": [ + "#/tasks/Calibrator Observation 2/specifications_doc/name" + ] + }, + { + "name": "Calibrator Pointing", + "refs": [ + "#/tasks/Calibrator Observation 2/specifications_doc/pointing" + ] + }, + { + "name": "Calibrator Duration", + "refs": [ + "#/tasks/Calibrator Observation 2/specifications_doc/duration" + ] + } + ], + "scheduling_constraints_doc": { + "sky": { + "transit_offset": { + "from": -86400, + "to": 86400 + } + }, + "time": { + "between": [] + } }, - "template": { - "parameters": [ - { - "name": "Scheduling Constraints", - "refs": [ - "#/scheduling_constraints_doc/time" - ] + "scheduling_constraints_template": "constraints", + "task_relations": [ + { + "consumer": "Calibrator Pipeline", + "input": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "any" }, - { - "name": "Target Name", - "refs": [ - "#/tasks/Target Observation/specifications_doc/SAPs/0/name" - ] + "output": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "correlator" }, - { - "name": "Target Pointing", - "refs": [ - "#/tasks/Target Observation/specifications_doc/SAPs/0/digital_pointing" - ] + "producer": "Calibrator Observation", + "selection_doc": {}, + "selection_template": "all", + "tags": [] + }, + { + "consumer": "Target Pipeline", + "input": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "any" }, - { - "name": "Subbands", - "refs": [ - "#/tasks/Target Observation/specifications_doc/SAPs/0/subbands" - ] + "output": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "correlator" }, - { - "name": "Tile Beam", - "refs": [ - "#/tasks/Target Observation/specifications_doc/tile_beam" + "producer": "Target Observation", + "selection_doc": { + "sap": [ + "target1" ] }, - { - "name": "Target Duration", - "refs": [ - "#/tasks/Target Observation/specifications_doc/duration" - ] + "selection_template": "SAP", + "tags": [] + }, + { + "consumer": "Ingest", + "input": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "any" }, - { - "name": "Calibrator Name", - "refs": [ - "#/tasks/Calibrator Observation 2/specifications_doc/name" - ] + "output": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "any" }, - { - "name": "Calibrator Pointing", - "refs": [ - "#/tasks/Calibrator Observation 2/specifications_doc/pointing" - ] + "producer": "Calibrator Pipeline", + "selection_doc": {}, + "selection_template": "all", + "tags": [] + }, + { + "consumer": "Ingest", + "input": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "any" }, - { - "name": "Calibrator Duration", - "refs": [ - "#/tasks/Calibrator Observation 2/specifications_doc/duration" - ] - } - ], - "scheduling_constraints_doc": { - "sky": { - "transit_offset": { - "from": -86400, - "to": 86400 + "output": { + "dataformat": "MeasurementSet", + "datatype": "visibilities", + "role": "any" + }, + "producer": "Target Pipeline", + "selection_doc": {}, + "selection_template": "all", + "tags": [] + } + ], + "task_scheduling_relations": [ + { + "first": "Calibrator Observation", + "placement": "after", + "second": "Target Observation", + "time_offset": 60 + } + ], + "tasks": { + "Calibrator Observation": { + "description": "Calibrator Observation after Target Observation", + "specifications_doc": { + "autoselect": false, + "duration": 600, + "name": "calibrator", + "pointing": { + "angle1": 0.6624317181687094, + "angle2": 1.5579526427549426, + "direction_type": "J2000", + "target": 
"PXXX+YY" } }, - "time": { - "between": [] - } - }, - "scheduling_constraints_template": "constraints", - "task_relations": [ - { - "consumer": "Calibrator Pipeline", - "input": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "any" - }, - "output": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "correlator" - }, - "producer": "Calibrator Observation", - "selection_doc": {}, - "selection_template": "all", - "tags": [] + "specifications_template": { + "name": "calibrator observation" }, - { - "consumer": "Target Pipeline", - "input": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "any" + "tags": [] + }, + "Calibrator Pipeline": { + "description": "Preprocessing Pipeline for Calibrator Observation", + "specifications_doc": { + "average": { + "frequency_steps": 4, + "time_steps": 1 }, - "output": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "correlator" + "demix": { + "frequency_steps": 64, + "ignore_target": false, + "sources": {}, + "time_steps": 10 }, - "producer": "Target Observation", - "selection_doc": { - "sap": [ - "target1" - ] + "flag": { + "autocorrelations": true, + "outerchannels": true, + "rfi_strategy": "HBAdefault" }, - "selection_template": "SAP", - "tags": [] + "storagemanager": "dysco" }, - { - "consumer": "Ingest", - "input": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "any" - }, - "output": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "any" - }, - "producer": "Calibrator Pipeline", - "selection_doc": {}, - "selection_template": "all", - "tags": [] + "specifications_template": { + "name": "preprocessing pipeline" }, - { - "consumer": "Ingest", - "input": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "any" - }, - "output": { - "dataformat": "MeasurementSet", - "datatype": "visibilities", - "role": "any" - }, - "producer": "Target Pipeline", - "selection_doc": {}, - "selection_template": "all", - "tags": [] - } - ], - "task_scheduling_relations": [ - { - "first": "Calibrator Observation", - "placement": "after", - "second": "Target Observation", - "time_offset": 60 - } - ], - "tasks": { - "Calibrator Observation": { - "description": "Calibrator Observation after Target Observation", - "specifications_doc": { - "autoselect": false, - "duration": 600, - "name": "calibrator", - "pointing": { - "angle1": 0.6624317181687094, - "angle2": 1.5579526427549426, - "direction_type": "J2000", - "target": "PXXX+YY" - } - }, - "specifications_template": { - "name": "calibrator observation" - }, - "tags": [] + "tags": [] + }, + "Ingest": { + "description": "Ingest all preprocessed dataproducts", + "specifications_doc": {}, + "specifications_template": { + "name": "ingest" }, - "Calibrator Pipeline": { - "description": "Preprocessing Pipeline for Calibrator Observation", - "specifications_doc": { - "average": { - "frequency_steps": 4, - "time_steps": 1 - }, - "demix": { - "frequency_steps": 64, - "ignore_target": false, - "sources": {}, - "time_steps": 10 - }, - "flag": { - "autocorrelations": true, - "outerchannels": true, - "rfi_strategy": "HBAdefault" + "tags": [] + }, + "Target Observation": { + "description": "Target Observation", + "specifications_doc": { + "QA": { + "file_conversion": { + "enabled": true, + "nr_of_subbands": -1, + "nr_of_timestamps": 256 }, - "storagemanager": "dysco" - }, - "specifications_template": { - "name": "preprocessing pipeline" - }, - "tags": [] - }, - 
"Ingest": { - "description": "Ingest all preprocessed dataproducts", - "specifications_doc": {}, - "specifications_template": { - "name": "ingest" + "inspection_plots": "msplots", + "plots": { + "autocorrelation": true, + "crosscorrelation": true, + "enabled": true + } }, - "tags": [] - }, - "Target Observation": { - "description": "Target Observation", - "specifications_doc": { - "QA": { - "file_conversion": { - "enabled": true, - "nr_of_subbands": -1, - "nr_of_timestamps": 256 + "SAPs": [ + { + "digital_pointing": { + "angle1": 0.6624317181687094, + "angle2": 1.5579526427549426, + "direction_type": "J2000", + "target": "PXXX+YY" }, - "inspection_plots": "msplots", - "plots": { - "autocorrelation": true, - "crosscorrelation": true, - "enabled": true - } - }, - "SAPs": [ - { - "digital_pointing": { - "angle1": 0.6624317181687094, - "angle2": 1.5579526427549426, - "direction_type": "J2000", - "target": "PXXX+YY" - }, - "name": "target", - "subbands": [ - 104, - 105, - 106, - 107, - 108, - 109, - 110, - 111, - 112, - 113, - 114, - 115, - 116, - 117, - 118, - 119, - 120, - 121, - 122, - 123, - 124, - 125, - 126, - 127, - 128, - 129, - 130, - 131, - 132, - 133, - 134, - 135, - 136, - 137, - 138, - 139, - 140, - 141, - 142, - 143, - 144, - 145, - 146, - 147, - 148, - 149, - 150, - 151, - 152, - 153, - 154, - 155, - 156, - 157, - 158, - 159, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 174, - 175, - 176, - 177, - 178, - 179, - 180, - 181, - 182, - 183, - 184, - 185, - 186, - 187, - 188, - 189, - 190, - 191, - 192, - 193, - 194, - 195, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 210, - 211, - 212, - 213, - 214, - 215, - 216, - 217, - 218, - 219, - 220, - 221, - 222, - 223, - 224, - 225, - 226, - 227, - 228, - 229, - 230, - 231, - 232, - 233, - 234, - 235, - 236, - 237, - 238, - 239, - 240, - 241, - 242, - 243, - 244, - 245, - 246, - 247, - 248, - 249, - 250, - 251, - 252, - 253, - 254, - 255, - 256, - 257, - 258, - 259, - 260, - 261, - 262, - 263, - 264, - 265, - 266, - 267, - 268, - 269, - 270, - 271, - 272, - 273, - 274, - 275, - 276, - 277, - 278, - 279, - 280, - 281, - 282, - 283, - 284, - 285, - 286, - 287, - 288, - 289, - 290, - 291, - 292, - 293, - 294, - 295, - 296, - 297, - 298, - 299, - 300, - 301, - 302, - 303, - 304, - 305, - 306, - 307, - 308, - 309, - 310, - 311, - 312, - 313, - 314, - 315, - 316, - 317, - 318, - 319, - 320, - 321, - 322, - 323, - 324, - 325, - 326, - 327, - 328, - 329, - 330, - 331, - 332, - 333, - 334, - 335, - 336, - 337, - 338, - 339, - 340, - 341, - 342, - 343, - 344, - 345, - 346, - 347 - ] - } - ], - "antenna_set": "HBA_DUAL_INNER", - "correlator": { - "channels_per_subband": 64, - "integration_time": 1, - "storage_cluster": "CEP4" - }, - "duration": 7200, - "filter": "HBA_110_190", - "station_groups": [ - { - "max_nr_missing": 4, - "stations": [ - "CS001", - "CS002", - "CS003", - "CS004", - "CS005", - "CS006", - "CS007", - "CS011", - "CS013", - "CS017", - "CS021", - "CS024", - "CS026", - "CS028", - "CS030", - "CS031", - "CS032", - "CS101", - "CS103", - "CS201", - "CS301", - "CS302", - "CS401", - "CS501", - "RS106", - "RS205", - "RS208", - "RS210", - "RS305", - "RS306", - "RS307", - "RS310", - "RS406", - "RS407", - "RS409", - "RS503", - "RS508", - "RS509" - ] - } - ], - "tile_beam": { - "angle1": 0.6624317181687094, - "angle2": 1.5579526427549426, - "direction_type": "J2000", - "target": "PXXX+YY" + "name": "target", + "subbands": [ + 104, + 105, + 
106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149, + 150, + 151, + 152, + 153, + 154, + 155, + 156, + 157, + 158, + 159, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 174, + 175, + 176, + 177, + 178, + 179, + 180, + 181, + 182, + 183, + 184, + 185, + 186, + 187, + 188, + 189, + 190, + 191, + 192, + 193, + 194, + 195, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 210, + 211, + 212, + 213, + 214, + 215, + 216, + 217, + 218, + 219, + 220, + 221, + 222, + 223, + 224, + 225, + 226, + 227, + 228, + 229, + 230, + 231, + 232, + 233, + 234, + 235, + 236, + 237, + 238, + 239, + 240, + 241, + 242, + 243, + 244, + 245, + 246, + 247, + 248, + 249, + 250, + 251, + 252, + 253, + 254, + 255, + 256, + 257, + 258, + 259, + 260, + 261, + 262, + 263, + 264, + 265, + 266, + 267, + 268, + 269, + 270, + 271, + 272, + 273, + 274, + 275, + 276, + 277, + 278, + 279, + 280, + 281, + 282, + 283, + 284, + 285, + 286, + 287, + 288, + 289, + 290, + 291, + 292, + 293, + 294, + 295, + 296, + 297, + 298, + 299, + 300, + 301, + 302, + 303, + 304, + 305, + 306, + 307, + 308, + 309, + 310, + 311, + 312, + 313, + 314, + 315, + 316, + 317, + 318, + 319, + 320, + 321, + 322, + 323, + 324, + 325, + 326, + 327, + 328, + 329, + 330, + 331, + 332, + 333, + 334, + 335, + 336, + 337, + 338, + 339, + 340, + 341, + 342, + 343, + 344, + 345, + 346, + 347 + ] } + ], + "antenna_set": "HBA_DUAL_INNER", + "correlator": { + "channels_per_subband": 64, + "integration_time": 1, + "storage_cluster": "CEP4" }, - "specifications_template": { - "name": "target observation" - }, - "tags": [] + "duration": 7200, + "filter": "HBA_110_190", + "station_groups": [ + { + "max_nr_missing": 4, + "stations": [ + "CS001", + "CS002", + "CS003", + "CS004", + "CS005", + "CS006", + "CS007", + "CS011", + "CS013", + "CS017", + "CS021", + "CS024", + "CS026", + "CS028", + "CS030", + "CS031", + "CS032", + "CS101", + "CS103", + "CS201", + "CS301", + "CS302", + "CS401", + "CS501", + "RS106", + "RS205", + "RS208", + "RS210", + "RS305", + "RS306", + "RS307", + "RS310", + "RS406", + "RS407", + "RS409", + "RS503", + "RS508", + "RS509" + ] + } + ], + "tile_beam": { + "angle1": 0.6624317181687094, + "angle2": 1.5579526427549426, + "direction_type": "J2000", + "target": "PXXX+YY" + } }, - "Target Pipeline": { - "description": "Preprocessing Pipeline for Target Observation", - "specifications_doc": { - "average": { - "frequency_steps": 4, - "time_steps": 1 - }, - "demix": { - "frequency_steps": 64, - "ignore_target": false, - "sources": {}, - "time_steps": 10 - }, - "flag": { - "autocorrelations": true, - "outerchannels": true, - "rfi_strategy": "HBAdefault" - }, - "storagemanager": "dysco" + "specifications_template": { + "name": "target observation" + }, + "tags": [] + }, + "Target Pipeline": { + "description": "Preprocessing Pipeline for Target Observation", + "specifications_doc": { + "average": { + "frequency_steps": 4, + "time_steps": 1 }, - "specifications_template": { - "name": "preprocessing pipeline" + "demix": { + "frequency_steps": 64, + "ignore_target": false, + "sources": {}, + "time_steps": 10 }, - "tags": [] - } + "flag": { + "autocorrelations": true, + "outerchannels": true, + "rfi_strategy": "HBAdefault" 
+ }, + "storagemanager": "dysco" + }, + "specifications_template": { + "name": "preprocessing pipeline" + }, + "tags": [] } - }, - "version": 1 + } }, "version": 1 } \ No newline at end of file diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Beamformed_Observation_-_Pipeline_-_Ingest-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Beamformed_Observation_-_Pipeline_-_Ingest-1.json index b0b78c52a629653237b71720926a6437b548db22..d53b74058e780fce0b7be941b4b54cd40f8ca97e 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Beamformed_Observation_-_Pipeline_-_Ingest-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Beamformed_Observation_-_Pipeline_-_Ingest-1.json @@ -46,7 +46,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [ { "consumer": "Pipeline", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Observation_-_Pipeline_-_Ingest-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Observation_-_Pipeline_-_Ingest-1.json index f8ae5efe1ca11c2c6fb42a8a7c2c02082840ff93..b5944434f9319866a5e85b91d3783f49354ac701 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Observation_-_Pipeline_-_Ingest-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Short_Test_Observation_-_Pipeline_-_Ingest-1.json @@ -46,7 +46,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [ { "consumer": "Pipeline", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Beamforming_Observation-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Beamforming_Observation-1.json index 6baa06767c3d53d7f336abc9457bfb90ab4e58a8..076df3484822a3b3cc3aea1d07b69fd87e6e08c8 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Beamforming_Observation-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Beamforming_Observation-1.json @@ -52,7 +52,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [], "task_scheduling_relations": [], "tasks": { diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Observation-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Observation-1.json index 3b1125376357c6ffd307d99c6e5a752b349635d9..6e642e1661f287463fa5f34f99318b27352e67f7 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Observation-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/Simple_Observation-1.json @@ -46,7 +46,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [], "task_scheduling_relations": [], "tasks": { diff --git 
a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/UC1_CTC+pipelines-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/UC1_CTC+pipelines-1.json index 1ff3c1500868c4345b4df6b9922704b054bce4e1..80204cefd32db9c39033c8f69754db8af1513bb2 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/UC1_CTC+pipelines-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_observing_strategy_template/UC1_CTC+pipelines-1.json @@ -40,7 +40,7 @@ } } }, - "scheduling_constraints_template": "constraints", + "scheduling_constraints_template": {"name": "constraints"}, "task_relations": [ { "consumer": "Pipeline 1", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template/scheduling_unit-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template/scheduling_unit-1.json index 2f714ebc0cf47690862679b42a64c5b0ab5c403b..298a79939b782df484a0ade8b0d65bb2c5ee7ca5 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template/scheduling_unit-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template/scheduling_unit-1.json @@ -52,9 +52,25 @@ "type": "object" }, "scheduling_constraints_template": { - "default": "constraints", - "title": "Name of Scheduling Constraints Template which defines the json schema for the scheduling_constraints_doc", - "type": "string" + "title": "Scheduling Constraints Template", + "description": "Name and version of Scheduling Constraints Template which defines the json schema for the scheduling_constraints_doc", + "type": "object", + "default": {}, + "properties": { + "name": { + "default": "constraints", + "minLength": 1, + "type": "string" + }, + "version": { + "default": 1, + "minimum": 1, + "type": "integer" + } + }, + "required": [ + "name" + ] }, "task_relations": { "additionalItems": false, diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/common.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/common.py index c12e879675249229317935fbcd6b883bd18239b0..1fa36b3e5ea2e2bb93eeb648cdfb662b9e614b5f 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/common.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/common.py @@ -16,7 +16,7 @@ class FloatDurationField(serializers.FloatField): return value.total_seconds() class RelationalHyperlinkedModelSerializer(serializers.HyperlinkedModelSerializer): - _accepted_pk_names = ('id', 'name') + _accepted_pk_names = ('id', 'name', 'unique_identifier') def get_field_names(self, declared_fields, info): field_names = super().get_field_names(declared_fields, info) @@ -101,3 +101,9 @@ class AbstractTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: abstract = True + + +class TaskConnectorTypeSerializer(DynamicRelationalHyperlinkedModelSerializer): + class Meta: + model = models.TaskConnectorType + fields = '__all__' diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py index 92746b6c3c340e7b90ed9683c8d60c5e033c1f80..1b6e0d0fff95efaef6b2eaeed0314d84c104640a 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py @@ -9,7 +9,7 @@ from rest_framework import serializers from rest_framework.exceptions import ValidationError from .. 
import models from .widgets import JSONEditorField -from .common import FloatDurationField, RelationalHyperlinkedModelSerializer, AbstractTemplateSerializer, DynamicRelationalHyperlinkedModelSerializer +from .common import FloatDurationField, RelationalHyperlinkedModelSerializer, AbstractTemplateSerializer, DynamicRelationalHyperlinkedModelSerializer, TaskConnectorTypeSerializer class SubtaskStateSerializer(DynamicRelationalHyperlinkedModelSerializer): class Meta: @@ -66,19 +66,17 @@ class DataproductFeedbackTemplateSerializer(AbstractTemplateSerializer): class SubtaskSerializer(DynamicRelationalHyperlinkedModelSerializer): - # If this is OK then we can extend API with NO url ('flat' values) on more places if required cluster_name = serializers.StringRelatedField(source='cluster', label='cluster_name', read_only=True, help_text='The cluster name as defined in the specifications template, provided here to safe an addition lookup.') subtask_type = serializers.StringRelatedField(source='specifications_template.type', label='subtask_type', read_only=True, help_text='The subtask type as defined in the specifications template, provided here to safe an addition lookup.') specifications_doc = JSONEditorField(schema_source='specifications_template.schema') duration = FloatDurationField(read_only=True) primary = serializers.BooleanField() - input_dataproducts = serializers.HyperlinkedRelatedField(many=True, read_only=True, view_name='dataproduct-detail') - output_dataproducts = serializers.HyperlinkedRelatedField(many=True, read_only=True, view_name='dataproduct-detail') class Meta: model = models.Subtask fields = '__all__' - extra_fields = ['input_dataproducts', 'output_dataproducts', 'on_sky_start_time', 'on_sky_stop_time', 'process_start_time', 'process_stop_time'] + extra_fields = ['on_sky_start_time', 'on_sky_stop_time', 'process_start_time', 'process_stop_time', 'inputs', 'outputs'] + read_only_fields = ['inputs', 'outputs'] expandable_fields = { 'input_dataproducts': ('lofar.sas.tmss.tmss.tmssapp.serializers.DataproductSerializer', {'many': True}), 'output_dataproducts': ('lofar.sas.tmss.tmss.tmssapp.serializers.DataproductSerializer', {'many': True}) @@ -93,11 +91,25 @@ class SubtaskInputSerializer(DynamicRelationalHyperlinkedModelSerializer): fields = '__all__' +class SubtaskInputExtendedSerializer(SubtaskInputSerializer): + input_role = TaskConnectorTypeSerializer(read_only=True, help_text='The connector type which defines what kind of data this subtask consumes.') + + class SubtaskOutputSerializer(DynamicRelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskOutput fields = '__all__' - #extra_fields = ['dataproducts', 'consumers'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? 
+ extra_fields = ['dataproducts', 'consumers'] + read_only_fields = ['dataproducts', 'consumers'] + + +class SubtaskOutputExtendedSerializer(SubtaskOutputSerializer): + output_role = TaskConnectorTypeSerializer(read_only=True, help_text='The connector type which defines what kind of data this subtask produces.') + + +class SubtaskExtendedSerializer(SubtaskSerializer): + inputs = SubtaskInputExtendedSerializer(read_only=True, many=True) + outputs = SubtaskOutputExtendedSerializer(read_only=True, many=True) class DataproductSerializer(DynamicRelationalHyperlinkedModelSerializer): @@ -107,6 +119,8 @@ class DataproductSerializer(DynamicRelationalHyperlinkedModelSerializer): class Meta: model = models.Dataproduct fields = '__all__' + extra_fields = ['filepath', 'consumers'] + read_only_fields = ['filepath', 'consumers'] class AntennaSetSerializer(DynamicRelationalHyperlinkedModelSerializer): diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py index d08254939dcb3a0acc09173a8d376aeb3570726a..d83ab0f04f4692fe51421b37ad6b82cf166631f5 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py @@ -4,8 +4,8 @@ This file contains the serializers (for the elsewhere defined data models) from rest_framework import serializers from .. import models -from .scheduling import SubtaskSerializer -from .common import FloatDurationField, RelationalHyperlinkedModelSerializer, AbstractTemplateSerializer, DynamicRelationalHyperlinkedModelSerializer +from .scheduling import SubtaskSerializer, SubtaskExtendedSerializer +from .common import FloatDurationField, RelationalHyperlinkedModelSerializer, AbstractTemplateSerializer, DynamicRelationalHyperlinkedModelSerializer, TaskConnectorTypeSerializer from .widgets import JSONEditorField from ..models import TMSSUser as User @@ -69,6 +69,10 @@ class TaskTemplateSerializer(AbstractTemplateSerializer): fields = '__all__' +class TaskTemplateExtendedSerializer(TaskTemplateSerializer): + connector_types = TaskConnectorTypeSerializer(many=True, read_only=True, help_text='The connector types which define what kind of data this task template consumes/produces.') + + class TaskRelationSelectionTemplateSerializer(AbstractTemplateSerializer): class Meta: model = models.TaskRelationSelectionTemplate @@ -109,18 +113,6 @@ class QuantitySerializer(DynamicRelationalHyperlinkedModelSerializer): fields = '__all__' -class TaskConnectorTypeSerializer(DynamicRelationalHyperlinkedModelSerializer): - class Meta: - model = models.TaskConnectorType - fields = '__all__' - - -class TaskConnectorTypeModelSerializer(serializers.ModelSerializer): - class Meta: - model = models.TaskConnectorType - fields = '__all__' - - class CycleSerializer(DynamicRelationalHyperlinkedModelSerializer): duration = FloatDurationField(read_only=True, help_text="Duration of the cycle [seconds]") @@ -267,7 +259,7 @@ class TaskDraftSerializer(DynamicRelationalHyperlinkedModelSerializer): relative_stop_time = FloatDurationField(read_only=True) specifications_doc = JSONEditorField(schema_source='specifications_template.schema') task_type = serializers.StringRelatedField(source='specifications_template.type', label='task_type', read_only=True, help_text='The task type as defined in the specifications template.') - connector_types = TaskConnectorTypeModelSerializer(source='specifications_template.connector_types', label='connector_types', many=True, 
read_only=True, help_text='The connector types which define what kind of data this task consumes/produces.') + connector_types = TaskConnectorTypeSerializer(source='specifications_template.connector_types', label='connector_types', many=True, read_only=True, help_text='The connector types which define what kind of data this task consumes/produces.') class Meta: model = models.TaskDraft @@ -287,7 +279,6 @@ class TaskBlueprintSerializer(DynamicRelationalHyperlinkedModelSerializer): relative_stop_time = FloatDurationField(read_only=True) specifications_doc = JSONEditorField(schema_source='specifications_template.schema') task_type = serializers.StringRelatedField(source='specifications_template.type', label='task_type', read_only=True, help_text='The task type as defined in the specifications template.') - connector_types = TaskConnectorTypeModelSerializer(source='specifications_template.connector_types', label='connector_types', many=True, read_only=True, help_text='The connector types which define what kind of data this task consumes/produces.') class Meta: model = models.TaskBlueprint @@ -314,6 +305,11 @@ class TaskRelationDraftSerializer(DynamicRelationalHyperlinkedModelSerializer): extra_fields = ['blueprints'] +class TaskRelationDraftExtendedSerializer(TaskRelationDraftSerializer): + input_role = TaskConnectorTypeSerializer(help_text='The connector type which defines what kind of data this subtask consumes.') + output_role = TaskConnectorTypeSerializer(help_text='The connector type which defines what kind of data this subtask produces.') + + class TaskRelationBlueprintSerializer(DynamicRelationalHyperlinkedModelSerializer): selection_doc = JSONEditorField(schema_source='selection_template.schema') @@ -322,6 +318,11 @@ class TaskRelationBlueprintSerializer(DynamicRelationalHyperlinkedModelSerialize fields = '__all__' +class TaskRelationBlueprintExtendedSerializer(TaskRelationBlueprintSerializer): + input_role = TaskConnectorTypeSerializer(read_only=True, help_text='The connector type which defines what kind of data this subtask consumes.') + output_role = TaskConnectorTypeSerializer(read_only=True, help_text='The connector type which defines what kind of data this subtask produces.') + + class TaskSchedulingRelationDraftSerializer(DynamicRelationalHyperlinkedModelSerializer): class Meta: model = models.TaskSchedulingRelationDraft @@ -376,8 +377,8 @@ class TaskBlueprintExtendedSerializer(TaskBlueprintSerializer): expanded into the json response for a single API call (for convenience/optimization). """ - subtasks = SubtaskSerializer(many=True) # we set many=True because this field represents a sth-to-many relationship, i.e. we serialize a list of objects here - specifications_template = TaskTemplateSerializer() + subtasks = SubtaskExtendedSerializer(many=True) # we set many=True because this field represents a sth-to-many relationship, i.e. we serialize a list of objects here + specifications_template = TaskTemplateExtendedSerializer() class TaskDraftExtendedSerializer(TaskDraftSerializer): @@ -386,8 +387,7 @@ class TaskDraftExtendedSerializer(TaskDraftSerializer): expanded into the json response for a single API call (for convenience/optimization). 
""" task_blueprints = TaskBlueprintExtendedSerializer(many=True) - specifications_template = TaskTemplateSerializer() - + specifications_template = TaskTemplateExtendedSerializer() class SchedulingUnitDraftExtendedSerializer(SchedulingUnitDraftSerializer): """ diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py index 931bb1e0dd41ace8ff8ae9176932a442cfe669fa..e9fe29f48fc6c271800ecc0dd94924265e4540ae 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py @@ -31,6 +31,7 @@ from lofar.mac.pipeline_control_rpc import PipelineControlRPCClient from lofar.sas.tmss.tmss.tmssapp.conversions import antennafields_for_antennaset_and_station from lofar.sas.tmss.tmss.exceptions import TMSSException from django.db import transaction +from django.db.models import Q # ==== various create* methods to convert/create a TaskBlueprint into one or more Subtasks ==== @@ -643,7 +644,9 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB # step 2: create and link subtask input/output # an observation has no input, it just produces output data - subtask_output = SubtaskOutput.objects.create(subtask=subtask) + # create a subtask_output per task_connector_type of the task, but only with role Correlator or Beamformer because that is what observations produce. + for task_connector_type in task_blueprint.specifications_template.connector_types.filter(iotype__value=IOType.Choices.OUTPUT.value).filter(Q(role__value=Role.Choices.CORRELATOR.value)|Q(role__value=Role.Choices.BEAMFORMER.value)).all(): + subtask_output = SubtaskOutput.objects.create(subtask=subtask, output_role=task_connector_type) # step 3: set state to DEFINED subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) @@ -705,10 +708,12 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) for obs_out in observation_subtask.outputs.all(): qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask, producer=obs_out, # TODO: determine proper producer based on spec in task_relation_blueprint + input_role=None, # TODO: determine proper role based on spec in task_relation_blueprint selection_doc=selection_doc, selection_template=selection_template) - qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask) + # create an internal SubtaskOutput (output_role=None), because we do not expose qa data yet at task level + qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask, output_role=None) # step 3: set state to DEFINED qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) @@ -781,10 +786,12 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta selection_doc = selection_template.get_default_json_document_for_schema() qaplots_subtask_input = SubtaskInput.objects.create(subtask=qaplots_subtask, producer=qafile_subtask.outputs.first(), + input_role=None, selection_doc=selection_doc, selection_template=selection_template) - qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask) + # create an internal SubtaskOutput (output_role=None), because we do not expose qa data yet at task level + qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask, output_role=None) # step 3: set state to DEFINED qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) @@ -794,6 +801,28 @@ def 
create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta return qaplots_subtask +def _create_subtask_inputs(subtask: Subtask): + # loop over all incoming task_relations... + for task_relation_blueprint in subtask.task_blueprint.produced_by.all(): + # ... and check which task_relation matches with this task's connector_types (including the *-asterisk-like ANY role for inputs) + for input_connector_type in subtask.task_blueprint.specifications_template.connector_types.filter(Q(role=task_relation_blueprint.output_role.role) | Q(role__value=Role.Choices.ANY.value), + datatype=task_relation_blueprint.output_role.datatype, + dataformat=task_relation_blueprint.output_role.dataformat, + iotype__value=IOType.Choices.INPUT.value).all(): + # find the producing_subtask_output for this relation + for producing_subtask_output in SubtaskOutput.objects.filter(subtask__in=task_relation_blueprint.producer.subtasks.all()).filter(output_role__role=task_relation_blueprint.output_role.role, + output_role__datatype=task_relation_blueprint.output_role.datatype, + output_role__dataformat=task_relation_blueprint.output_role.dataformat, + output_role__iotype=task_relation_blueprint.output_role.iotype).all(): + # create the SubtaskInput + # this is a "socket" with an 'input_connector_type'-form into which dataproducts of the correct type can be fed upon scheduling + subtask_input = SubtaskInput.objects.create(subtask=subtask, + producer=producing_subtask_output, + input_role=input_connector_type, + selection_doc=task_relation_blueprint.selection_doc, + selection_template=task_relation_blueprint.selection_template) + + def create_pipeline_subtask_from_task_blueprint(task_blueprint: TaskBlueprint, subtask_template_name: str, generate_subtask_specs_from_task_spec_func) -> Subtask: ''' Create a subtask to for the preprocessing pipeline. This method implements "Instantiate subtasks" step from the "Specification Flow" @@ -826,21 +855,12 @@ def create_pipeline_subtask_from_task_blueprint(task_blueprint: TaskBlueprint, s "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) - # step 2: create and link subtask input/output - for task_relation_blueprint in task_blueprint.produced_by.all(): - producing_task_blueprint = task_relation_blueprint.producer - - # create inputs for all predecessor observation subtask outputs that belong to the producer task of this task relation - # TODO: more filtering needed?
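The matching rule that the new `_create_subtask_inputs` helper expresses through the Q-object filters above can be summarised as a single predicate. Below is a minimal sketch over plain dicts, with a hypothetical `connector_accepts` helper; it is illustrative only and not part of the patch:

```python
# Sketch only: an input connector accepts a relation's output_role when the
# datatype and dataformat agree, the iotype is 'input', and the role matches
# exactly or the input declares the wildcard 'any' role.
def connector_accepts(input_connector: dict, output_role: dict) -> bool:
    return (input_connector['iotype'] == 'input'
            and input_connector['datatype'] == output_role['datatype']
            and input_connector['dataformat'] == output_role['dataformat']
            and input_connector['role'] in (output_role['role'], 'any'))

# example: an ingest input with the wildcard role accepts pulp's pulsar-profile output
assert connector_accepts({'iotype': 'input', 'role': 'any',
                          'datatype': 'pulsar profile', 'dataformat': 'pulp analysis'},
                         {'role': 'any',
                          'datatype': 'pulsar profile', 'dataformat': 'pulp analysis'})
```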
- predecessor_observation_subtasks = [st for st in producing_task_blueprint.subtasks.order_by('id').all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] - for predecessor_obs_subtask in predecessor_observation_subtasks: - for predecessor_subtask_output in predecessor_obs_subtask.outputs.all(): - subtask_input = SubtaskInput.objects.create(subtask=subtask, - producer=predecessor_subtask_output, - selection_doc=task_relation_blueprint.selection_doc, - selection_template=task_relation_blueprint.selection_template) + # step 2: create and link subtask input + _create_subtask_inputs(subtask) - subtask_output = SubtaskOutput.objects.create(subtask=subtask) + # create a subtask_output per task_connector_type of the task + for task_connector_type in task_blueprint.specifications_template.connector_types.filter(iotype__value=IOType.Choices.OUTPUT.value).all(): + subtask_output = SubtaskOutput.objects.create(subtask=subtask, output_role=task_connector_type) # step 3: set state to DEFINED subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) @@ -882,16 +902,7 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> subtask = Subtask.objects.create(**subtask_data) # step 2: create and link subtask input - for task_relation_blueprint in task_blueprint.produced_by.all(): - producing_task_blueprint = task_relation_blueprint.producer - - predecessor_subtasks = [st for st in producing_task_blueprint.subtasks.filter(specifications_template__type__value__in=(SubtaskType.Choices.OBSERVATION.value, SubtaskType.Choices.PIPELINE.value)).order_by('id').all()] - for predecessor_subtask in predecessor_subtasks: - for predecessor_subtask_output in predecessor_subtask.outputs.all(): - SubtaskInput.objects.create(subtask=subtask, - producer=predecessor_subtask_output, - selection_doc=task_relation_blueprint.selection_doc, - selection_template=task_relation_blueprint.selection_template) + _create_subtask_inputs(subtask) # step 3: set state to DEFINED subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) @@ -931,16 +942,7 @@ def create_cleanup_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> # step 2: create and link subtask input # for this cleanup subtask an 'input' seems a bit weird, but it actually makes sense! # this cleanup subtask will cleanup the output data of all linked input predecessors. 
- for task_relation_blueprint in task_blueprint.produced_by.all(): - producing_task_blueprint = task_relation_blueprint.producer - - predecessor_subtasks = [st for st in producing_task_blueprint.subtasks.filter(specifications_template__type__value__in=(SubtaskType.Choices.OBSERVATION.value, SubtaskType.Choices.PIPELINE.value)).order_by('id').all()] - for predecessor_subtask in predecessor_subtasks: - for predecessor_subtask_output in predecessor_subtask.outputs.all(): - SubtaskInput.objects.create(subtask=subtask, - producer=predecessor_subtask_output, - selection_doc=task_relation_blueprint.selection_doc, - selection_template=task_relation_blueprint.selection_template) + _create_subtask_inputs(subtask) # step 3: set state to DEFINED subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) @@ -962,26 +964,27 @@ def schedule_subtask(subtask: Subtask) -> Subtask: subtask.save() try: - if subtask.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value: - return schedule_pipeline_subtask(subtask) + with transaction.atomic(): + if subtask.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value: + return schedule_pipeline_subtask(subtask) - if subtask.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value: - return schedule_observation_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value: + return schedule_observation_subtask(subtask) - if subtask.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value: - return schedule_qafile_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value: + return schedule_qafile_subtask(subtask) - if subtask.specifications_template.type.value == SubtaskType.Choices.QA_PLOTS.value: - return schedule_qaplots_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.QA_PLOTS.value: + return schedule_qaplots_subtask(subtask) - if subtask.specifications_template.type.value == SubtaskType.Choices.INGEST.value: - return schedule_ingest_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.INGEST.value: + return schedule_ingest_subtask(subtask) - if subtask.specifications_template.type.value == SubtaskType.Choices.CLEANUP.value: - return schedule_cleanup_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.CLEANUP.value: + return schedule_cleanup_subtask(subtask) - if subtask.specifications_template.type.value == SubtaskType.Choices.COPY.value: - return schedule_copy_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.COPY.value: + return schedule_copy_subtask(subtask) raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." % (subtask.pk, subtask.specifications_template.type.value)) @@ -1702,9 +1705,6 @@ def _create_preprocessing_output_dataproducts_and_transforms(pipeline_subtask: S return output_dataproducts def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask: Subtask, input_dataproducts: list): - # select subtask output the new dataproducts will be linked to - pipeline_subtask_output = pipeline_subtask.outputs.first() # TODO: if we have several, how to map input to output? 
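Wrapping the per-type dispatch in `transaction.atomic()` means a failing `schedule_*_subtask` call no longer leaves half-created SubtaskInputs, SubtaskOutputs or dataproducts behind. The same dispatch could also be written table-driven; a sketch, assuming it lives in subtasks.py next to the `schedule_*_subtask` functions shown above (`SCHEDULERS` and `dispatch_schedule` are hypothetical names, not part of the patch):

```python
# Hypothetical table-driven equivalent of the if-chain above; SCHEDULERS maps
# subtask type values to this module's schedule_*_subtask functions.
SCHEDULERS = {SubtaskType.Choices.PIPELINE.value: schedule_pipeline_subtask,
              SubtaskType.Choices.OBSERVATION.value: schedule_observation_subtask,
              SubtaskType.Choices.QA_FILES.value: schedule_qafile_subtask,
              SubtaskType.Choices.QA_PLOTS.value: schedule_qaplots_subtask,
              SubtaskType.Choices.INGEST.value: schedule_ingest_subtask,
              SubtaskType.Choices.CLEANUP.value: schedule_cleanup_subtask,
              SubtaskType.Choices.COPY.value: schedule_copy_subtask}

def dispatch_schedule(subtask: Subtask) -> Subtask:
    subtask_type = subtask.specifications_template.type.value
    if subtask_type not in SCHEDULERS:
        raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." % (subtask.pk, subtask_type))
    with transaction.atomic():  # all-or-nothing: partial scheduling state rolls back on error
        return SCHEDULERS[subtask_type](subtask)
```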
- dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="empty") directory = os.path.join(_output_root_directory(pipeline_subtask), "pulp") @@ -1712,7 +1712,7 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask: # we process per data type, as later on we produce summaries per data type # different datatypes need different treatment - data_types = ["cs", "is", "cv"] + bf_data_types = ["cs", "is", "cv"] # sort input dataproducts by data type input_dataproducts = { @@ -1740,22 +1740,25 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask: "cv": "CV" } - for data_type in data_types: + for bf_data_type in bf_data_types: # do not generate output for a data type that is not in the input - if not input_dataproducts[data_type]: + if not input_dataproducts[bf_data_type]: continue dataformat = Dataformat.objects.get(value="pulp analysis") datatype = Datatype.objects.get(value="pulsar profile") dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="time series") + # select subtask output the new dataproducts will be linked to + pipeline_subtask_output = pipeline_subtask.outputs.filter(output_role__dataformat=dataformat, output_role__datatype=datatype).first() + # how the output is constructed from the input. we do this based on filenames: input dataproducts that result in the same filename # are combined to produce the output with that filename. # # format: { output_dp_filename: {"output_dp": output_dp, "input_dps": [input_dps]} } transformation_map = {} - for input_dp in input_dataproducts[data_type]: + for input_dp in input_dataproducts[bf_data_type]: # the filename doesn't contain the stokes number if there is only 1 stokes for this data type or when recording cv: # # type example filename input:output mapping @@ -1780,7 +1783,7 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask: else: # a new output to model output_dp = Dataproduct(filename=output_filename, - directory=os.path.join(directory, output_subdir[data_type]), + directory=os.path.join(directory, output_subdir[bf_data_type]), dataformat=dataformat, datatype=datatype, producer=pipeline_subtask_output, @@ -1820,17 +1823,20 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask: datatype = Datatype.objects.get(value="quality") dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="pulp summary") + # select subtask output the new dataproducts will be linked to + pipeline_subtask_output = pipeline_subtask.outputs.filter(output_role__dataformat=dataformat, output_role__datatype=datatype).first() + # 1. 
create dataproducts # type example filename # CV pulp/cs/L808292_summaryCV.tar # IS pulp/is/L808290_summaryIS.tar # CS pulp/cs/L808288_summaryCS.tar - summary_dataproduct = Dataproduct(filename="L%s_summary%s.tar" % (pipeline_subtask.id, summary_filename_suffix[data_type]), - directory=os.path.join(directory, output_subdir[data_type]), + summary_dataproduct = Dataproduct(filename="L%s_summary%s.tar" % (pipeline_subtask.id, summary_filename_suffix[bf_data_type]), + directory=os.path.join(directory, output_subdir[bf_data_type]), dataformat=dataformat, datatype=datatype, producer=pipeline_subtask_output, - specifications_doc={ "coherent": data_type != "is", "identifiers": { "data_type": data_type } }, + specifications_doc={ "coherent": bf_data_type != "is", "identifiers": { "data_type": bf_data_type } }, specifications_template=dataproduct_specifications_template, feedback_doc=dataproduct_feedback_template.get_default_json_document_for_schema(), feedback_template=dataproduct_feedback_template, @@ -1843,7 +1849,7 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask: # 2. create transforms from the input # populate the transform, each input_dp of this datatype is input for this summary - transforms = [DataproductTransform(input=input_dp, output=summary_dataproduct, identity=False) for input_dp in input_dataproducts[data_type]] + transforms = [DataproductTransform(input=input_dp, output=summary_dataproduct, identity=False) for input_dp in input_dataproducts[bf_data_type]] DataproductTransform.objects.bulk_create(transforms) return None @@ -1958,21 +1964,22 @@ def schedule_ingest_subtask(ingest_subtask: Subtask): raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no input dataproduct(s)" % (ingest_subtask.pk, ingest_subtask.specifications_template.type)) - # iterate over all inputs - for ingest_subtask_input in ingest_subtask.inputs.all(): + # Create one internal SubtaskOutput (output_role=None), all dataproducts end up in here. + # We do not expose ingested data as a task output connector. The LTA is an endpoint. + ingest_subtask_output = SubtaskOutput.objects.create(subtask=ingest_subtask, output_role=None) + # gather all dataproducts from all inputs + for ingest_subtask_input in ingest_subtask.inputs.all(): # select and set input dataproducts that meet the filter defined in selection_doc input_dataproducts = [dataproduct for dataproduct in ingest_subtask_input.producer.dataproducts.all() if specifications_doc_meets_selection_doc(dataproduct.specifications_doc, ingest_subtask_input.selection_doc)] ingest_subtask_input.dataproducts.set(input_dataproducts) - # define output and create output dataproducts. - ingest_subtask_output = SubtaskOutput.objects.create(subtask=ingest_subtask) - # prepare identifiers in bulk for each output_dataproduct dp_gids = [SIPidentifier(source="TMSS") for _ in input_dataproducts] SIPidentifier.objects.bulk_create(dp_gids) + # define output and create output dataproducts. output_dataproducts = [Dataproduct(filename=input_dp.filename, # overwritten later by ingest 'feedback'. Is determined at transfer time by the LTA. directory="LTA", # filled in later by ingest 'feedback'. Is determined at transfer time by the LTA. 
dataformat=input_dp.dataformat, diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py index 2fce321d8417e2d2a63248416e12026987ce676e..bec379fc7e42227345afe0c16643502e82fc73b9 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py @@ -35,8 +35,9 @@ def create_scheduling_unit_draft_from_observing_strategy_template(strategy_templ specifications_doc = dict_with_overrides(specifications_doc, allowed_overrides) if 'scheduling_constraints_template' in specifications_doc: - scheduling_constraints_template_name = specifications_doc.pop('scheduling_constraints_template') - scheduling_constraints_template = models.SchedulingConstraintsTemplate.objects.get(name=scheduling_constraints_template_name) + scheduling_constraints_template_name_version = specifications_doc.pop('scheduling_constraints_template') + scheduling_constraints_template = models.SchedulingConstraintsTemplate.objects.get(name=scheduling_constraints_template_name_version['name'], + version=scheduling_constraints_template_name_version.get('version',1)) else: # use the latest scheduling_constraints_template (can be None, which is acceptable) scheduling_constraints_template = models.SchedulingConstraintsTemplate.objects.all().order_by('created_at').last() @@ -71,7 +72,11 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ name=scheduling_unit_draft.name, description=scheduling_unit_draft.description, draft=scheduling_unit_draft, - specifications_template=scheduling_unit_draft.specifications_template) + specifications_template=scheduling_unit_draft.specifications_template, + ingest_permission_required=scheduling_unit_draft.ingest_permission_required, + piggyback_allowed_tbb=scheduling_unit_draft.piggyback_allowed_tbb, + piggyback_allowed_aartfaac=scheduling_unit_draft.piggyback_allowed_aartfaac, + interrupts_telescope=scheduling_unit_draft.interrupts_telescope) logger.info("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s') created scheduling_unit_blueprint id=%s name='%s'", scheduling_unit_draft.pk, scheduling_unit_draft.name, scheduling_unit_blueprint.pk, scheduling_unit_blueprint.name) @@ -420,8 +425,9 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model def create_scheduling_unit_blueprint_and_tasks_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint, then create its child task_blueprint(s), then create the task_blueprint's subtasks''' - scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) - return update_task_blueprints_and_subtasks_graph_from_draft(scheduling_unit_blueprint) + with transaction.atomic(): + scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) + return update_task_blueprints_and_subtasks_graph_from_draft(scheduling_unit_blueprint) def create_task_blueprint_and_subtasks_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py index c5adc99347792fe207ec34d8ff8efa0a331201e4..531c63c3fc542ebfbb4bb7e32f23e98f20469530 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py +++ 
b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py @@ -40,6 +40,7 @@ from rest_framework.views import APIView from rest_framework.decorators import api_view, renderer_classes from django.core.exceptions import ObjectDoesNotExist import django_property_filter as property_filters +from django.db import transaction class TextPlainAutoSchema(SwaggerAutoSchema): def get_produces(self): @@ -310,8 +311,10 @@ class SubtaskViewSet(LOFARViewSet): operation_description="Get the input dataproducts of this subtask.") @action(methods=['get'], detail=True, url_name='input_dataproducts') def input_dataproducts(self, request, pk=None): - dataproducts = models.Dataproduct.objects.filter(subtaskinput__subtask_id=pk) - serializer = serializers.DataproductSerializer(dataproducts, many=True, context={'request': request}) + subtask = get_object_or_404(models.Subtask, pk=pk) + # fetch the subtask's input_dataproducts and all related properties in one queryset. Major speedup. + input_dataproducts = subtask.input_dataproducts.select_related('dataformat', 'datatype', 'specifications_template', 'producer', 'feedback_template', 'sap', 'global_identifier', 'archive_info').prefetch_related('consumers') + serializer = serializers.DataproductSerializer(input_dataproducts, many=True, context={'request': request}) return RestResponse(serializer.data) @@ -320,8 +323,10 @@ class SubtaskViewSet(LOFARViewSet): operation_description="Get the output dataproducts of this subtask.") @action(methods=['get'], detail=True, url_name='output_dataproducts') def output_dataproducts(self, request, pk=None): - dataproducts = models.Dataproduct.objects.filter(producer__subtask_id=pk) - serializer = serializers.DataproductSerializer(dataproducts, many=True, context={'request': request}) + subtask = get_object_or_404(models.Subtask, pk=pk) + # fetch the subtask's output_dataproducts and all related properties in one queryset. Major speedup. 
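The `select_related`/`prefetch_related` chain above is what turns the classic N+1 query pattern into a constant number of queries per request. An illustrative sketch of the difference (not part of the patch; `subtask` is any `Subtask` instance):

```python
# naive: 1 query for the dataproducts, plus 1 extra query per row for each
# foreign-key attribute the serializer touches (N+1 queries in total)
for dp in subtask.input_dataproducts.all():
    _ = dp.dataformat  # hits the database again for every dataproduct

# optimized: foreign keys are JOINed into the initial query, and the
# many-to-many 'consumers' relation is fetched in one extra query for the whole set
qs = (subtask.input_dataproducts
             .select_related('dataformat', 'datatype', 'specifications_template')
             .prefetch_related('consumers'))
for dp in qs:
    _ = dp.dataformat  # already in memory, no extra query
```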
+ output_dataproducts = subtask.output_dataproducts.select_related('dataformat', 'datatype', 'specifications_template', 'producer', 'feedback_template', 'sap', 'global_identifier', 'archive_info').prefetch_related('consumers') + serializer = serializers.DataproductSerializer(output_dataproducts, many=True, context={'request': request}) return RestResponse(serializer.data) @@ -381,6 +386,10 @@ class SubtaskViewSet(LOFARViewSet): return JsonResponse({'id': subtask.id, 'progress': subtask.progress}) +class SubtaskExtendedViewSet(SubtaskViewSet): + serializer_class = serializers.SubtaskExtendedSerializer + + class SubtaskNestedViewSet(LOFARNestedViewSet): queryset = models.Subtask.objects.all() serializer_class = serializers.SubtaskSerializer @@ -401,11 +410,19 @@ class SubtaskInputViewSet(LOFARViewSet): serializer_class = serializers.SubtaskInputSerializer +class SubtaskInputExtendedViewSet(SubtaskInputViewSet): + serializer_class = serializers.SubtaskInputExtendedSerializer + + class SubtaskOutputViewSet(LOFARViewSet): queryset = models.SubtaskOutput.objects.all() serializer_class = serializers.SubtaskOutputSerializer +class SubtaskOutputExtendedViewSet(SubtaskOutputViewSet): + serializer_class = serializers.SubtaskOutputExtendedSerializer + + class DataproductViewSet(LOFARViewSet): queryset = models.Dataproduct.objects.all() serializer_class = serializers.DataproductSerializer @@ -465,27 +482,28 @@ class DataproductViewSet(LOFARViewSet): json_doc = json.loads(request.body.decode('utf-8')) - dataproduct.size = int(json_doc['file_size']) - dataproduct.directory, dataproduct.filename = json_doc['srm_url'].rsplit('/', maxsplit=1) + with transaction.atomic(): + dataproduct.size = int(json_doc['file_size']) + dataproduct.directory, dataproduct.filename = json_doc['srm_url'].rsplit('/', maxsplit=1) - if 'storage_ticket' in json_doc: - models.DataproductArchiveInfo.objects.create(dataproduct=dataproduct, storage_ticket=json_doc['storage_ticket']) + if 'storage_ticket' in json_doc: + models.DataproductArchiveInfo.objects.create(dataproduct=dataproduct, storage_ticket=json_doc['storage_ticket']) - if 'md5_checksum' in json_doc: - models.DataproductHash.objects.create(dataproduct=dataproduct, - hash_algorithm=models.HashAlgorithm.objects.get(value=models.HashAlgorithm.Choices.MD5.value), - hash=json_doc['md5_checksum']) + if 'md5_checksum' in json_doc: + models.DataproductHash.objects.create(dataproduct=dataproduct, + hash_algorithm=models.HashAlgorithm.objects.get(value=models.HashAlgorithm.Choices.MD5.value), + hash=json_doc['md5_checksum']) - if 'adler32_checksum' in json_doc: - models.DataproductHash.objects.create(dataproduct=dataproduct, - hash_algorithm=models.HashAlgorithm.objects.get(value=models.HashAlgorithm.Choices.ADLER32.value), - hash=json_doc['adler32_checksum']) + if 'adler32_checksum' in json_doc: + models.DataproductHash.objects.create(dataproduct=dataproduct, + hash_algorithm=models.HashAlgorithm.objects.get(value=models.HashAlgorithm.Choices.ADLER32.value), + hash=json_doc['adler32_checksum']) - # create empty feedback. Apart from the archive info above, ingest does not create feedback like observations/pipelines do. - dataproduct.feedback_template = models.DataproductFeedbackTemplate.objects.get(name="empty") - dataproduct.feedback_doc = get_default_json_object_for_schema(dataproduct.feedback_template.schema) + # create empty feedback. Apart from the archive info above, ingest does not create feedback like observations/pipelines do. 
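For reference, the shape of the JSON body this endpoint handles, inferred from the keys accessed above; the values below are invented for illustration, and only `file_size` and `srm_url` are required by the code:

```python
# hypothetical example payload as ingest could POST it to this endpoint
example_json_doc = {
    "file_size": 123456789,                                                   # -> dataproduct.size
    "srm_url": "srm://lta.example.org:8443/some/path/L123456_SB000.MS.tar",   # made-up URL
    "storage_ticket": "ABC123",                           # optional -> DataproductArchiveInfo
    "md5_checksum": "d41d8cd98f00b204e9800998ecf8427e",   # optional -> DataproductHash (MD5)
    "adler32_checksum": "00000001"}                       # optional -> DataproductHash (Adler32)

# directory and filename are derived by splitting the srm_url on its last '/'
directory, filename = example_json_doc["srm_url"].rsplit('/', maxsplit=1)
assert filename == "L123456_SB000.MS.tar"
```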
+ dataproduct.feedback_template = models.DataproductFeedbackTemplate.objects.get(name="empty") + dataproduct.feedback_doc = dataproduct.feedback_template.get_default_json_document_for_schema() + dataproduct.save() - dataproduct.save() serializer = self.get_serializer(dataproduct) return RestResponse(serializer.data) diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py index 28862bc5b558bcc9945083680d3eb9ec19a6e2fa..c5278d02b9f29f4781c9d927cbb912270b9e54bc 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py @@ -138,7 +138,7 @@ class SchedulingUnitObservingStrategyTemplateViewSet(LOFARViewSet): triger_doc['scheduling_unit_observing_strategy_template']['version'] = int(strategy_template.version) triger_doc['scheduling_unit_observing_strategy_template']['overrides'] = strategy_template.template_doc_with_just_the_parameters - return JsonResponse(triger_doc, json_dumps_params={'indent': 2}) + return JsonResponse(triger_doc, json_dumps_params={'indent': 2, 'sort_keys': True}) @@ -1199,6 +1199,10 @@ class TaskDraftViewSet(LOFARViewSet): return Response(serializer.data) +class TaskDraftExtendedViewSet(TaskDraftViewSet): + serializer_class = serializers.TaskDraftExtendedSerializer + + class TaskDraftNestedViewSet(LOFARNestedViewSet): queryset = models.TaskDraft.objects.all() serializer_class = serializers.TaskDraftSerializer @@ -1387,6 +1391,10 @@ class TaskBlueprintViewSet(LOFARViewSet): return RestResponse(serializer.data) +class TaskBlueprintExtendedViewSet(TaskBlueprintViewSet): + serializer_class = serializers.TaskBlueprintExtendedSerializer + + class TaskBlueprintNestedViewSet(LOFARNestedViewSet): queryset = models.TaskBlueprint.objects.all() serializer_class = serializers.TaskBlueprintSerializer @@ -1404,6 +1412,10 @@ class TaskRelationDraftViewSet(LOFARViewSet): serializer_class = serializers.TaskRelationDraftSerializer +class TaskRelationDraftExtendedViewSet(TaskRelationDraftViewSet): + serializer_class = serializers.TaskRelationDraftExtendedSerializer + + class TaskRelationDraftNestedViewSet(LOFARNestedViewSet): queryset = models.TaskRelationDraft.objects.all() serializer_class = serializers.TaskRelationDraftSerializer @@ -1421,6 +1433,10 @@ class TaskRelationBlueprintViewSet(LOFARViewSet): serializer_class = serializers.TaskRelationBlueprintSerializer +class TaskRelationBlueprintExtendedViewSet(TaskRelationBlueprintViewSet): + serializer_class = serializers.TaskRelationBlueprintExtendedSerializer + + class TaskSchedulingRelationBlueprintViewSet(LOFARViewSet): queryset = models.TaskSchedulingRelationBlueprint.objects.all() serializer_class = serializers.TaskSchedulingRelationBlueprintSerializer diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py index 790db62e4ade2fcf3018ccd22d489144a6e89fca..2be2622bcc41a333ed3d37282e98cd4cc4cda4b6 100644 --- a/SAS/TMSS/backend/src/tmss/urls.py +++ b/SAS/TMSS/backend/src/tmss/urls.py @@ -168,7 +168,9 @@ router.register(r'scheduling_unit_draft_extended', viewsets.SchedulingUnitDraftE router.register(r'scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) # ! The last registered view on a model is used for references to objects router.register(r'scheduling_unit_blueprint_extended', viewsets.SchedulingUnitBlueprintExtendedViewSet) router.register(r'scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintViewSet) # ! 
The last registered view on a model is used for references to objects +router.register(r'task_draft_extended', viewsets.TaskDraftExtendedViewSet) router.register(r'task_draft', viewsets.TaskDraftViewSet) +router.register(r'task_blueprint_extended', viewsets.TaskBlueprintExtendedViewSet) router.register(r'task_blueprint', viewsets.TaskBlueprintViewSet) router.register(r'task_relation_draft', viewsets.TaskRelationDraftViewSet) router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) @@ -203,9 +205,12 @@ router.register(r'dataproduct_feedback_template', viewsets.DataproductFeedbackTe router.register(r'sap_template', viewsets.SAPTemplateViewSet) # instances +router.register(r'subtask_extended', viewsets.SubtaskExtendedViewSet) router.register(r'subtask', viewsets.SubtaskViewSet) router.register(r'dataproduct', viewsets.DataproductViewSet) +router.register(r'subtask_input_extended', viewsets.SubtaskInputExtendedViewSet) router.register(r'subtask_input', viewsets.SubtaskInputViewSet) +router.register(r'subtask_output_extended', viewsets.SubtaskOutputExtendedViewSet) router.register(r'subtask_output', viewsets.SubtaskOutputViewSet) router.register(r'antenna_set', viewsets.AntennaSetViewSet) router.register(r'dataproduct_transform', viewsets.DataproductTransformViewSet) diff --git a/SAS/TMSS/backend/test/t_scheduling.py b/SAS/TMSS/backend/test/t_scheduling.py index d377f3e2f4cc03bbfcb3dfbfd04ff7d2de9fccdf..518cf64bfe0ef346e7e64914793635e834102cee 100755 --- a/SAS/TMSS/backend/test/t_scheduling.py +++ b/SAS/TMSS/backend/test/t_scheduling.py @@ -657,13 +657,6 @@ class SubtaskInputOutputTest(unittest.TestCase): dataproducts are assigned """ - def setUp(self) -> None: - # make sure we're allowed to schedule - setting = Setting.objects.get(name='dynamic_scheduling_enabled') - setting.value = True - setting.save() - - def test_specifications_doc_meets_selection_doc(self): # empty selection matches all self.assertTrue(specifications_doc_meets_selection_doc({'something else': 'target0'}, {})) @@ -711,6 +704,111 @@ class SubtaskInputOutputTest(unittest.TestCase): self.assertEqual(set(pipe_in1.dataproducts.all()), {dp1_1, dp1_3}) self.assertEqual(set(pipe_in2.dataproducts.all()), {dp2_2}) + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources") + def test_bug_TMSS_1124_ingest_cannot_be_scheduled_due_to_unfinished_subtask_input_output_implementation(self, assign_resources_mock): + ''' + See Jira: https://support.astron.nl/jira/browse/TMSS-1124 + This bug seemed to indicate that something is wrong in scheduling ingest tasks. + The root cause is that the conversion from task_relations into SubtaskInputs/SubtaskOutputs currently + does not filter for datatype/dataformat. + In this initial version of this test we prove that the bug exists; it is then fixed in a later commit.
+ ''' + # Setup: create a SchedulingUnitDraft/Blueprint with a BF Obs, Pulp, and Ingest + parent_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data()) + su_draft = models.SchedulingUnitDraft.objects.create(scheduling_set=parent_set, + specifications_template=models.SchedulingUnitTemplate.objects.get(name='scheduling unit', version=1)) + + obs_task_template = models.TaskTemplate.objects.get(name='beamforming observation', version=1) + obs_specifications_doc = obs_task_template.get_default_json_document_for_schema() + obs_specifications_doc['SAPs'][0]['subbands'] = [0,1,2,3] + obs_specifications_doc['beamformers'][0]['flys eye']['enabled']= True + obs_task_draft = models.TaskDraft.objects.create(name='obs', + scheduling_unit_draft=su_draft, + specifications_template=obs_task_template, + specifications_doc=obs_specifications_doc) + + pulp_task_template = models.TaskTemplate.objects.get(name='pulsar pipeline', version=1) + pulp_task_draft = models.TaskDraft.objects.create(name='pulp', + scheduling_unit_draft=su_draft, + specifications_template=pulp_task_template, + specifications_doc=pulp_task_template.get_default_json_document_for_schema()) + + # connect the obs and the pulp pipeline + # it's a single relation of beamformed timeseries data + trsel_template = models.TaskRelationSelectionTemplate.objects.get(name='all', version=1) + trsel_obs_pulp_draft = models.TaskRelationDraft.objects.create(producer=obs_task_draft, + consumer=pulp_task_draft, + output_role=obs_task_template.connector_types.get(iotype__value=models.IOType.Choices.OUTPUT.value, + dataformat__value=models.Dataformat.Choices.BEAMFORMED.value, + datatype__value=models.Datatype.Choices.TIME_SERIES.value, + role__value=models.Role.Choices.BEAMFORMER.value), + input_role=pulp_task_template.connector_types.get(iotype__value=models.IOType.Choices.INPUT.value, + dataformat__value=models.Dataformat.Choices.BEAMFORMED.value, + datatype__value=models.Datatype.Choices.TIME_SERIES.value, + role__value=models.Role.Choices.BEAMFORMER.value), + selection_template=trsel_template, + selection_doc=trsel_template.get_default_json_document_for_schema()) + + ingest_task_template = models.TaskTemplate.objects.get(name='ingest', version=1) + ingest_task_draft = models.TaskDraft.objects.create(name='ingest', + scheduling_unit_draft=su_draft, + specifications_template=ingest_task_template, + specifications_doc=ingest_task_template.get_default_json_document_for_schema()) + + # connect the pulsar pipeline and ingest + # there are two connections, 1) pulp summary, 2) pulsar profile + # this should result in two connections at SubtaskInput/SubtaskOutput level, but it does not (which is the bug) + trsel_pulp_ingest_draft1 = models.TaskRelationDraft.objects.create(producer=pulp_task_draft, + consumer=ingest_task_draft, + output_role=pulp_task_template.connector_types.get(iotype__value=models.IOType.Choices.OUTPUT.value, + dataformat__value=models.Dataformat.Choices.PULP_SUMMARY.value, + datatype__value=models.Datatype.Choices.QUALITY.value, + role__value=models.Role.Choices.ANY.value), + input_role=ingest_task_template.connector_types.get(iotype__value=models.IOType.Choices.INPUT.value, + dataformat__value=models.Dataformat.Choices.PULP_SUMMARY.value, + datatype__value=models.Datatype.Choices.QUALITY.value, + role__value=models.Role.Choices.ANY.value), + selection_template=trsel_template, + selection_doc=trsel_template.get_default_json_document_for_schema()) + + + trsel_pulp_ingest_draft2 = 
models.TaskRelationDraft.objects.create(producer=pulp_task_draft, + consumer=ingest_task_draft, + output_role=pulp_task_template.connector_types.get(iotype__value=models.IOType.Choices.OUTPUT.value, + dataformat__value=models.Dataformat.Choices.PULP_ANALYSIS.value, + datatype__value=models.Datatype.Choices.PULSAR_PROFILE.value, + role__value=models.Role.Choices.ANY.value), + input_role=ingest_task_template.connector_types.get(iotype__value=models.IOType.Choices.INPUT.value, + dataformat__value=models.Dataformat.Choices.PULP_ANALYSIS.value, + datatype__value=models.Datatype.Choices.PULSAR_PROFILE.value, + role__value=models.Role.Choices.ANY.value), + selection_template=trsel_template, + selection_doc=trsel_template.get_default_json_document_for_schema()) + + # convert to blueprint, ingest_permission_required = False + su_blueprint = create_scheduling_unit_blueprint_and_tasks_and_subtasks_from_scheduling_unit_draft(su_draft) + su_blueprint.ingest_permission_required = False + su_blueprint.save() + + # fetch the created subtasks + obs_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint=su_blueprint, specifications_template__type__value='observation') + pulp_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint=su_blueprint, specifications_template__type__value='pipeline') + ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint=su_blueprint, specifications_template__type__value='ingest') + + # scheduling the observation should succeed and create the obs output dataproducts + schedule_subtask(obs_subtask) + set_subtask_state_following_allowed_transitions(obs_subtask, 'finished') + + # scheduling the pipeline should succeed and create the pulp output dataproducts + schedule_subtask(pulp_subtask) + set_subtask_state_following_allowed_transitions(pulp_subtask, 'finished') + + # scheduling ingest should succeed and take all pulp's output_dataproducts as input via two SubtaskInput/Outputs + schedule_subtask(ingest_subtask) + + + + class SAPTest(unittest.TestCase): """ diff --git a/SAS/TMSS/backend/test/t_scheduling_units.py b/SAS/TMSS/backend/test/t_scheduling_units.py index 90415b654f7a4fd18a9901a4e2fbe9b0c04d1012..b5d7eb0e6cc925c535cd607deefd6a10878b7439 100644 --- a/SAS/TMSS/backend/test/t_scheduling_units.py +++ b/SAS/TMSS/backend/test/t_scheduling_units.py @@ -615,6 +615,10 @@ class SchedulingUnitBlueprintIndirectModificationsTestCase(unittest.TestCase): client.schedule_subtask(subtask_pipe['id'], datetime.utcnow()) pipe_input_dataproducts = client.get_subtask_input_dataproducts(subtask_pipe['id']) self.assertTrue(len(pipe_input_dataproducts) > 0) + + # fetch obs_output_dataproducts again, as they "changed" now that the pipeline was scheduled: they contain a reference to a consumer (the subtaskinput of the pipeline) + obs_output_dataproducts = client.get_subtask_output_dataproducts(subtask_obs['id']) + self.assertTrue(len(obs_output_dataproducts) > 0) self.assertEqual(obs_output_dataproducts, pipe_input_dataproducts) # and simulate that it runs and errors @@ -641,6 +645,10 @@ class SchedulingUnitBlueprintIndirectModificationsTestCase(unittest.TestCase): # check the copy's dataproducts copy_subtask_pipe_input_dataproducts = client.get_subtask_input_dataproducts(copy_subtask_pipe['id']) self.assertTrue(len(copy_subtask_pipe_input_dataproducts) > 0) + + # fetch obs_output_dataproducts and pipe_input_dataproducts again, as they "changed" now that the copy-pipeline was scheduled: they contain a reference to a consumer (the
subtaskinput of the pipeline) + obs_output_dataproducts = client.get_subtask_output_dataproducts(subtask_obs['id']) + pipe_input_dataproducts = client.get_subtask_input_dataproducts(subtask_pipe['id']) self.assertEqual(pipe_input_dataproducts, copy_subtask_pipe_input_dataproducts) self.assertEqual(obs_output_dataproducts, copy_subtask_pipe_input_dataproducts) diff --git a/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py index f784b6a6586a5ad5158776f1bb1373b8613a41bd..cfe84258a01a32591c74ec1d71a520791430eb44 100755 --- a/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py +++ b/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py @@ -478,8 +478,8 @@ class SubtaskInputTestCase(unittest.TestCase): test_data_creator.wipe_cache() cls.subtask_data = test_data_creator.Subtask() cls.subtask_url = test_data_creator.post_data_and_get_url(cls.subtask_data, '/subtask/') - cls.task_relation_blueprint_data = test_data_creator.TaskRelationBlueprint() - cls.task_relation_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_relation_blueprint_data, '/task_relation_blueprint/') + cls.task_connector_type_data = test_data_creator.TaskConnectorType() + cls.task_connector_type_url = test_data_creator.post_data_and_get_url(cls.task_connector_type_data, '/task_connector_type/') cls.dataproduct_urls = [test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/'), test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')] cls.subtask_output_data = test_data_creator.SubtaskOutput() cls.subtask_output_url = test_data_creator.post_data_and_get_url(cls.subtask_output_data, '/subtask_output/') @@ -494,7 +494,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_input/1234321/', 404) def test_subtask_input_POST_and_GET(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -502,12 +502,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) def test_subtask_input_PUT_invalid_raises_error(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) PUT_and_assert_expected_response(self, BASE_URL + '/subtask_input/9876789876/', 
sti_test_data, 404, {}) def test_subtask_input_PUT(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -515,12 +515,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) # PUT new values, verify - sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) PUT_and_assert_expected_response(self, url, sti_test_data2, 200, sti_test_data2) GET_OK_and_assert_equal_expected_response(self, url, sti_test_data2) def test_subtask_input_PATCH(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -544,7 +544,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_subtask_input_DELETE(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -560,7 +560,7 @@ class SubtaskInputTestCase(unittest.TestCase): task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'), 
specifications_template_url=self.subtask_data['specifications_template'], specifications_doc=self.subtask_data['specifications_doc']), '/subtask/') - sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -572,29 +572,10 @@ class SubtaskInputTestCase(unittest.TestCase): # assert item gone GET_and_assert_equal_expected_code(self, url, 404) - def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self): - # make new task_relation_blueprint instance, but reuse related data for speed - task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(draft_url=self.task_relation_blueprint_data['draft'], template_url=self.task_relation_blueprint_data['selection_template'], - input_role_url=self.task_relation_blueprint_data['input_role'], output_role_url=self.task_relation_blueprint_data['output_role'], - consumer_url=self.task_relation_blueprint_data['consumer']), '/task_relation_blueprint/') - sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) - - # POST new item, verify - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) - - # DELETE dependency and check it's gone - DELETE_and_assert_gone(self, task_relation_blueprint_url) - - # assert item reference is set null - expected_data = dict(sti_test_data) - expected_data['task_relation_blueprint'] = None - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - def test_subtask_input_PROTECT_behavior_on_producer_deleted(self): # make new subtask_output_url instance, but reuse related data for speed subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=self.subtask_output_data['subtask']), '/subtask_output/') - sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, task_relation_selection_template_url=self.task_relation_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, task_relation_selection_template_url=self.task_relation_selection_template_url) # POST with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -611,7 +592,7 @@ class SubtaskInputTestCase(unittest.TestCase): 
task_relation_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') sti_test_data = test_data_creator.SubtaskInput(task_relation_selection_template_url=task_relation_selection_template_url, subtask_url=self.subtask_url, - task_relation_blueprint_url=self.task_relation_blueprint_url, + input_role_url=self.task_connector_type_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url) diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py index 47978f895f9d8f514165e318c788e20d1e129921..423c148756a1c3a025b31c4fba1edce594ea1a91 100755 --- a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py @@ -3239,6 +3239,18 @@ class SchedulingUnitObservingStrategyTemplateTestCase(unittest.TestCase): class SubmitTriggerTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls) -> None: + # wipe all SchedulingUnitObservingStrategyTemplate and all the referenced models + # because other tests may have submitted erroneous templates which cannot be submitted as trigger. + models.TaskBlueprint.objects.all().delete() + models.TaskDraft.objects.all().delete() + models.SchedulingUnitBlueprint.objects.all().delete() + models.SchedulingUnitDraft.objects.all().delete() + models.SchedulingUnitObservingStrategyTemplate.objects.all().delete() + # populate the templates again from disk which are the ones under test. + tmss_test_env.populate_schemas_and_connectors() + def test_submit_trigger_for_all_strategy_templates(self): '''Try to create a scheduling_unit_draft/blueprint for each known strategy template via a trigger submission.''' project = models.Project.objects.create(**Project_test_data(can_trigger=True)) @@ -3246,11 +3258,8 @@ class SubmitTriggerTestCase(unittest.TestCase): with tmss_test_env.create_tmss_client() as client: for strategy_template in models.SchedulingUnitObservingStrategyTemplate.objects.all(): - if strategy_template.template_doc_complete_with_defaults.get('scheduling_constraints_template', '') != 'constraints': - logger.info("test_submit_trigger_for_all_strategy_templates: skipping template '%s' which has no known scheduling_constraints_template which can be used while dynamic scheduling", strategy_template.name) - continue - - logger.info("test_submit_trigger_for_all_strategy_templates: checking template '%s'", strategy_template.name) + logger.info("--------------------------------------------------------------------------------------------") + logger.info("TEST_SUBMIT_TRIGGER_FOR_ALL_STRATEGY_TEMPLATES: checking template '%s'", strategy_template.name) trigger_doc = client.get_trigger_specification_doc_for_scheduling_unit_observing_strategy_template(strategy_template.name, strategy_template.version) trigger_doc['scheduling_set_id'] = scheduling_set.id diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py index b22cbee7d33ad59ef21254c45e8c3a5b00aa8fae..155e9a982bf7773c466889c61afab2a51fc7ed57 100644 --- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py @@ -373,7 +373,7 @@ def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.Subtas selection_doc = get_default_json_object_for_schema(selection_template.schema) return {"subtask": subtask, - "task_relation_blueprint": 
models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()), + "input_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), "producer": producer, "selection_doc": selection_doc, "selection_template": selection_template, diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py index 71bf02695ce6f4256cc44ab2f4435fe76a743020..2965cb508e6192723e43f595e4a7f68dc21d9888 100644 --- a/SAS/TMSS/backend/test/tmss_test_data_rest.py +++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py @@ -765,12 +765,12 @@ class TMSSRESTTestDataCreator(): "corrupted_since": datetime.utcnow().isoformat(), "tags": ['tmss', 'testing']} - def SubtaskInput(self, subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_output_url=None, task_relation_selection_template_url=None, selection_doc=None): + def SubtaskInput(self, subtask_url=None, input_role_url=None, dataproduct_urls=None, subtask_output_url=None, task_relation_selection_template_url=None, selection_doc=None): if subtask_url is None: subtask_url = self.cached_subtask_url - if task_relation_blueprint_url is None: - task_relation_blueprint_url = self.post_data_and_get_url(self.TaskRelationBlueprint(), '/task_relation_blueprint/') + if input_role_url is None: + input_role_url = self.post_data_and_get_url(self.TaskConnectorType(), '/task_connector_type/') if dataproduct_urls is None: dataproduct_urls = [self.cached_dataproduct_url] @@ -785,7 +785,7 @@ class TMSSRESTTestDataCreator(): selection_doc = self.get_response_as_json_object(task_relation_selection_template_url+'/default') return {"subtask": subtask_url, - "task_relation_blueprint": task_relation_blueprint_url, + "input_role": input_role_url, "producer": subtask_output_url, "dataproducts": dataproduct_urls, "selection_doc": selection_doc, diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/DynamicScheduler.js b/SAS/TMSS/frontend/tmss_webapp/src/components/DynamicScheduler.js index 6f7edff8e8e610728aaf09ace07b56a84ac70de4..2679479fdd5e686d47156976f11d374ae0b80b91 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/DynamicScheduler.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/DynamicScheduler.js @@ -43,6 +43,9 @@ export default class DynamicScheduler extends Component { } else { appGrowl.show({severity: 'error', summary: 'Error', detail: 'Dynamic Scheduling is not updated successfully.'}); } + if(this.props.callBack) { + this.props.callBack(this.currentStatus); + } this.setState({dsStatus: this.currentStatus, showDialog: false}); } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js index e24af0a09d3c76465b94d5fef8a7aadb3a704204..93a540826eaab0bcdfa3ef5c167f251207cbdf43 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js @@ -2237,7 +2237,7 @@ function Table(props) { /* Take only the original values passed to the component */ selectedRows = _.map(selectedRows, 'original'); /* Callback the parent function if available to pass the selected records on selection */ - if (parentCBonSelection && selectedRows != selectedClickedRow && selectedRows.length != 0) { + if (parentCBonSelection && selectedRows /* != selectedClickedRow && selectedRows.length != 0 */) { parentCBonSelection(selectedRows) } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js
index fdd553470409b4baabaa44d163371107e655918b..a10e7781758b1c70b410114790ede0cc202b0875 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js
@@ -44,7 +44,7 @@ export default ({ title, subTitle, actions, ...props}) => {
             if(action.type === 'button') {
                 return (
                     <button className="p-link" key={index} title={action.title || ''}>
-                        <i className={`fa ${action.disabled?'fa-disabled':''} ${action.icon}`}
+                        <i className={`fa ${action.disabled?'fa-disabled':''} ${action.icon} ${action.classes}`}
                             onMouseOver={(e) => action.disabled?'':onButtonMouseOver(e, action)}
                             onClick={(e) => action.disabled?'':onButtonClick(e, action)} />
                     </button>
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
index fe83753908167bd00d16a39fc386151ed528d9a8..c30f5159c8d22fa69835e14a9bde55d262bc0dba 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
@@ -479,4 +479,18 @@ body .p-multiselect-panel .p-multiselect-header .p-multiselect-filter-container
     margin-top: 5px;
     background-color: #dee0e1;
     cursor: pointer;
-}
\ No newline at end of file
+}
+
+.dynamic-scheduling-off {
+    i {
+        color: firebrick !important;
+    }
+    cursor: default;
+}
+
+.dynamic-scheduling-on {
+    i {
+        color: #26be26 !important;
+    }
+    cursor: default;
+}
\ No newline at end of file
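The timeline views below read the dynamic-scheduling flag once on mount and mirror it in the page header via the new dsStatus state. For reference, a one-shot read of that flag could look like this in Python; the endpoint path and payload shape are assumptions, not the documented TMSS API:

```python
# Illustrative only: the '/setting/dynamic_scheduling/' path and the
# {'value': ...} payload are guesses at what UtilService.getDynamicSchedulerStatus hits.
import requests

def get_dynamic_scheduler_status(base_url: str) -> bool:
    """Return True if dynamic scheduling is switched on, else False."""
    try:
        response = requests.get(f"{base_url}/setting/dynamic_scheduling/", timeout=5)
        response.raise_for_status()
        payload = response.json()
        # fall back to 'off' when the payload carries no usable value,
        # mirroring the 'var status = false' default in getDSStatus()
        return bool(payload.get('value', False))
    except (requests.RequestException, ValueError):
        return False
```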
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
index 72822aff42d8294eaee630d059494ca43767a725..f2b7ba1233bd59b02d033d51ab5ef6dbd6666a6a 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
@@ -100,6 +100,7 @@ export class TimelineView extends Component {
             datasetStartTime: null, datasetEndTime:null,
             showDialog: false,
             popPosition: {display: 'none'},
+            dsStatus: false,    // To show the Dynamic Scheduling status in timeline/week view page header
             isOnSkyView: this.timelineUIAttributes.isOnSkyView || false,    // Flag to show on sky view in normal timeline view
         }
         this.STATUS_BEFORE_SCHEDULED = ['defining', 'defined', 'schedulable'];  // Statuses before scheduled to get station_group
@@ -136,9 +137,35 @@ export class TimelineView extends Component {
         this.showReservationBlocks = this.showReservationBlocks.bind(this);
         this.showDymanicSchedulerPopup = this.showDymanicSchedulerPopup.bind(this);
         this.close = this.close.bind(this);
+        this.getDSStatus = this.getDSStatus.bind(this);
+        this.updateDSStatus = this.updateDSStatus.bind(this);
+    }
+
+    /**
+     * Get Dynamic Scheduling status
+     */
+    async getDSStatus() {
+        await UtilService.getDynamicSchedulerStatus()
+            .then(response => {
+                var status = false;
+                if(response) {
+                    status = response.value;
+                }
+                this.setState({dsStatus: status});
+            });
+    }
+
+    /**
+     * Set the current status when Dynamic Scheduling is switched On/Off
+     *
+     * @param {boolean} status
+     */
+    async updateDSStatus(status) {
+        this.setState({dsStatus: status});
     }
 
     async componentDidMount() {
+        this.getDSStatus();
         this.getStatusList();
         const permission = await AuthUtil.getUserRolePermission();
         const timelinePermission = permission.userRolePermission.timeline;
@@ -1344,6 +1371,10 @@ export class TimelineView extends Component {
                 <TieredMenu className="app-header-menu" model={this.state.menuOptions} popup ref={el => this.optionsMenu = el}
                 />
                 <PageHeader location={this.props.location} title={'Scheduling Units - Timeline View'}
                     actions={[
+                        { icon: this.state.dsStatus?'fas fa-play-circle':'fas fa-stop-circle',
+                          title: this.state.dsStatus?'Dynamic Scheduling is On':'Dynamic Scheduling is Off',
+                          classes: this.state.dsStatus?'dynamic-scheduling-on':'dynamic-scheduling-off',
+                          type: 'link' },
                         { icon: 'fa-cog', title: 'Dynamic Scheduling Settings', type: 'button', actOn: 'click', props: { callback: this.showDymanicSchedulerPopup }, },
                         { icon: 'fa-bars', title: '',
@@ -1663,7 +1694,7 @@ export class TimelineView extends Component {
                 {this.state.showDialog &&
                     <div className="p-grid" data-testid="confirm_dialog">
                         <CustomDialog type="success" visible={this.state.showDialog} width="30vw" showIcon={false}
-                            header="Dynamic Scheduling Settings" message={<DynamicScheduler />}
+                            header="Dynamic Scheduling Settings" message={<DynamicScheduler callBack={this.updateDSStatus} />}
                             content={''}
                             actions={ [
                                 {id:"no", title: 'Close', callback: this.close} ]}
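TimelineView hands updateDSStatus to the settings dialog as the callBack prop, so the dialog can push its toggle result back up and the header icon stays in sync without polling. The same inversion of control, reduced to plain Python as a sketch (class and method names here are illustrative, not the React components):

```python
# Minimal sketch of the callback-prop pattern used above: the dialog
# notifies an optional observer after each toggle, just as
# DynamicScheduler invokes this.props.callBack(this.currentStatus).
from typing import Callable, Optional

class SchedulerDialog:
    def __init__(self, call_back: Optional[Callable[[bool], None]] = None):
        self.call_back = call_back
        self.current_status = False

    def toggle(self, status: bool) -> None:
        self.current_status = status
        if self.call_back:              # notify only if a parent subscribed
            self.call_back(status)

class TimelineHeader:
    def __init__(self):
        self.ds_status = False          # mirrors this.state.dsStatus

    def update_ds_status(self, status: bool) -> None:
        self.ds_status = status         # what updateDSStatus() does via setState

# wiring, as the views do with <DynamicScheduler callBack={this.updateDSStatus} />
header = TimelineHeader()
dialog = SchedulerDialog(call_back=header.update_ds_status)
dialog.toggle(True)
assert header.ds_status is True
```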
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
index a2ba1f2ed222b2acbd015e21d00f6b712b332473..fd94361b6ea04199453de3c82a438342ebb813df 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
@@ -80,6 +80,7 @@ export class WeekTimelineView extends Component {
             showDialog: false,
             userrole: AuthStore.getState(),
             popPosition: {display: 'none'},
+            dsStatus: false,    // To show the Dynamic Scheduling status in timeline/week view page header
             isOnSkyView: this.timelineUIAttributes.isOnSkyWeekView || false,    // Flag to show on sky view in week view
         }
         this.STATUS_BEFORE_SCHEDULED = ['defining', 'defined', 'schedulable'];  // Statuses before scheduled to get station_group
@@ -106,9 +107,35 @@ export class WeekTimelineView extends Component {
         this.updateSchedulingUnit = this.updateSchedulingUnit.bind(this);
         this.showDymanicSchedulerPopup = this.showDymanicSchedulerPopup.bind(this);
         this.close = this.close.bind(this);
+        this.getDSStatus = this.getDSStatus.bind(this);
+        this.updateDSStatus = this.updateDSStatus.bind(this);
+    }
+
+    /**
+     * Get Dynamic Scheduling status
+     */
+    async getDSStatus() {
+        await UtilService.getDynamicSchedulerStatus()
+            .then(response => {
+                var status = false;
+                if(response) {
+                    status = response.value;
+                }
+                this.setState({dsStatus: status});
+            });
+    }
+
+    /**
+     * Set the current status when Dynamic Scheduling is switched On/Off
+     *
+     * @param {boolean} status
+     */
+    async updateDSStatus(status) {
+        this.setState({dsStatus: status});
     }
 
     async componentDidMount() {
+        this.getDSStatus();
         this.getStatusList();
         const permission = await AuthUtil.getUserRolePermission();
         const timelinePermission = permission.userRolePermission.timeline;
@@ -1067,6 +1094,10 @@ export class WeekTimelineView extends Component {
                 <TieredMenu className="app-header-menu" model={this.state.menuOptions} popup ref={el => this.optionsMenu = el} />
                 <PageHeader location={this.props.location} title={'Scheduling Units - Week View'}
                     actions={[
+                        { icon: this.state.dsStatus?'fas fa-play-circle':'fas fa-stop-circle',
+                          title: this.state.dsStatus?'Dynamic Scheduling is On':'Dynamic Scheduling is Off',
+                          classes: this.state.dsStatus?'dynamic-scheduling-on':'dynamic-scheduling-off',
+                          type: 'link' },
                         { icon: 'fa-cog', title: 'Dynamic Scheduling Settings', type: 'button', actOn: 'click', props: { callback: this.showDymanicSchedulerPopup }, },
                         { icon: 'fa-bars', title: '',
                             type: 'button', actOn: 'mouseOver', props: { callback: this.showOptionMenu }, },
@@ -1273,7 +1304,7 @@ export class WeekTimelineView extends Component {
                 {this.state.showDialog &&
                     <div className="p-grid" data-testid="confirm_dialog">
                         <CustomDialog type="success" visible={this.state.showDialog} width="30vw" showIcon={false}
-                            header="Dynamic Scheduling Settings" message={<DynamicScheduler />}
+                            header="Dynamic Scheduling Settings" message={<DynamicScheduler callBack={this.updateDSStatus} />}
                             content={''}
                             actions={ [
                                 {id:"no", title: 'Close', callback: this.close} ]}