diff --git a/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py b/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
index 6ed29be0f7ab44d05cb9086c9ccc26d4e5b65ee7..251ec28c032c4710d275a10172f6e992c2b7e197 100644
--- a/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
+++ b/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
@@ -184,14 +184,16 @@ class TMSSPGListener(PostgresListener):
         # ... and also send status change and object update events for the parent task, and schedulingunit,
         # because their status is implicitly derived from their subtask(s)
         # send both object.updated and status change events
-        self.onTaskBlueprintUpdated( {'id': subtask.task_blueprint.id})
-        self._sendNotification(TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX+'.'+subtask.task_blueprint.status.capitalize(),
-                               {'id': subtask.task_blueprint.id, 'status': subtask.task_blueprint.status})
-
-        self.onSchedulingUnitBlueprintUpdated( {'id': subtask.task_blueprint.scheduling_unit_blueprint.id})
-        self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX+'.'+subtask.task_blueprint.scheduling_unit_blueprint.status.capitalize(),
-                               {'id': subtask.task_blueprint.scheduling_unit_blueprint.id, 'status': subtask.task_blueprint.scheduling_unit_blueprint.status})
-                
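+        # note: with the many-to-many task_blueprints relation a subtask can have several parent tasks,
+        # so emit the events for each linked task blueprint and its scheduling unit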
+        for td in subtask.task_blueprints.all():
+            self.onTaskBlueprintUpdated( {'id': td.id})
+            self._sendNotification(TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX+'.'+td.status.capitalize(),
+                                   {'id': td.id, 'status': td.status})
+
+            self.onSchedulingUnitBlueprintUpdated( {'id': td.scheduling_unit_blueprint.id})
+            self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX+'.'+td.scheduling_unit_blueprint.status.capitalize(),
+                                   {'id': td.scheduling_unit_blueprint.id, 'status': td.scheduling_unit_blueprint.status})
+
 
     def onTaskBlueprintInserted(self, payload = None):
         self._sendNotification(TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX+'.Created', payload)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
index f78dbf0b456d53211c9006711637e94ff63d47f2..2e4d36cb4e1dcbe6eb2887a1f22da4a6c3ce959f 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
@@ -46,7 +46,7 @@ def _convert_to_parset_dict_for_observationcontrol_schema(subtask: models.Subtas
     parset["Observation.tmssID"] = subtask.pk
     parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize()
     parset["Observation.processSubtype"] = "Beam Observation"
-    parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name
+    parset["Observation.Campaign.name"] = subtask.task_blueprints.first().scheduling_unit_blueprint.draft.scheduling_set.project.name  # todo: first?
     parset["Observation.startTime"] = formatDatetime(subtask.start_time) if isinstance(subtask.start_time, datetime) else subtask.start_time
     parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) if isinstance(subtask.stop_time, datetime) else subtask.stop_time
     parset["Observation.VirtualInstrument.minimalNrStations"] = 1  # maybe not mandatory?
@@ -167,8 +167,8 @@ def _convert_to_parset_dict_for_pipelinecontrol_schema(subtask: models.Subtask)
     parset["Observation.processSubtype"] = "Averaging Pipeline"
     parset["Observation.ObservationControl.PythonControl.pythonProgram"] = "preprocessing_pipeline.py"
     parset["Observation.ObservationControl.PythonControl.softwareVersion"] = ""
-    parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name
-    parset["Observation.Scheduler.taskName"] = subtask.task_blueprint.name
+    parset["Observation.Campaign.name"] = subtask.task_blueprints.first().scheduling_unit_blueprint.draft.scheduling_set.project.name  # todo: first?
+    parset["Observation.Scheduler.taskName"] = subtask.task_blueprints.first().name   # todo: first?
     parset["Observation.Scheduler.predecessors"] = []
     parset["Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name
     parset["Observation.Cluster.ProcessingCluster.clusterPartition"] = 'cpu'
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
index 76e057c97456f40d5b35c670c27f1f60d9d88ccc..9563790f65f2396584ca5ff8ed81528e52d05a68 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
@@ -161,7 +161,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
                                          process_map=process_map,
                                          channelwidth_frequency=None, # NA any more ('BlueGene compatibility' see comment in LTA-SIP.xsd)
                                          channelwidth_frequencyunit=constants.FREQUENCYUNIT_HZ,  # fixed
-                                         observationdescription=subtask.task_blueprint.description,
+                                         observationdescription=subtask.task_blueprints.first().description,   # todo: first?
                                          channelspersubband=0,  # NA any more ('BlueGene compatibility' see comment in LTA-SIP.xsd)
                                          subarraypointings=subarraypointings,
                                          transientbufferboardevents=None  # fixed
@@ -176,7 +176,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
             raise TMSSException("There seems to be no subtask input associated to your pipeline subtask id %s. Please define what data the pipeline processed." % subtask.id)
 
         pipeline_map = siplib.PipelineMap(
-                name=subtask.task_blueprint.name,
+                name=subtask.task_blueprints.first().name,   # todo: first?
                 version='unknown',  # todo from subtask.specifications_doc? from feedback (we have feedback and storagewriter versions there, not pipeline version or sth)?
                 sourcedata_identifiers=sourcedata_identifiers,
                 process_map=process_map)
@@ -187,8 +187,8 @@ def create_sip_representation_for_subtask(subtask: Subtask):
                 numberofcorrelateddataproducts=get_number_of_dataproducts_of_type(subtask, Dataformat.Choices.MEASUREMENTSET.value),
                 frequencyintegrationstep=subtask.specifications_doc.get('demixer',{}).get('frequency_steps', 0),
                 timeintegrationstep=subtask.specifications_doc.get('demixer',{}).get('time_step', 0),
-                flagautocorrelations=subtask.task_blueprint.specifications_doc["flag"]["autocorrelations"],
-                demixing=True if 'demix' in subtask.task_blueprint.specifications_doc else False
+                flagautocorrelations=subtask.task_blueprints.first().specifications_doc["flag"]["autocorrelations"],     # todo: first?
+                demixing='demix' in subtask.task_blueprints.first().specifications_doc    # todo: first?
             )
         # todo: distinguish and create other pipeline types. Probably most of these can be filled in over time as needed,
         #  but they are not required for UC1. Here are stubs to start from for the other types the LTA supports:
@@ -278,7 +278,7 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
         logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_type))
 
     try:
-        dataproduct_fileformat = fileformat_map[dataproduct.producer.subtask.task_blueprint.consumed_by.first().dataformat.value] # todo same as with type? Why is this not with the data? Why is this so different from the LTA datamodel?
+        dataproduct_fileformat = fileformat_map[dataproduct.producer.subtask.task_blueprints.first().consumed_by.first().dataformat.value] # todo same as with type? Why is this not with the data? Why is this so different from the LTA datamodel?     # todo: first?
     except Exception as err:
         dataproduct_fileformat = constants.FILEFORMATTYPE_UNDOCUMENTED
         logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_fileformat))
@@ -463,7 +463,7 @@ def generate_sip_for_dataproduct(dataproduct: Dataproduct) -> siplib.Sip:
         sip_dataproduct = create_sip_representation_for_dataproduct(dataproduct)
 
     # Gather project details
-    project = dataproduct.producer.subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project
+    project = dataproduct.producer.subtask.task_blueprints.first().scheduling_unit_blueprint.draft.scheduling_set.project     # todo: first?
     project_code = project.name
     project_primaryinvestigator = 'project_primaryinvestigator'
     project_contactauthor = 'project_contactauthor'
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
index 5d651ab5ebf0e0be93898d158eb1201bf02c3b11..9451ac7618f5e866bfb8388aa7d665ce0596195b 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
@@ -147,7 +147,8 @@ class Subtask(BasicCommon):
     stop_time = DateTimeField(null=True, help_text='Stop this subtask at the specified time (NULLable).')
     state = ForeignKey('SubtaskState', null=False, on_delete=PROTECT, related_name='task_states', help_text='Subtask state (see Subtask State Machine).')
     specifications_doc = JSONField(help_text='Final specifications, as input for the controller.')
-    task_blueprints = ManyToManyField('TaskBlueprint', related_name='subtasks', null=True, help_text='Task Blueprint to which this Subtask belongs.')
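+    # note: null has no effect on a ManyToManyField; blank=True is what allows the set of linked blueprints to be empty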
+    task_blueprints = ManyToManyField('TaskBlueprint', related_name='subtasks', blank=True, help_text='Task Blueprint to which this Subtask belongs.')
     specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.')
     do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')
     cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).')
@@ -174,11 +174,11 @@ class Subtask(BasicCommon):
         '''get the specified (or estimated) duration of this subtask based on the specified task duration and the subtask type'''
         if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
             # observations have a specified duration, so grab it from the spec.
-            return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', 0))
+            return timedelta(seconds=self.task_blueprints.first().specifications_doc.get('duration', 0))   # todo: first?
 
         if self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
             # pipelines usually do not have a specified duration, so make a guess (half the obs duration?).
-            return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2))
+            return timedelta(seconds=self.task_blueprints.first().specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2))   # todo: first?
 
         # other subtasktypes usually depend on cpu/data/network etc. So, make a guess (for now)
         return timedelta(minutes=5)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
index a242101c40860dbf1a76f7af4479ca5c9d856ff3..e6d77be9dace6efd97631860d011e8bf6f9e4098 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
@@ -599,7 +599,7 @@ def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingU
     for task_blueprint in scheduling_unit.task_blueprints.all():
         defined_independend_subtasks = task_blueprint.subtasks.filter(state__value='defined').filter(inputs=None).all()
         for subtask in defined_independend_subtasks:
-            update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprint.relative_start_time)
+            update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprints.first().relative_start_time)   # todo: first?
 
 
 def update_start_time_and_shift_successors_until_after_stop_time(subtask: Subtask, start_time: datetime):
@@ -617,9 +617,9 @@ def shift_successors_until_after_stop_time(subtask: Subtask):
 
         # ... but adjust it if there is a scheduling_relation with an offset.
         # so, check if these successive subtasks have different task_blueprint parents
-        if subtask.task_blueprint.id != successor.task_blueprint.id:
-            relations = (TaskSchedulingRelationBlueprint.objects.filter(first=subtask.task_blueprint, second=successor.task_blueprint) |
-                         TaskSchedulingRelationBlueprint.objects.filter(first=successor.task_blueprint, second=subtask.task_blueprint)).all()
+        if subtask.task_blueprints.first().id != successor.task_blueprints.first().id:    # todo: first?
+            relations = (TaskSchedulingRelationBlueprint.objects.filter(first=subtask.task_blueprints.first(), second=successor.task_blueprints.first()) |         # todo: first?
+                         TaskSchedulingRelationBlueprint.objects.filter(first=successor.task_blueprints.first(), second=subtask.task_blueprints.first())).all()    # todo: first?
             if relations:
                 # there should be only one scheduling relation between the tasks
                 relation = relations[0]
@@ -983,7 +983,7 @@ def schedule_observation_subtask(observation_subtask: Subtask):
     dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="empty")
     subtask_output = observation_subtask.outputs.first() # TODO: make proper selection, not default first()
     directory = "/data/%s/%s/L%s/uv" % ("projects" if isProductionEnvironment() else "test-projects",
-                                        observation_subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name,
+                                        observation_subtask.task_blueprints.first().scheduling_unit_blueprint.draft.scheduling_set.project.name,    # todo: first?
                                         observation_subtask.id)
 
     for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings']):
@@ -1146,7 +1146,7 @@ def schedule_ingest_subtask(ingest_subtask: Subtask):
     ingest_subtask.save()
 
     # check permission pre-requisites
-    scheduling_unit_blueprint = ingest_subtask.task_blueprint.scheduling_unit_blueprint
+    scheduling_unit_blueprint = ingest_subtask.task_blueprints.first().scheduling_unit_blueprint    # todo: first?
     if scheduling_unit_blueprint.ingest_permission_required:
         if scheduling_unit_blueprint.ingest_permission_granted_since is None or scheduling_unit_blueprint.ingest_permission_granted_since > datetime.utcnow():
             raise SubtaskSchedulingException("Cannot schedule ingest subtask id=%d because it requires explicit permission and the permission has not been granted (yet)" % (ingest_subtask.pk,))
diff --git a/SAS/TMSS/backend/test/t_scheduling.py b/SAS/TMSS/backend/test/t_scheduling.py
index 82da39dc54ffcaec9ee4bbb698eb4030c4586197..115d918a25e975ef8539ff09d63bc67f8bc13fda 100755
--- a/SAS/TMSS/backend/test/t_scheduling.py
+++ b/SAS/TMSS/backend/test/t_scheduling.py
@@ -71,8 +71,11 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
     task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(specifications_template=models.TaskTemplate.objects.get(name='target observation' if subtask_type_value=='observation' else 'preprocessing pipeline')))
     subtask_template_obj = models.SubtaskTemplate.objects.get(name="%s control" % subtask_type_value)
     subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value)
-    subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj, task_blueprint=task_blueprint)
-    return models.Subtask.objects.create(**subtask_data)
+    subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj)
+    subtask = models.Subtask.objects.create(**subtask_data)
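+    # the many-to-many task_blueprints relation can only be populated once the subtask has been saved (i.e. has a primary key)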
+    subtask.task_blueprints.set([task_blueprint])
+    return subtask
 
 
 def create_reserved_stations_for_testing(station_list):
@@ -124,7 +126,7 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow()+timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
@@ -155,7 +157,7 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
@@ -192,7 +194,7 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
@@ -228,7 +230,7 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow()+timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']),
@@ -254,7 +256,7 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
                                                          specifications_doc=obs_spec,
                                                          cluster_url=cluster_url,
-                                                         task_blueprint_url=obs_task_blueprint['url'])
+                                                         task_blueprint_urls=[obs_task_blueprint['url']])
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
             obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
@@ -269,7 +271,7 @@ class SchedulingTest(unittest.TestCase):
 
             pipe_subtask_data = test_data_creator.Subtask(specifications_template_url=pipe_subtask_template['url'],
                                                           specifications_doc=pipe_spec,
-                                                          task_blueprint_url=pipe_task_blueprint['url'],
+                                                          task_blueprint_urls=[pipe_task_blueprint['url']],
                                                           cluster_url=cluster_url)
             pipe_subtask = test_data_creator.post_data_and_get_response_as_json_object(pipe_subtask_data, '/subtask/')
 
@@ -298,7 +300,7 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
                                                          specifications_doc=obs_spec,
                                                          cluster_url=cluster_url,
-                                                         task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'))
+                                                         task_blueprint_urls=[test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')])
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
             obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
@@ -310,7 +312,7 @@ class SchedulingTest(unittest.TestCase):
 
             ingest_subtask_data = test_data_creator.Subtask(specifications_template_url=ingest_subtask_template['url'],
                                                           specifications_doc=ingest_spec,
-                                                          task_blueprint_url=obs_subtask['task_blueprint'],
+                                                          task_blueprint_urls=obs_subtask['task_blueprints'],  # todo [0]?
                                                           cluster_url=cluster_url)
             ingest_subtask = test_data_creator.post_data_and_get_response_as_json_object(ingest_subtask_data, '/subtask/')
 
@@ -322,7 +324,7 @@ class SchedulingTest(unittest.TestCase):
                 client.set_subtask_status(predecessor['id'], 'finished')
             client.set_subtask_status(ingest_subtask['id'], 'defined')
 
-            task_blueprint = client.get_url_as_json_object(ingest_subtask['task_blueprint'])
+            task_blueprint = client.get_url_as_json_object(ingest_subtask['task_blueprints'][0])   # todo: [0]?
             schedulingunit_blueprint = client.get_url_as_json_object(task_blueprint['scheduling_unit_blueprint'])
 
             # first, make sure we need but do not have ingest persmission...
@@ -472,7 +474,7 @@ class SAPTest(unittest.TestCase):
             subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'],
                                                      specifications_doc=spec,
                                                      cluster_url = cluster_url,
-                                                     task_blueprint_url=task_blueprint['url'],
+                                                     task_blueprint_urls=[task_blueprint['url']],
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
                                                      stop_time=datetime.utcnow() + timedelta(minutes=15))
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
diff --git a/SAS/TMSS/backend/test/t_subtasks.py b/SAS/TMSS/backend/test/t_subtasks.py
index 88394f62284eb35fc3efd03adcbdfce2fc789557..529a448aaa1c5391954504a6bc3969c082f7e4c2 100755
--- a/SAS/TMSS/backend/test/t_subtasks.py
+++ b/SAS/TMSS/backend/test/t_subtasks.py
@@ -50,7 +50,10 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
     subtask_template_obj = create_subtask_template_for_testing(template_type)
     subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value)
     subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj)
-    return models.Subtask.objects.create(**subtask_data)
+    subtask = models.Subtask.objects.create(**subtask_data)
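+    # link a freshly created task blueprint via the many-to-many relation (can only be set after save)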
+    subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
+    return subtask
 
 
 def create_subtask_template_for_testing(template_type: object):
diff --git a/SAS/TMSS/backend/test/test_utils.py b/SAS/TMSS/backend/test/test_utils.py
index afed5e6e43fa3e19e5b836c5f9e37657184bda9a..8763c93a9dcf24126cd76773d0fd4e3d877667a9 100644
--- a/SAS/TMSS/backend/test/test_utils.py
+++ b/SAS/TMSS/backend/test/test_utils.py
@@ -697,7 +697,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
             self.create_output_dataproducts = create_output_dataproducts
 
         def need_to_handle(self, subtask: models.Subtask) -> bool:
-            if subtask.task_blueprint.scheduling_unit_blueprint.id != self.scheduling_unit_blueprint_id:
+            if subtask.task_blueprints.first().scheduling_unit_blueprint.id != self.scheduling_unit_blueprint_id:    # todo: first?
                 return False
 
             if subtask.specifications_template.type.value == models.SubtaskType.Choices.OBSERVATION.value and not self.handle_observations:
@@ -826,7 +826,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
                         output_dp.feedback_doc = feedback_doc
                         output_dp.save()
                 elif subtask.specifications_template.type.value == models.SubtaskType.Choices.INGEST.value:
-                    project_name = subtask.task_blueprint.draft.scheduling_unit_draft.scheduling_set.project.name
+                    project_name = subtask.task_blueprints.first().draft.scheduling_unit_draft.scheduling_set.project.name    # todo: first?
 
                     for output_dp in subtask.output_dataproducts:
                         try:
diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
index 616298e418636d097ac59dfdd4c0ff5acb80d0a0..757c0857371231f6e97a8080c1d06c015c9cda3b 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
@@ -361,11 +361,15 @@ def DataproductFeedbackTemplate_test_data() -> dict:
             "schema": minimal_json_schema(),
             "tags": ["TMSS", "TESTING"]}
 
-def SubtaskOutput_test_data(subtask: models.Subtask=None) -> dict:
+def SubtaskOutput_test_data(subtask: models.Subtask=None, task_blueprint: models.TaskBlueprint=None) -> dict:
     if subtask is None:
         subtask = models.Subtask.objects.create(**Subtask_test_data())
 
+    if task_blueprint is None:
+        task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+
     return {"subtask": subtask,
+            "task_blueprint": task_blueprint,
             "tags":[]}
 
 def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.SubtaskOutput=None, selection_doc=None, selection_template: models.TaskRelationSelectionTemplate=None) -> dict:
diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py
index e485e8f47ce746c0b63e08193e2dd61c501d6eb8..2ed92c329f7f23028e1f0a2b3bfab6ff47f760bb 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py
@@ -612,12 +612,12 @@ class TMSSRESTTestDataCreator():
             return self._cluster_url
 
 
-    def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None, state:str="defining", start_time: datetime=None, stop_time: datetime=None, raw_feedack:str =None):
+    def Subtask(self, cluster_url=None, task_blueprint_urls=None, specifications_template_url=None, specifications_doc=None, state:str="defining", start_time: datetime=None, stop_time: datetime=None, raw_feedack:str =None):
         if cluster_url is None:
             cluster_url = self.cached_cluster_url
     
-        if task_blueprint_url is None:
-            task_blueprint_url = self.cached_task_blueprint_url
+        if task_blueprint_urls is None:
+            task_blueprint_urls = [self.cached_task_blueprint_url]
     
         if specifications_template_url is None:
             specifications_template_url = self.cached_subtask_template_url
@@ -641,7 +641,7 @@ class TMSSRESTTestDataCreator():
                 "stop_time": stop_time,
                 "state": self.django_api_url + '/subtask_state/%s' % (state,),
                 "specifications_doc": specifications_doc,
-                "task_blueprint": task_blueprint_url,
+                "task_blueprints": task_blueprint_urls,
                 "specifications_template": specifications_template_url,
                 "tags": ["TMSS", "TESTING"],
                 "do_cancel": datetime.utcnow().isoformat(),