diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py
index da7366fec6ee8b11140ea7121902d273f8f83552..1af1533c1b7d3ee5f54730671268224d64e7be0e 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py
@@ -121,7 +121,7 @@ def _get_completion_level(cycle: models.Cycle, start: datetime, stop: datetime)
     result['total'], result['succeeded'], result['succeeded_perc'] = total, total_succeeded, succeeded_perc
 
     # Calculate prognosis
-    unschedulable_subtasks = models.Subtask.objects.filter(task_blueprints__scheduling_unit_blueprint__in=subs).filter(state='unschedulable')
+    unschedulable_subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint__in=subs).filter(state='unschedulable')
     unschedulable_duration = sum([uns.duration.total_seconds() for uns in unschedulable_subtasks])
     result['prognosis'] = round(unschedulable_duration / total, 2) if total > 0 else None
 
@@ -216,11 +216,11 @@ def _get_data_ingested_per_site_and_category(cycle: models.Cycle, start: datetim
 
     # TODO: Currently there is no way to fitler per LTA site.
     # # Get DataProducts related to the cycle with an ArchiveInfo
-    # archive_info = models.DataproductArchiveInfo.objects.filter(dataproduct__producer__subtask__task_blueprints__draft__scheduling_unit_draft__scheduling_set__project__cycles=cycle.pk)
+    # archive_info = models.DataproductArchiveInfo.objects.filter(dataproduct__producer__subtask__task_blueprint__draft__scheduling_unit_draft__scheduling_set__project__cycles=cycle.pk)
     # dataproducts = [ai.dataproduct for ai in archive_info]
 
     # Get DataProducts related to the cycle from Subtasks with state 'finished'
-    dataproducts = models.Dataproduct.objects.filter(producer__subtask__state='finished', producer__subtask__task_blueprints__draft__scheduling_unit_draft__scheduling_set__project__cycles=cycle.pk, producer__subtask__start_time__gte=start, producer__subtask__stop_time__lte=stop)
+    dataproducts = models.Dataproduct.objects.filter(producer__subtask__state='finished', producer__subtask__task_blueprint__draft__scheduling_unit_draft__scheduling_set__project__cycles=cycle.pk, producer__subtask__start_time__gte=start, producer__subtask__stop_time__lte=stop)
 
     # Combine and filter DataProducts accordingly
     dps_interferometric_obs_sizes = dataproducts.filter(producer__subtask__specifications_template__type='observation', dataformat='MeasurementSet', datatype='visibilities').aggregate(Sum('size'))
@@ -380,12 +380,12 @@ def _get_subs_and_durations_from_project(project_pk: int, start: datetime, stop:
             except Exception as e:
                 sub_info['target'] = None
             # Get SAS ID info
-            subtasks = models.Subtask.objects.filter(task_blueprints__scheduling_unit_blueprint=sub)
+            subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint=sub)
             sub_info['SAS ID'] = {}
             for template_name in ('observation control', 'preprocessing pipeline', 'pulsar pipeline'):
                 sub_info['SAS ID'][template_name] = [subtask.id for subtask in subtasks.filter(specifications_template__name=template_name)]
             # Get ingest info
-            dataproducts = models.Dataproduct.objects.filter(producer__subtask__specifications_template__type='ingest').filter(producer__subtask__state__value='finished').filter(producer__subtask__task_blueprints__scheduling_unit_blueprint=sub)
+            dataproducts = models.Dataproduct.objects.filter(producer__subtask__specifications_template__type='ingest').filter(producer__subtask__state__value='finished').filter(producer__subtask__task_blueprint__scheduling_unit_blueprint=sub)
             # TODO: Maybe it would be useful to implement an 'ingested_stop_time' (and even an 'ingested_start_time' and consequently an 'ingested_duration'?)
             sub_info['ingested_date'] = max(dataproducts, key=lambda x: x.producer.subtask.stop_time).producer.subtask.stop_time if dataproducts else None
             sub_info['ingested_data_size'] = dataproducts.aggregate(Sum('size'))['size__sum']
@@ -429,7 +429,7 @@ def _get_lta_dataproducts(project_name: str, start: datetime, stop: datetime) ->
     # Query dataproducts from Subtasks of type 'ingest' within 'finished' status
     dataproducts = models.Dataproduct.objects.filter(producer__subtask__specifications_template__type='ingest') \
         .filter(producer__subtask__state__value='finished') \
-        .filter(producer__subtask__task_blueprints__draft__scheduling_unit_draft__scheduling_set__project__name=project_name)
+        .filter(producer__subtask__task_blueprint__draft__scheduling_unit_draft__scheduling_set__project__name=project_name)
     # Filter basing on date interval if passed
     dataproducts = dataproducts.filter(producer__subtask__start_time__gte=start) if start else dataproducts
     dataproducts = dataproducts.filter(producer__subtask__stop_time__lte=stop) if stop else dataproducts
@@ -446,7 +446,7 @@ def _get_saps_exposure(project_pk: int, start: datetime, stop: datetime) -> {}:
     tasks = models.TaskBlueprint.objects.filter(scheduling_unit_blueprint__draft__scheduling_set__project=project_pk)
     for task in tasks:
         # Get subtasks and filter basing on date interval if passed
-        subtasks = models.Subtask.objects.filter(task_blueprints=task, specifications_template__type='observation')
+        subtasks = models.Subtask.objects.filter(task_blueprint=task, specifications_template__type='observation')
         subtasks = subtasks.filter(start_time__gte=start) if start else subtasks
         subtasks = subtasks.filter(stop_time__lte=stop) if stop else subtasks
         # Get SAPs
diff --git a/SAS/TMSS/backend/test/t_adapter.py b/SAS/TMSS/backend/test/t_adapter.py
index 488b9a3bc18400ce6477d5667fd62df4d0fbdf41..9bc66d5f4ba74f3ba33425894d63e82aa914da19 100755
--- a/SAS/TMSS/backend/test/t_adapter.py
+++ b/SAS/TMSS/backend/test/t_adapter.py
@@ -77,9 +77,8 @@ class ObservationParsetAdapterTest(unittest.TestCase):
 
     def create_subtask(self, specifications_doc):
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
-        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc, task_blueprint=models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()))
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
-        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
         return subtask
@@ -117,7 +116,7 @@ class ObservationParsetAdapterTest(unittest.TestCase):
         specifications_doc = self.get_default_specifications()
         subtask = self.create_subtask(specifications_doc)
         parset = convert_to_parset_dict(subtask)
-        sub = [tb.scheduling_unit_blueprint for tb in subtask.task_blueprints.all()][0]
+        sub = subtask.task_blueprint.scheduling_unit_blueprint
 
         # Assert the values are the same of the scheduling_unit_blueprint
         self.assertEqual(sub.piggyback_allowed_aartfaac, parset["ObservationControl.StationControl.aartfaacPiggybackAllowed"])
@@ -233,10 +232,9 @@ class PulsarPipelineParsetAdapterTest(unittest.TestCase):
         subtask_template = models.SubtaskTemplate.objects.get(name='pulsar pipeline')
         specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema)
 
-        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc, task_blueprint=models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()))
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
 
-        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
         return subtask
@@ -269,9 +267,8 @@ class SIPadapterTest(unittest.TestCase):
         for dp in specifications_doc['stations']['digital_pointings']:
             dp['subbands'] = list(range(8))
         # Create SubTask(output)
-        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc, task_blueprint=models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()))
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
-        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         # Create Dataproduct
         dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
@@ -316,9 +313,8 @@ class SIPadapterTest(unittest.TestCase):
         for dp in specifications_doc['stations']['digital_pointings']:
             dp['subbands'] = list(range(8))
         # Create SubTask(output)
-        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc, task_blueprint=models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()))
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
-        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         # Create Dataproduct
         dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output,
@@ -385,9 +381,8 @@ class SIPadapterTest(unittest.TestCase):
         for dp in specifications_doc['stations']['digital_pointings']:
             dp['subbands'] = list(range(8))
         # Create SubTask(output)
-        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
-        subtask: models.Subtask = models.Subtask.objects.create(**subtask_data)
-        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc, task_blueprint=models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()))
+        subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         # Create Dataproduct
         dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output,
@@ -512,8 +507,7 @@ class CycleReportTest(unittest.TestCase):
         tb = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(task_draft=cls.projects_components[project_name]['task_draft'], scheduling_unit_blueprint=sub))
         # Create Subtask
         subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data(subtask_type_value=subtask_type))
-        subtask = models.Subtask.objects.create(**Subtask_test_data(subtask_template=subtask_template))
-        subtask.task_blueprints.set([tb])
+        subtask = models.Subtask.objects.create(**Subtask_test_data(subtask_template=subtask_template, task_blueprint=tb))
 
         if status:
             set_subtask_state_following_allowed_transitions(subtask, status)
@@ -661,8 +655,7 @@ class ProjectReportTest(unittest.TestCase):
         tb = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(task_draft=self.task_draft, scheduling_unit_blueprint=sub))
         # Create Subtask of type 'ingest'
         subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data(subtask_type_value='ingest'))
-        subtask = models.Subtask.objects.create(**Subtask_test_data(subtask_template=subtask_template))
-        subtask.task_blueprints.set([tb])
+        subtask = models.Subtask.objects.create(**Subtask_test_data(subtask_template=subtask_template, task_blueprint=tb))
 
         if status:
             set_subtask_state_following_allowed_transitions(subtask, status)
diff --git a/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py
index 1f4dbb16b5f032ef5fd02dc89eb45876c96532c6..d9fa22bf3c0a1ae25d538cdcf06421e8dfecfc2a 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py
@@ -263,6 +263,7 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase):
 class SubtaskTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
+        test_data_creator.wipe_cache()
         # we should not depend on "previous" data
         models.SubtaskInput.objects.all().delete()
         models.DataproductHash.objects.all().delete()
@@ -285,7 +286,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask/1234321/', 404)
 
     def test_subtask_POST_and_GET(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -298,13 +299,13 @@ class SubtaskTestCase(unittest.TestCase):
         self.assertGreaterEqual(int(subtask_id), minimium_subtaskid)
 
     def test_subtask_PUT_invalid_raises_error(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/subtask/9876789876/', st_test_data, 404, {})
 
     def test_subtask_PUT(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
-        st_test_data2 = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data2 = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -316,7 +317,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, url, st_test_data2)
 
     def test_subtask_PATCH(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -332,7 +333,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, url, expected_data)
 
     def test_subtask_DELETE(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -343,10 +344,10 @@ class SubtaskTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_subtask_PROTECT_behavior_on_state_choice_deleted(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # create dependency that is safe to delete (enums are not populated / re-established between tests)
-        state_data = {'value': 'kickme'}
+        state_data = {'value': 'kickme'}
         POST_and_assert_expected_response(self, BASE_URL + '/subtask_state/', state_data, 201, state_data)
         state_url =  BASE_URL + '/subtask_state/kickme'
 
@@ -369,25 +370,25 @@ class SubtaskTestCase(unittest.TestCase):
                                                         template_url=self.task_blueprint_data['specifications_template'],
                                                         scheduling_unit_blueprint_url=self.task_blueprint_data['scheduling_unit_blueprint'])
         task_blueprint_url = test_data_creator.post_data_and_get_url(tbp_test_data, '/task_blueprint/')
-        st_test_data = test_data_creator.Subtask(task_blueprint_urls=[task_blueprint_url], cluster_url=self.cluster_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(task_blueprint_url=task_blueprint_url, cluster_url=self.cluster_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
         GET_OK_and_assert_equal_expected_response(self, url, st_test_data)
 
-        # DELETE dependency and check it's gone
-        DELETE_and_assert_gone(self, task_blueprint_url)
+        # Try to DELETE dependency, verify that was not successful
+        # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
+        response = requests.delete(task_blueprint_url, auth=AUTH)
+        self.assertEqual(500, response.status_code)
+        self.assertTrue("ProtectedError" in str(response.content))
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)
 
-        # assert item reference is set null
-        expected_data = dict(st_test_data)
-        expected_data['task_blueprint'] = None
-        GET_OK_and_assert_equal_expected_response(self, url, expected_data)
 
     def test_subtask_PROTECT_behavior_on_template_deleted(self):
         stt_test_data = test_data_creator.SubtaskTemplate()
         expected_data = test_data_creator.update_schema_from_template("subtasktemplate", stt_test_data)
         specifications_template_url = test_data_creator.post_data_and_get_url(stt_test_data, '/subtask_template/')
-        st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url, cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url])
+        st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url, cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
@@ -421,7 +422,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, {"count": 1})
 
         # PATCH item with state update and verify log record is created
-        test_patch = {"state": BASE_URL + "/subtask_state/finishing"}
+        test_patch = {"state": BASE_URL + "/subtask_state/defined"}
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, {"count": 2})
 
@@ -429,6 +430,8 @@ class SubtaskTestCase(unittest.TestCase):
 class DataproductTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
+        test_data_creator.wipe_cache()
+
         cls.specifications_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/dataproduct_specifications_template/')
         cls.subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(), '/subtask_output/')
         cls.dataproduct_feedback_template_url = test_data_creator.post_data_and_get_url(test_data_creator.DataproductFeedbackTemplate(), '/dataproduct_feedback_template/')
@@ -534,6 +537,7 @@ class DataproductTestCase(unittest.TestCase):
 class SubtaskInputTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
+        test_data_creator.wipe_cache()
         cls.subtask_data = test_data_creator.Subtask()
         cls.subtask_url = test_data_creator.post_data_and_get_url(cls.subtask_data, '/subtask/')
         cls.task_relation_blueprint_data = test_data_creator.TaskRelationBlueprint()
@@ -587,8 +591,8 @@ class SubtaskInputTestCase(unittest.TestCase):
 
         # make new subtask_url instance, but reuse related data for speed
         subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
-                                                                                        task_blueprint_urls=[self.subtask_data['task_blueprint']],
-                                                                                        specifications_template_url=self.subtask_data['specifications_template'],
+                                                                                        task_blueprint_url=self.subtask_data['task_blueprint'],
+                                                                                        specifications_template_url=self.subtask_data['specifications_template'],
                                                                                         specifications_doc=self.subtask_data['specifications_doc']), '/subtask/')
         test_patch = {"subtask": subtask_url,
                       "tags": ['FANCYTAG'],
@@ -614,7 +618,7 @@ class SubtaskInputTestCase(unittest.TestCase):
     def test_subtask_input_CASCADE_behavior_on_subtask_deleted(self):
         # make new subtask_url instance, but reuse related data for speed
         subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
-                                                                                        task_blueprint_urls=[self.subtask_data['task_blueprint']],
+                                                                                        task_blueprint_url=self.subtask_data['task_blueprint'],
                                                                                         specifications_template_url=self.subtask_data['specifications_template'],
                                                                                         specifications_doc=self.subtask_data['specifications_doc']), '/subtask/')
         sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url)
@@ -859,6 +863,7 @@ class AntennaSetTestCase(unittest.TestCase):
 class DataproductTransformTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
+        test_data_creator.wipe_cache()
         cls.input_dataproduct_data = test_data_creator.Dataproduct()
         cls.input_dataproduct_url = test_data_creator.post_data_and_get_url(cls.input_dataproduct_data, '/dataproduct/')
         cls.output_dataproduct_data = test_data_creator.Dataproduct()
@@ -1123,6 +1128,7 @@ class ClusterTestCase(unittest.TestCase):
 class DataproductHashTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
+        test_data_creator.wipe_cache()
         cls.dataproduct_url = test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')
 
     def test_dataproduct_hash_list_apiformat(self):
@@ -1224,6 +1230,9 @@ class DataproductHashTestCase(unittest.TestCase):
 
 
 class DataproductArchiveInfoTestCase(unittest.TestCase):
+    def setUp(self) -> None:
+        test_data_creator.wipe_cache()
+
     def test_dataproduct_archive_info_list_apiformat(self):
         r = requests.get(BASE_URL + '/dataproduct_archive_info/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1354,7 +1363,7 @@ class SubtaskQueryTestCase(unittest.TestCase):
             start_time = datetime.now() + timedelta(hours=2, days=day_idx)
             stop_time = datetime.now() + timedelta(hours=4, days=day_idx)
             test_data_creator.post_data_and_get_url(test_data_creator.Subtask(start_time=start_time, stop_time=stop_time,
-                                                                              cluster_url=cluster_url, task_blueprint_urls=[task_blueprint_url]), '/subtask/')
+                                                                              cluster_url=cluster_url, task_blueprint_url=task_blueprint_url), '/subtask/')
 
     subtasks_test_data_with_start_stop_time = {'clusterB': 50, 'clusterC': 30 }
 
diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
index f5c06e0742ff4a7488cb669f92bfed93bc69bb9c..60553c4230c5e38877f4b2728c02f752ead23ac7 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
@@ -407,7 +407,7 @@ def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.Subtas
 
 def Subtask_test_data(subtask_template: models.SubtaskTemplate=None,
                       specifications_doc: dict=None, start_time=None, stop_time=None, cluster=None, state=None,
-                      raw_feedback=None) -> dict:
+                      raw_feedback=None, task_blueprint: models.TaskBlueprint=None) -> dict:
 
     if subtask_template is None:
         subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data())
@@ -432,7 +432,7 @@ def Subtask_test_data(subtask_template: models.SubtaskTemplate=None,
              "stop_time": stop_time,
              "state": state,
              "specifications_doc": specifications_doc,
-             #"task_blueprint": task_blueprint,  # ManyToMany, use set()
+             "task_blueprint": task_blueprint,
              "specifications_template": subtask_template,
              "tags": ["TMSS", "TESTING"],
              "cluster": cluster,
diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py
index ad598429c8d0f77db2634798ec6ed28c8974d234..47b33d858b0bce535d4bff676981a97ab6dfa464 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py
@@ -645,12 +645,12 @@ class TMSSRESTTestDataCreator():
             return self._cluster_url
 
 
-    def Subtask(self, cluster_url=None, task_blueprint_urls=None, specifications_template_url=None, specifications_doc=None, state:str="defining", start_time: datetime=None, stop_time: datetime=None, raw_feedback:str =None):
+    def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None, state:str="defining", start_time: datetime=None, stop_time: datetime=None, raw_feedback:str =None):
         if cluster_url is None:
             cluster_url = self.cached_cluster_url
     
-        if task_blueprint_urls is None:
-            task_blueprint_urls = [self.cached_task_blueprint_url]
+        if task_blueprint_url is None:
+            task_blueprint_url = self.cached_task_blueprint_url
     
         if specifications_template_url is None:
             specifications_template_url = self.cached_subtask_template_url
@@ -674,7 +674,7 @@ class TMSSRESTTestDataCreator():
                 "stop_time": stop_time,
                 "state": self.django_api_url + '/subtask_state/%s' % (state,),
                 "specifications_doc": specifications_doc,
-                "task_blueprints": task_blueprint_urls,
+                "task_blueprint": task_blueprint_url,
                 "specifications_template": specifications_template_url,
                 "tags": ["TMSS", "TESTING"],
                 "cluster": cluster_url,
@@ -708,13 +708,16 @@ class TMSSRESTTestDataCreator():
             self._subtask_output_url = self.post_data_and_get_url(self.SubtaskOutput(), '/subtask_output/')
             return self._subtask_output_url
 
-    def Dataproduct(self, filename="my_filename", directory="/tmp/",
+    def Dataproduct(self, filename="my_filename", directory:str=None,
                     specifications_doc=None, specifications_template_url=None,
                     subtask_output_url=None,
                     dataproduct_feedback_doc=None, dataproduct_feedback_template_url=None,
                     dataformat="MeasurementSet", datatype="visibilities",
                     sap_url=None):
 
+        if directory is None:
+            directory = '/tmp/%s/' % uuid.uuid4()
+
         if specifications_template_url is None:
             specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/dataproduct_specifications_template/')