From e51d3843eed4a724ac0196e15d621c2e1895c974 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rn=20K=C3=BCnsem=C3=B6ller?=
 <jkuensem@physik.uni-bielefeld.de>
Date: Thu, 16 May 2019 17:14:23 +0000
Subject: [PATCH] LEI-86: Process review comments
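
Make the model fields whose help text says "(NULLable)" actually nullable
(null=True) in the scheduling and specification models, move the type
foreign key from Subtask to SubtaskTemplate, turn
SchedulingUnitDraft.generator_instance_doc into a JSONField, default
Project.expert to False, and regenerate the migration accordingly.
Update the Django and functional tests, their shared test data, and the
test CMake dependencies to match.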

---
 .gitattributes                                |  2 +-
 ...515_1501.py => 0002_auto_20190516_1607.py} | 30 ++++----
 .../lsmr/lsmrapp/migrations/0003_populate.py  |  2 +-
 .../src/lsmr/lsmrapp/models/scheduling.py     | 20 ++---
 .../src/lsmr/lsmrapp/models/specification.py  | 10 +--
 .../src/templates/react_jsonschema_form.html  |  2 +-
 SAS/LSMR/test/CMakeLists.txt                  |  5 +-
 SAS/LSMR/test/t_lsmrapp_scheduling_django.py  | 74 +++++++++++--------
 .../test/t_lsmrapp_scheduling_functional.py   | 58 ++++++++-------
 SAS/LSMR/test/test_utils.py                   |  4 +
 10 files changed, 115 insertions(+), 92 deletions(-)
 rename SAS/LSMR/src/lsmr/lsmrapp/migrations/{0002_auto_20190515_1501.py => 0002_auto_20190516_1607.py} (99%)

diff --git a/.gitattributes b/.gitattributes
index 36bed1b47e9..41d83f28478 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -4252,7 +4252,7 @@ SAS/LSMR/src/lsmr/lsmrapp/__init__.py -text
 SAS/LSMR/src/lsmr/lsmrapp/admin.py -text
 SAS/LSMR/src/lsmr/lsmrapp/apps.py -text
 SAS/LSMR/src/lsmr/lsmrapp/migrations/0001_initial.py -text
-SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190515_1501.py -text
+SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190516_1607.py -text
 SAS/LSMR/src/lsmr/lsmrapp/migrations/0003_populate.py -text
 SAS/LSMR/src/lsmr/lsmrapp/migrations/CMakeLists.txt -text
 SAS/LSMR/src/lsmr/lsmrapp/migrations/__init__.py -text
diff --git a/SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190515_1501.py b/SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190516_1607.py
similarity index 99%
rename from SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190515_1501.py
rename to SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190516_1607.py
index d28b1acf047..e24912c3587 100644
--- a/SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190515_1501.py
+++ b/SAS/LSMR/src/lsmr/lsmrapp/migrations/0002_auto_20190516_1607.py
@@ -1,4 +1,4 @@
-# Generated by Django 2.0.6 on 2019-05-15 15:01
+# Generated by Django 2.0.6 on 2019-05-16 16:07
 
 import django.contrib.postgres.fields
 import django.contrib.postgres.fields.jsonb
@@ -106,9 +106,9 @@ class Migration(migrations.Migration):
                 ('deleted_since', models.DateTimeField(help_text='When this dataproduct was removed from disk, or NULL if not deleted (NULLable).', null=True)),
                 ('pinned_since', models.DateTimeField(help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).', null=True)),
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties (f.e. beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.')),
-                ('do_cancel', models.DateTimeField(help_text='When this dataproduct was cancelled (NULLable).  Cancelling a dataproduct triggers cleanup if necessary.')),
-                ('expected_size', models.BigIntegerField(help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).')),
-                ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).')),
+                ('do_cancel', models.DateTimeField(help_text='When this dataproduct was cancelled (NULLable).  Cancelling a dataproduct triggers cleanup if necessary.', null=True)),
+                ('expected_size', models.BigIntegerField(help_text='Expected size of the dataproduct, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).', null=True)),
+                ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).', null=True)),
                 ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')),
                 ('dataformat', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.DataformatChoice')),
             ],
@@ -124,8 +124,8 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)),
-                ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).')),
-                ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).')),
+                ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).', null=True)),
+                ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).', null=True)),
                 ('dataproduct', models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.Dataproduct')),
             ],
             options={
@@ -434,13 +434,13 @@ class Migration(migrations.Migration):
                 ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('start_time', models.DateTimeField(help_text='Start this subtask at the specified time (NULLable).')),
-                ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).')),
+                ('start_time', models.DateTimeField(help_text='Start this subtask at the specified time (NULLable).', null=True)),
+                ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).', null=True)),
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')),
-                ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')),
+                ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)),
                 ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')),
                 ('scheduler_input_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Partial specifications, as input for the scheduler.')),
-                ('cluster', models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.Cluster')),
+                ('cluster', models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.Cluster')),
                 ('schedule_method', models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.ScheduleMethod')),
             ],
             options={
@@ -729,6 +729,11 @@ class Migration(migrations.Migration):
             name='specifications_template',
             field=models.ForeignKey(help_text='Schema used for specifications_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='lsmrapp.TaskTemplate'),
         ),
+        migrations.AddField(
+            model_name='subtasktemplate',
+            name='type',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.SubtaskTypeChoice'),
+        ),
         migrations.AddField(
             model_name='subtaskinput',
             name='producer',
@@ -779,11 +784,6 @@ class Migration(migrations.Migration):
             name='task_blueprint',
             field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='lsmrapp.TaskBlueprint'),
         ),
-        migrations.AddField(
-            model_name='subtask',
-            name='type',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='lsmrapp.SubtaskTypeChoice'),
-        ),
         migrations.AddField(
             model_name='schedulingunitdraft',
             name='requirements_template',
diff --git a/SAS/LSMR/src/lsmr/lsmrapp/migrations/0003_populate.py b/SAS/LSMR/src/lsmr/lsmrapp/migrations/0003_populate.py
index 84a99dd4b2a..155a4622c53 100644
--- a/SAS/LSMR/src/lsmr/lsmrapp/migrations/0003_populate.py
+++ b/SAS/LSMR/src/lsmr/lsmrapp/migrations/0003_populate.py
@@ -6,7 +6,7 @@ from ..populate import *
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('lsmrapp', '0002_auto_20190515_1501'),
+        ('lsmrapp', '0002_auto_20190516_1607'),
     ]
 
     operations = [ migrations.RunPython(populate_choices) ]
diff --git a/SAS/LSMR/src/lsmr/lsmrapp/models/scheduling.py b/SAS/LSMR/src/lsmr/lsmrapp/models/scheduling.py
index 193039e0c65..016e48eaff4 100644
--- a/SAS/LSMR/src/lsmr/lsmrapp/models/scheduling.py
+++ b/SAS/LSMR/src/lsmr/lsmrapp/models/scheduling.py
@@ -93,6 +93,7 @@ class ScheduleMethod(AbstractChoice):
 #
 
 class SubtaskTemplate(Template):
+    type = ForeignKey('SubtaskTypeChoice', null=False, on_delete=PROTECT)
     queue = BooleanField(default=False)
     realtime = BooleanField(default=False)
 
@@ -133,17 +134,16 @@ class Subtask(BasicCommon):
     inspection plots on the observed data, etc. Each task has a specific configuration, will have resources allocated
     to it, and represents a single run.
     """
-    type = ForeignKey('SubtaskTypeChoice', null=False, on_delete=PROTECT)
-    start_time = DateTimeField(help_text='Start this subtask at the specified time (NULLable).')
-    stop_time = DateTimeField(help_text='Stop this subtask at the specified time (NULLable).')
+    start_time = DateTimeField(null=True, help_text='Start this subtask at the specified time (NULLable).')
+    stop_time = DateTimeField(null=True, help_text='Stop this subtask at the specified time (NULLable).')
     state = ForeignKey('SubtaskStateChoice', null=False, on_delete=PROTECT, related_name='task_states', help_text='Subtask state (see Subtask State Machine).')
     specifications_doc = JSONField(help_text='Final specifications, as input for the controller.')
     task_blueprint = ForeignKey('TaskBlueprint', related_name='subtasks', null=True, on_delete=SET_NULL, help_text='Task Blueprint to which this Subtask belongs.')
     specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.')
-    do_cancel = DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')
+    do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')
     priority = IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')
     schedule_method = ForeignKey('ScheduleMethod', null=False, on_delete=PROTECT, help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).')
-    cluster = ForeignKey('Cluster', null=False, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).')
+    cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).')
     scheduler_input_doc = JSONField(help_text='Partial specifications, as input for the scheduler.')
     # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work?
 
@@ -178,9 +178,9 @@ class Dataproduct(BasicCommon):
     specifications_doc = JSONField(help_text='Dataproduct properties (f.e. beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.')
     specifications_template = ForeignKey('DataproductSpecificationsTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.')
     producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, help_text='Subtask Output which generates this dataproduct.')
-    do_cancel = DateTimeField(help_text='When this dataproduct was cancelled (NULLable).  Cancelling a dataproduct triggers cleanup if necessary.')
-    expected_size = BigIntegerField(help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).')
-    size = BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).')
+    do_cancel = DateTimeField(null=True, help_text='When this dataproduct was cancelled (NULLable).  Cancelling a dataproduct triggers cleanup if necessary.')
+    expected_size = BigIntegerField(null=True, help_text='Expected size of the dataproduct, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).')
+    size = BigIntegerField(null=True, help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).')
     feedback_doc = JSONField(help_text='Dataproduct properties, as reported by the producing process.')
     feedback_template = ForeignKey('DataproductFeedbackTemplate', on_delete=PROTECT, help_text='Schema used for feedback_doc.')
 
@@ -213,8 +213,8 @@ class Cluster(NamedCommon):
 class DataproductArchiveInfo(BasicCommon):
     dataproduct = ForeignKey('Dataproduct', on_delete=PROTECT, help_text='A dataproduct residing in the archive.')
     storage_ticket = CharField(max_length=128, help_text='Archive-system identifier.')
-    public_since = DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).')
-    corrupted_since = DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).')
+    public_since = DateTimeField(null=True, help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).')
+    corrupted_since = DateTimeField(null=True, help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).')
 
 
 class DataproductHash(BasicCommon):
diff --git a/SAS/LSMR/src/lsmr/lsmrapp/models/specification.py b/SAS/LSMR/src/lsmr/lsmrapp/models/specification.py
index 60009d4f7e9..e82508bae1e 100644
--- a/SAS/LSMR/src/lsmr/lsmrapp/models/specification.py
+++ b/SAS/LSMR/src/lsmr/lsmrapp/models/specification.py
@@ -204,12 +204,12 @@ class Project(NamedCommon):
     priority = IntegerField(default=0, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')  # todo: define a value for the default priority
     can_trigger = BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).')
     private_data = BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')
-    expert = BooleanField(default=True, help_text='Expert projects put more responsibility on the PI.')
+    expert = BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')
     filler = BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')
 
 
 class SchedulingSet(NamedCommon):
-    generator_doc = JSONField(help_text='Parameters for the generator (NULLable).')
+    generator_doc = JSONField(null=True, help_text='Parameters for the generator (NULLable).')
     generator_template = ForeignKey('GeneratorTemplate', on_delete=SET_NULL, null=True, help_text='Generator for the scheduling units in this set (NULLable).')
     generator_source = ForeignKey('SchedulingUnitDraft', on_delete=SET_NULL, null=True, help_text='Reference for the generator to an existing collection of specifications (NULLable).')
     project = ForeignKey('Project', related_name="scheduling_sets", on_delete=PROTECT, help_text='Project to which this scheduling set belongs.')  # protected to avoid accidents
@@ -218,8 +218,8 @@ class SchedulingSet(NamedCommon):
 class SchedulingUnitDraft(NamedCommon):
     requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this run.')
     copies = ForeignKey('SchedulingUnitDraft', related_name="copied_from", on_delete=SET_NULL, null=True, help_text='Source reference, if we are a copy (NULLable).')
-    copy_reason = ForeignKey('CopyReasonChoice', on_delete=PROTECT, help_text='Reason why source was copied (NULLable).')
-    generator_instance_doc = CharField(max_length=200, help_text='Parameter value that generated this run draft (NULLable).')  # todo: check if the field size is good enough
+    copy_reason = ForeignKey('CopyReasonChoice', null=True, on_delete=PROTECT, help_text='Reason why source was copied (NULLable).')
+    generator_instance_doc = JSONField(null=True, help_text='Parameter value that generated this run draft (NULLable).')
     scheduling_set = ForeignKey('SchedulingSet', related_name='scheduling_unit_drafts', on_delete=CASCADE, help_text='Set to which this scheduling unit draft belongs.')
     requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc.') # todo: 'schema'?
 
@@ -234,7 +234,7 @@ class SchedulingUnitBlueprint(NamedCommon):
 class TaskDraft(NamedCommon):
     specifications_doc = JSONField(help_text='Specifications for this task.')
     copies = ForeignKey('TaskDraft', related_name="copied_from", on_delete=SET_NULL, null=True, help_text='Source reference, if we are a copy (NULLable).')
-    copy_reason = ForeignKey('CopyReasonChoice', on_delete=PROTECT, help_text='Reason why source was copied (NULLable).')
+    copy_reason = ForeignKey('CopyReasonChoice', on_delete=PROTECT, null=True, help_text='Reason why source was copied (NULLable).')
     scheduling_unit_draft = ForeignKey('SchedulingUnitDraft', related_name='task_drafts', on_delete=CASCADE, help_text='Scheduling Unit draft to which this task draft belongs.')
     specifications_template = ForeignKey('TaskTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc.') # todo: 'schema'?
 
diff --git a/SAS/LSMR/src/templates/react_jsonschema_form.html b/SAS/LSMR/src/templates/react_jsonschema_form.html
index fc30964cd03..20bed288f4b 100644
--- a/SAS/LSMR/src/templates/react_jsonschema_form.html
+++ b/SAS/LSMR/src/templates/react_jsonschema_form.html
@@ -150,7 +150,7 @@ const schema = {
     "relative_starttime": {
       "type": "number",
       "title": "Relative starttime (seconds)",
-      "description": "Starttime with respect to other observations in this specification unit",
+      "description": "Starttime with respect to other observations in this scheduling unit",
       "default": 0,
       "minimum": 0
     },
diff --git a/SAS/LSMR/test/CMakeLists.txt b/SAS/LSMR/test/CMakeLists.txt
index 24b91edc566..378bb1a60ac 100644
--- a/SAS/LSMR/test/CMakeLists.txt
+++ b/SAS/LSMR/test/CMakeLists.txt
@@ -8,10 +8,13 @@ include(FindPythonModule)
 #find_python_module(mock REQUIRED)
 find_python_module(requests REQUIRED)
 #find_python_module(sqlalchemy)
-#find_python_module(testing.postgresql)
+find_python_module(testing.postgresql)
 find_python_module(ldap_test REQUIRED)            # sudo pip3 install python-ldap-test
 find_python_module(ldap3 REQUIRED)            # sudo pip3 install ldap3
 
+find_program(UUIDGEN_PATH uuidgen)
+find_program(SS_PATH ss)
+
 lofar_add_test(t_lsmrapp_specification_django)
 lofar_add_test(t_lsmrapp_specification_functional)
 lofar_add_test(t_lsmrapp_scheduling_django)
diff --git a/SAS/LSMR/test/t_lsmrapp_scheduling_django.py b/SAS/LSMR/test/t_lsmrapp_scheduling_django.py
index 85f5bc6c5e6..752fe90beea 100755
--- a/SAS/LSMR/test/t_lsmrapp_scheduling_django.py
+++ b/SAS/LSMR/test/t_lsmrapp_scheduling_django.py
@@ -33,31 +33,42 @@ client = rest_framework.test.APIClient()
 
 class SubtaskTemplateTest(rest_framework.test.APITransactionTestCase):
 
-    def setUp(self):
+    def setUp(self, populate=True):
         user, _ = User.objects.get_or_create(username='paulus', email='paulus@boskabouter.com')
         client.force_login(user)
 
+        if populate:
+            try:
+                populate_choices(None, None)  # <- can only be called once per test case
+            except:
+                print("why is this happening!? Continuing anyway...")
+
+        # test data
+        self.test_data_1 = {"type": models.SubtaskTypeChoice.objects.get(value='copy'),
+                            "name": "observation",
+                            "description": 'My one observation',
+                            "version": 'v0.314159265359',
+                            "schema": {"mykey": "my value"},
+                            "realtime": True,
+                            "queue": False,
+                            "tags": ["LSMR", "TESTING"]}
+
+        self.test_data_2 = {"type": models.SubtaskTypeChoice.objects.get(value='pipeline'),
+                            "name": "observation",
+                            "description": 'My other observation',
+                            "version": 'v0.314159265359',
+                            "schema": {"myotherkey": "my other value"},
+                            "realtime": False,
+                            "queue": True,
+                            "tags": []}
+
+
     def tearDown(self):
         client.logout()
 
     reset_sequences = True  # todo: Decide whether we want this. It slows down tests, but we can hardcode primary keys.
 
-    # test data
-    test_data_1 = {"name": "observation",
-                   "description": 'My one observation',
-                   "version": 'v0.314159265359',
-                   "schema": {"mykey": "my value"},
-                   "realtime": True,
-                   "queue": False,
-                   "tags": ["LSMR", "TESTING"]}
 
-    test_data_2 = {"name": "observation",
-                   "description": 'My other observation',
-                   "version": 'v0.314159265359',
-                   "schema": {"myotherkey": "my other value"},
-                   "realtime": False,
-                   "queue": True,
-                   "tags": []}
 
     def test_SubtaskTemplate_gets_created_with_correct_creation_timestamp(self):
 
@@ -91,8 +102,7 @@ class SubtaskTemplateTest(rest_framework.test.APITransactionTestCase):
         # assert
         response = client.get('/subtask_template/', format='json', follow=True)
         self.assertEqual(response.status_code, 200)
-        for item in self.test_data_1.items():
-            self.assertIn(item, response.data['results'][0].items())
+        assertDataWithUrls(self, response.data['results'][0], self.test_data_1)
 
     def test_GET_SubtaskTemplate_view_returns_correct_entry(self):
 
@@ -105,10 +115,8 @@ class SubtaskTemplateTest(rest_framework.test.APITransactionTestCase):
         response2 = client.get('/subtask_template/%s/' % id2, format='json', follow=True)
         self.assertEqual(response1.status_code, 200)
         self.assertEqual(response2.status_code, 200)
-        for item in self.test_data_1.items():
-            self.assertIn(item, response1.data.items())
-        for item in self.test_data_2.items():
-            self.assertIn(item, response2.data.items())
+        assertDataWithUrls(self, response1.data, self.test_data_1)
+        assertDataWithUrls(self, response2.data, self.test_data_2)
 
 
 
@@ -473,14 +481,17 @@ class SubtaskTest(rest_framework.test.APITransactionTestCase):
         wrbt = TaskBlueprintTest()
         wrbt.setUp(populate=False)
 
+        sst = SubtaskTemplateTest()
+        sst.setUp(populate=False)
+
         # test data
-        self.test_data_1 = {"type": models.SubtaskTypeChoice.objects.get(value='copy'),
+        self.test_data_1 = {
                        "start_time": datetime.utcnow().isoformat(),
                        "stop_time": datetime.utcnow().isoformat(),
                        "state": models.SubtaskStateChoice.objects.get(value='scheduling'),
                        "specifications_doc": "{}",
                        "task_blueprint": models.TaskBlueprint.objects.create(**wrbt.test_data_1),
-                       "specifications_template": models.SubtaskTemplate.objects.create(**SubtaskTemplateTest.test_data_1),
+                       "specifications_template": models.SubtaskTemplate.objects.create(**sst.test_data_1),
                        "tags": ["LSMR", "TESTING"],
                        "do_cancel": datetime.utcnow().isoformat(),
                        "priority": 1,
@@ -488,13 +499,13 @@ class SubtaskTest(rest_framework.test.APITransactionTestCase):
                        "cluster": models.Cluster.objects.create(location="downstairs", tags=[]),
                        "scheduler_input_doc": "{}"}
 
-        self.test_data_2 = {"type": models.SubtaskTypeChoice.objects.get(value='pipeline'),
+        self.test_data_2 = {
                        "start_time": datetime.utcnow().isoformat(),
                        "stop_time": datetime.utcnow().isoformat(),
                        "state": models.SubtaskStateChoice.objects.get(value='queueing'),
                        "specifications_doc": "{'some': 'spec'}",
                        "task_blueprint": models.TaskBlueprint.objects.create(**wrbt.test_data_2),
-                       "specifications_template": models.SubtaskTemplate.objects.create(**SubtaskTemplateTest.test_data_2),
+                       "specifications_template": models.SubtaskTemplate.objects.create(**sst.test_data_2),
                        "tags": [],
                        "do_cancel": datetime.utcnow().isoformat(),
                        "priority": 1,
@@ -716,19 +727,22 @@ class SubtaskConnectorTest(rest_framework.test.APITransactionTestCase):
                 print("why is this happening!? Continuing anyway...")
 
 
+        sst = SubtaskTemplateTest()
+        sst.setUp(populate=False)
+
         # test data
         self.test_data_1 = {"role": models.RoleChoice.objects.get(value='calibrator'),
                        "datatype": models.DatatypeChoice.objects.get(value='instrument model'),
                        # "dataformats": [models.DataformatChoice.objects.get(value='Beamformed')], # -> use set()
-                       "output_of": models.SubtaskTemplate.objects.create(**SubtaskTemplateTest.test_data_1),
-                       "input_of": models.SubtaskTemplate.objects.create(**SubtaskTemplateTest.test_data_2),
+                       "output_of": models.SubtaskTemplate.objects.create(**sst.test_data_1),
+                       "input_of": models.SubtaskTemplate.objects.create(**sst.test_data_2),
                        "tags": []}
 
         self.test_data_2 = {"role": models.RoleChoice.objects.get(value='target'),
                        "datatype": models.DatatypeChoice.objects.get(value='image'),
                        # "dataformats": [models.DataformatChoice.objects.get(value='MeasurementSet')], # -> use set()
-                       "output_of": models.SubtaskTemplate.objects.create(**SubtaskTemplateTest.test_data_1),
-                       "input_of": models.SubtaskTemplate.objects.create(**SubtaskTemplateTest.test_data_2),
+                       "output_of": models.SubtaskTemplate.objects.create(**sst.test_data_1),
+                       "input_of": models.SubtaskTemplate.objects.create(**sst.test_data_2),
                        "tags": []}
 
     def tearDown(self):
diff --git a/SAS/LSMR/test/t_lsmrapp_scheduling_functional.py b/SAS/LSMR/test/t_lsmrapp_scheduling_functional.py
index 853b2bb983e..174506686e3 100755
--- a/SAS/LSMR/test/t_lsmrapp_scheduling_functional.py
+++ b/SAS/LSMR/test/t_lsmrapp_scheduling_functional.py
@@ -51,7 +51,8 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         self.auth = HTTPBasicAuth(username=ldap_credentials.user, password=ldap_credentials.password)
 
     # test data
-    test_data_1 = {"name": "observation",
+    test_data_1 = {"type": BASE_URL + '/subtask_type_choice/copy/',
+                   "name": "observation",
                    "description": 'My one observation',
                    "version": 'v0.314159265359',
                    "schema": {"mykey": "my value"},
@@ -59,7 +60,8 @@ class SubtaskTemplateTestCase(unittest.TestCase):
                    "queue": False,
                    "tags": ["LSMR", "TESTING"]}
 
-    test_data_2 = {"name": "observation",
+    test_data_2 = {"type": BASE_URL + '/subtask_type_choice/pipeline/',
+                   "name": "observation",
                    "description": 'My other observation',
                    "version": 'v0.314159265359',
                    "schema": {"myotherkey": "my other value"},
@@ -67,7 +69,8 @@ class SubtaskTemplateTestCase(unittest.TestCase):
                    "queue": True,
                    "tags": []}
 
-    test_patch = {"version": 'v6.28318530718',
+    test_patch = {"type": BASE_URL + '/subtask_type_choice/inspection/',
+                  "version": 'v6.28318530718',
                   "schema": {"mykey": "my better value"},
                   }
 
@@ -119,6 +122,27 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
 
+    def test_subtask_template_PROTECT_behavior_on_type_choice_deleted(self):
+
+        # create dependency that is safe to delete (enums are not populated / re-established between tests)
+        type_data = {'value': 'kickme'}
+        POST_and_assert_expected_response(self, BASE_URL + '/subtask_type_choice/', type_data, 201, type_data)
+        type_url =  BASE_URL + '/subtask_type_choice/kickme/'
+
+        # POST new item and verify
+        test_data = dict(self.test_data_1)
+        test_data['type'] = type_url
+        url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', test_data, 201, test_data)['url']
+        GET_and_assert_expected_response(self, url, 200, test_data)
+
+        # Try to DELETE dependency, verify that was not successful
+        # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
+        response = requests.delete(type_url, auth=self.auth)
+        self.assertEqual(500, response.status_code)
+        self.assertTrue("ProtectedError" in str(response.content))
+        GET_and_assert_expected_response(self, type_url, 200, type_data)
+
+
 
 class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
 
@@ -339,8 +363,7 @@ class SubtaskTestCase(unittest.TestCase):
                                                         ClusterTestCase.test_data_2)['url']
 
         # test data
-        self.test_data_1 = {"type": BASE_URL + '/subtask_type_choice/copy/',
-                            "start_time": datetime.utcnow().isoformat(),
+        self.test_data_1 = {"start_time": datetime.utcnow().isoformat(),
                             "stop_time": datetime.utcnow().isoformat(),
                             "state": BASE_URL + '/subtask_state_choice/scheduling/',
                             "specifications_doc": "{}",
@@ -354,8 +377,7 @@ class SubtaskTestCase(unittest.TestCase):
                             "scheduler_input_doc": "{}"
                             }
 
-        self.test_data_2 = {"type": BASE_URL + '/subtask_type_choice/pipeline/',
-                            "start_time": datetime.utcnow().isoformat(),
+        self.test_data_2 = {"start_time": datetime.utcnow().isoformat(),
                             "stop_time": datetime.utcnow().isoformat(),
                             "state": BASE_URL + '/subtask_state_choice/queueing/',
                             "specifications_doc": "{'some': 'spec'}",
@@ -369,7 +391,7 @@ class SubtaskTestCase(unittest.TestCase):
                             "scheduler_input_doc": "{}"
                             }
 
-        self.test_patch = {"type": BASE_URL + '/subtask_type_choice/inspection/',
+        self.test_patch = {
                            "specifications_doc": {"somespec": "somevalue"},
                           }
 
@@ -425,26 +447,6 @@ class SubtaskTestCase(unittest.TestCase):
         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
 
-    def test_subtask_PROTECT_behavior_on_type_choice_deleted(self):
-
-        # create dependency that is safe to delete (enums are not populated / re-established between tests)
-        type_data = {'value': 'kickme'}
-        POST_and_assert_expected_response(self, BASE_URL + '/subtask_type_choice/', type_data, 201, type_data)
-        type_url =  BASE_URL + '/subtask_type_choice/kickme/'
-
-        # POST new item and verify
-        test_data = dict(self.test_data_1)
-        test_data['type'] = type_url
-        url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
-
-        # Try to DELETE dependency, verify that was not successful
-        # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
-        response = requests.delete(type_url, auth=self.auth)
-        self.assertEqual(500, response.status_code)
-        self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, type_url, 200, type_data)
-
     def test_subtask_PROTECT_behavior_on_state_choice_deleted(self):
 
         # create dependency that is safe to delete (enums are not populated / re-established between tests)
diff --git a/SAS/LSMR/test/test_utils.py b/SAS/LSMR/test/test_utils.py
index b7cdb2052ba..8843e240169 100644
--- a/SAS/LSMR/test/test_utils.py
+++ b/SAS/LSMR/test/test_utils.py
@@ -26,6 +26,8 @@ def assertDataWithUrls(self, data, expected):
     object instances get returned as urls, check that the value is part of that url
     """
 
+    # TODO: Make this smarter, this only checks for matching pk!
+
     for k, v in expected.items():
         if isinstance(v, models.Model):
             v = str(v.pk)
@@ -39,6 +41,8 @@ def assertUrlList(self, url_list, expected_objects):
     object instances get returned as urls, check that the expected projects are in that list
     """
 
+    # TODO: Make this smarter, this only checks for matching pk!
+
     self.assertEqual(len(url_list), len(expected_objects))
     for v in expected_objects:
         if isinstance(v, models.Model):
-- 
GitLab