diff --git a/CMake/NPMInstall.cmake b/CMake/NPMInstall.cmake
index de0738bf0a18d5948715757c3beb2f031b7dc172..687480f98adeae32f9b44b4b05f4a1e1486a0c01 100644
--- a/CMake/NPMInstall.cmake
+++ b/CMake/NPMInstall.cmake
@@ -162,7 +162,7 @@ function(npm_install NPM_PACKAGE_SPECIFICATION)
 
     add_custom_command(
     OUTPUT "${NPM_BINARY_DIR}/node_modules" "${NPM_BINARY_DIR}/package-lock.json"
-    COMMAND npm install
+    COMMAND ${CMAKE_COMMAND} -E env "NODE_OPTIONS=--max-old-space-size=8192" npm install --legacy-peer-deps
    DEPENDS "${NPM_BINARY_DIR}/package.json"
     WORKING_DIRECTORY "${NPM_BINARY_DIR}"
     COMMENT "Downloading npm dependencies for ${NPM_BINARY_DIR}/package.json")
diff --git a/LCS/PyCommon/test/t_util.py b/LCS/PyCommon/test/t_util.py
index fdb48f516ca561ba77bf1cdf865ce0540357024b..77971885fb78f135c4dba3cee0daa940dd1c33be 100644
--- a/LCS/PyCommon/test/t_util.py
+++ b/LCS/PyCommon/test/t_util.py
@@ -14,9 +14,10 @@ def setUpModule():
 def tearDownModule():
     pass
 
+
+@unit_test
 class TestUtils(unittest.TestCase):
 
-    @unit_test
     def test_is_iterable(self):
         #list
         self.assertTrue(is_iterable([]))
@@ -36,6 +37,33 @@ class TestUtils(unittest.TestCase):
         self.assertFalse(is_iterable(1))
         self.assertFalse(is_iterable(None))
 
+    def test_merge_nested_dicts(self):
+        dict_a = {'a': 1, 
+                  'b': {
+                      'c': 3, 
+                      'd': [4, 5, 6]}, 
+                  'e': [{'foo': 1}, 
+                        {'bar': 2}]}
+
+        dict_b = {'a': 2}
+        merged = dict_with_overrides(dict_a, dict_b)
+        self.assertEqual({'a': 2, 'b': {'c': 3, 'd': [4, 5, 6]}, 'e': [{'foo': 1}, {'bar': 2}]}, merged)
+
+        dict_b = {'a': 1, 'b': 2}
+        merged = dict_with_overrides(dict_a, dict_b)
+        self.assertEqual({'a': 1, 'b': 2, 'e': [{'foo': 1}, {'bar': 2}]}, merged)
+
+        dict_b = {'b': {'c': 4}}
+        merged = dict_with_overrides(dict_a, dict_b)
+        self.assertEqual({'a': 1, 'b': {'c': 4, 'd': [4, 5, 6]}, 'e': [{'foo': 1}, {'bar': 2}]}, merged)
+
+        dict_b = {'e': [{'foo': 2}, {'bar': 3}]}
+        merged = dict_with_overrides(dict_a, dict_b)
+        self.assertEqual({'a': 1, 'b': {'c': 3, 'd': [4, 5, 6]}, 'e': [{'foo': 2}, {'bar': 3}]}, merged)
+
+        with self.assertRaises(AssertionError):
+            dict_b = {'e': []} # AssertionError should be raised because the list is not of the same length as the original
+            dict_with_overrides(dict_a, dict_b)
 
 def main(argv):
     unittest.main()
diff --git a/LCS/PyCommon/util.py b/LCS/PyCommon/util.py
index bc45029e88faa887e9753efeda711206c71ddd39..541937c640c1b47066da09af33ce9e6083dd0c2c 100644
--- a/LCS/PyCommon/util.py
+++ b/LCS/PyCommon/util.py
@@ -27,7 +27,9 @@ This package contains different utilities that are common for LOFAR software
 import sys
 import os, os.path
 import time
-
+from copy import deepcopy
+import logging
+logger = logging.getLogger(__name__)
 
 def check_bit(value, bit):
     """
@@ -218,3 +220,67 @@ def find_free_port(preferred_port: int=0, allow_reuse_of_lingering_port: bool=Tr
 
         s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         return s.getsockname()[1]
+
+def dict_search_and_replace(d: dict, key, value):
+    '''perform an in-place search-and-replace to replace all items for the given key by the given value'''
+    if key in d:
+        d[key] = value
+
+    # recurse over nested items.
+    for k in d:
+        if isinstance(d[k], dict):
+            dict_search_and_replace(d[k], key, value)
+        elif isinstance(d[k], list):
+            for i in d[k]:
+                if isinstance(i, dict):
+                    dict_search_and_replace(i, key, value)
+
+
+def dict_with_overrides(org_dict: dict, overrides: dict) -> dict:
+    '''return a copy of the original (nested) org_dict with all (nested) key/value pairs from the given overrides dict applied'''
+    new_dict = deepcopy(org_dict)
+
+    for override_key, override_value in overrides.items():
+        if isinstance(override_value, dict):
+            sub_dict = new_dict.get(override_key, {})
+            new_dict[override_key] = dict_with_overrides(sub_dict, override_value)
+        elif isinstance(override_value, list):
+            sub_list = new_dict.get(override_key, [])
+            assert isinstance(sub_list, list)
+            assert len(sub_list) == len(override_value)
+
+            for i in range(len(override_value)):
+                org_list_item = sub_list[i]
+                override_list_item = override_value[i]
+                if isinstance(org_list_item, dict) and isinstance(override_list_item, dict):
+                    new_dict[override_key][i] = dict_with_overrides(org_list_item, override_list_item)
+                else:
+                    new_dict[override_key][i] = override_list_item
+        else:
+            new_dict[override_key] = override_value
+
+    return new_dict
+
+
+def subdict_of_pointer_items(org_dict: dict, pointers: [str]) -> dict:
+    '''extract a (nested) subdict out of the original (nested) org_dict with only the items which have a json-pointer-path which is in the given pointers list'''
+    new_dict = deepcopy(org_dict)
+
+    # recursive helper function
+    def remove_non_pointer_items(doc, parent_path: str):
+        if isinstance(doc, dict):
+            for key, value in list(doc.items()):
+                path = parent_path + '/' + key
+                if any([pointer == path for pointer in pointers]):
+                    # keep this key/value as it is exactly one of the pointer paths
+                    pass
+                elif not any([pointer.startswith(path) for pointer in pointers]):
+                    del doc[key]
+                else:
+                    remove_non_pointer_items(value, path)
+        elif isinstance(doc, list):
+            for cntr, item in enumerate(doc):
+                remove_non_pointer_items(item, '%s/%d' % (parent_path, cntr))
+
+    remove_non_pointer_items(new_dict, "#")
+    return new_dict
diff --git a/SAS/TMSS/backend/src/tmss/exceptions.py b/SAS/TMSS/backend/src/tmss/exceptions.py
index 0097fbd4bb7de7572240f0b68986c3d572129135..984ab4a225fe62195a4646d039b27218de566499 100644
--- a/SAS/TMSS/backend/src/tmss/exceptions.py
+++ b/SAS/TMSS/backend/src/tmss/exceptions.py
@@ -45,3 +45,6 @@ class UnknownTemplateException(TMSSException):
     '''raised when TMSS trying to base its processing routines on the chosen template, but this specific template is unknown.'''
     pass
 
+class InvalidSpecificationException(TMSSException):
+    '''raised when the user submits an invalid specification'''
+    pass
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
index 74dd30b92b0fee17ba8e8d228c5a9ed37563d5d4..c9b1b0d50f4a4bec1eca066c40d86fb3b992d56a 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.9 on 2021-06-04 11:11
+# Generated by Django 3.0.9 on 2021-07-09 07:33
 
 from django.conf import settings
 import django.contrib.auth.models
@@ -150,6 +150,9 @@ class Migration(migrations.Migration):
                 ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).', null=True)),
                 ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin),
         ),
         migrations.CreateModel(
@@ -684,7 +687,7 @@ class Migration(migrations.Migration):
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
                 ('description', models.CharField(blank=True, default='', help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
+                ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('template', django.contrib.postgres.fields.jsonb.JSONField(help_text='JSON-data compliant with the JSON-schema in the scheduling_unit_template. This observation strategy template like a predefined recipe with all the correct settings, and defines which parameters the user can alter.')),
             ],
             options={
@@ -862,6 +865,9 @@ class Migration(migrations.Migration):
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schedulings for this task (IMMUTABLE).')),
                 ('output_pinned', models.BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(lofar.sas.tmss.tmss.tmssapp.models.common.ProjectPropertyMixin, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin, models.Model),
         ),
         migrations.CreateModel(
@@ -882,6 +888,9 @@ class Migration(migrations.Migration):
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')),
                 ('output_pinned', models.BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin, lofar.sas.tmss.tmss.tmssapp.models.common.ProjectPropertyMixin),
         ),
         migrations.CreateModel(
@@ -893,6 +902,9 @@ class Migration(migrations.Migration):
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin, lofar.sas.tmss.tmss.tmssapp.models.common.ProjectPropertyMixin),
         ),
         migrations.CreateModel(
@@ -904,6 +916,9 @@ class Migration(migrations.Migration):
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin, lofar.sas.tmss.tmss.tmssapp.models.common.ProjectPropertyMixin),
         ),
         migrations.CreateModel(
@@ -974,6 +989,9 @@ class Migration(migrations.Migration):
                 ('placement', models.ForeignKey(help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')),
                 ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskDraft')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.ProjectPropertyMixin),
         ),
         migrations.CreateModel(
@@ -988,6 +1006,9 @@ class Migration(migrations.Migration):
                 ('placement', models.ForeignKey(default='after', help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')),
                 ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskBlueprint')),
             ],
+            options={
+                'abstract': False,
+            },
             bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.ProjectPropertyMixin),
         ),
         migrations.AddConstraint(
@@ -1544,18 +1565,34 @@ class Migration(migrations.Migration):
             model_name='tasktemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='tasktemplate_unique_name_version'),
         ),
+        migrations.AddIndex(
+            model_name='taskschedulingrelationdraft',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_d1e21f_gin'),
+        ),
         migrations.AddConstraint(
             model_name='taskschedulingrelationdraft',
             constraint=models.UniqueConstraint(fields=('first', 'second'), name='TaskSchedulingRelationDraft_unique_relation'),
         ),
+        migrations.AddIndex(
+            model_name='taskschedulingrelationblueprint',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_4b39d4_gin'),
+        ),
         migrations.AddConstraint(
             model_name='taskschedulingrelationblueprint',
             constraint=models.UniqueConstraint(fields=('first', 'second'), name='TaskSchedulingRelationBlueprint_unique_relation'),
         ),
+        migrations.AddIndex(
+            model_name='taskrelationdraft',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_aeef84_gin'),
+        ),
         migrations.AddConstraint(
             model_name='taskrelationdraft',
             constraint=models.UniqueConstraint(fields=('producer', 'consumer', 'input_role', 'output_role'), name='TaskRelationDraft_unique_relation'),
         ),
+        migrations.AddIndex(
+            model_name='taskrelationblueprint',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_256437_gin'),
+        ),
         migrations.AddConstraint(
             model_name='taskrelationblueprint',
             constraint=models.UniqueConstraint(fields=('producer', 'consumer', 'input_role', 'output_role', 'draft'), name='TaskRelationBlueprint_unique_relation'),
@@ -1596,6 +1633,10 @@ class Migration(migrations.Migration):
             model_name='setting',
             index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_set_tags_41a1ba_gin'),
         ),
+        migrations.AddConstraint(
+            model_name='schedulingunitobservingstrategytemplate',
+            constraint=models.UniqueConstraint(fields=('name', 'version'), name='schedulingunitobservingstrategytemplate_unique_name_version'),
+        ),
         migrations.AddIndex(
             model_name='sap',
             index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sap_tags_7451b0_gin'),
@@ -1644,6 +1685,10 @@ class Migration(migrations.Migration):
             model_name='dataproductarchiveinfo',
             index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_ebf2ef_gin'),
         ),
+        migrations.AddIndex(
+            model_name='dataproduct',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_5932a3_gin'),
+        ),
         migrations.AddConstraint(
             model_name='dataproduct',
             constraint=models.UniqueConstraint(fields=('directory', 'filename'), name='dataproduct_unique_path'),
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py
index ab1e8fc20ca9443ff68460d07168c67a5402089c..834f7bbcdb947eec22918e44ec9f910f4957da65 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py
@@ -3,6 +3,8 @@ This file contains common constructs used by database models in other modules
 """
 
 import logging
+import typing
+
 logger = logging.getLogger(__name__)
 
 from django.db.models import Model, CharField, DateTimeField, IntegerField, UniqueConstraint
@@ -61,6 +63,14 @@ class BasicCommon(Model):
     created_at = DateTimeField(auto_now_add=True, help_text='Moment of object creation.')
     updated_at = DateTimeField(auto_now=True, help_text='Moment of last object update.')
 
+    @property
+    def is_used(self) -> bool:
+        '''Is this model instance used by any of its related objects?'''
+        for rel_obj in self._meta.related_objects:
+            if rel_obj.related_model.objects.filter(**{rel_obj.field.attname: self}).count() > 0:
+                return True
+        return False
+
     class Meta:
         abstract = True
         indexes = [GinIndex(fields=['tags'])]
@@ -112,14 +122,34 @@ class AbstractChoice(Model):
         return self.value
 
 
-class Template(NamedCommon):
+class NamedVersionedCommon(NamedCommon):
     version = IntegerField(editable=False, null=False, help_text='Version of this template (with respect to other templates of the same name)')
-    schema = JSONField(help_text='Schema for the configurable parameters needed to use this template.')
 
     class Meta:
         abstract = True
         constraints = [UniqueConstraint(fields=['name', 'version'], name='%(class)s_unique_name_version')]
 
+    def auto_set_version_number(self):
+        '''An instance of a subclass of this class, for example a template, cannot / should not be updated if it is already being used.
+        So, update the version number if the template is already used, else keep it.'''
+        if self.pk is None:
+            # this is a new instance. auto-assign new unique version number
+            self.version = self.__class__.objects.filter(name=self.name).count() + 1
+        else:
+            # this is an existing instance which is being updated. Check if it is being used.
+            if self.is_used:
+                # yes, this template is used by others, so "editing"/updating is forbidden,
+                # so create new instance (by setting pk=None) and assign new unique version number
+                self.pk = None
+                self.version = self.__class__.objects.filter(name=self.name).count() + 1
+
+
+class Template(NamedVersionedCommon):
+    schema = JSONField(help_text='Schema for the configurable parameters needed to use this template.')
+
+    class Meta(NamedVersionedCommon.Meta):
+        abstract = True
+
     def get_default_json_document_for_schema(self) -> dict:
         '''get a json document object (dict) which complies with this template's schema and with all the defaults filled in.'''
         return get_default_json_object_for_schema(self.schema, cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE)
@@ -167,27 +197,11 @@ class Template(NamedCommon):
         # this template's schema has a schema of its own (usually the draft-06 meta schema). Validate it.
         validate_json_against_its_schema(self.schema)
 
-    @property
-    def is_used(self) -> bool:
-        '''Is this template used by any of its related objects?'''
-        for rel_obj in self._meta.related_objects:
-            if rel_obj.related_model.objects.filter(**{rel_obj.field.attname: self}).count() > 0:
-                return True
-        return False
-
-    def auto_set_version_number(self):
-        '''A template cannot/shouldnot be updated if it is already being used.
-        So, update the version number if the template is already used, else keep it.'''
-        if self.pk is None:
-            # this is a new instance. auto-assign new unique version number
-            self.version = self.__class__.objects.filter(name=self.name).count() + 1
-        else:
-            # this is a known template. Check if it is being used.
-            if self.is_used:
-                # yes, this template is used by others, so "editing"/updating is forbidden,
-                # so create new instance (by setting pk=None) and assign new unique version number
-                self.pk = None
-                self.version = self.__class__.objects.filter(name=self.name).count() + 1
+    def validate_document(self, json_doc: typing.Union[str, dict]) -> bool:
+        '''validate the given json_doc against the template's schema
+           If no exception is thrown, then the given json_string validates against the given schema.
+           :raises SchemaValidationException if the json_string does not validate against the schema '''
+        validate_json_against_schema(json_doc, self.schema, cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE)
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         self.auto_set_version_number()
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
index 097799f697b4934cb340edeb4d660351eb6b090e..94bbdd24af774ea1327a206e98535a130e542bf9 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
@@ -419,7 +419,7 @@ class Dataproduct(BasicCommon, TemplateSchemaMixin):
     sap = ForeignKey('SAP', on_delete=PROTECT, null=True, related_name="dataproducts", help_text='SAP this dataproduct was generated out of (NULLable).')
     global_identifier = OneToOneField('SIPidentifier', editable=False, null=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
-    class Meta:
+    class Meta(BasicCommon.Meta):
         constraints = [UniqueConstraint(fields=['directory', 'filename'], name='%(class)s_unique_path')]
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
index beb610c20436106a31f51e768aed613a984e0f02..c443bfb04340fb77de7c54ba3cfca6b493456438 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
@@ -10,7 +10,7 @@ from django.contrib.postgres.fields import JSONField
 from enum import Enum
 from django.db.models.expressions import RawSQL
 from django.db.models.deletion import ProtectedError
-from .common import AbstractChoice, BasicCommon, Template, NamedCommon, TemplateSchemaMixin, NamedCommonPK, RefreshFromDbInvalidatesCachedPropertiesMixin, ProjectPropertyMixin
+from .common import AbstractChoice, BasicCommon, Template, NamedCommon, NamedVersionedCommon, TemplateSchemaMixin, NamedCommonPK, RefreshFromDbInvalidatesCachedPropertiesMixin, ProjectPropertyMixin
 from lofar.common.json_utils import validate_json_against_schema, validate_json_against_its_schema, add_defaults_to_json_object_for_schema
 from lofar.sas.tmss.tmss.exceptions import *
 from django.core.exceptions import ValidationError
@@ -21,6 +21,9 @@ from pprint import pformat
 from lofar.sas.tmss.tmss.exceptions import TMSSException
 from lofar.sas.tmss.tmss.exceptions import BlueprintCreationException, TMSSException
 from django.db.models import Count
+from copy import deepcopy
+from lofar.common.util import subdict_of_pointer_items
+
 #
 # I/O
 #
@@ -194,17 +197,21 @@ class DefaultGeneratorTemplate(BasicCommon):
     template = ForeignKey("GeneratorTemplate", on_delete=PROTECT)
 
 
-class SchedulingUnitObservingStrategyTemplate(NamedCommon):
+class SchedulingUnitObservingStrategyTemplate(NamedVersionedCommon):
     '''
     A SchedulingUnitObservingStrategyTemplate is a template in the sense that it serves as a template to fill in json data objects conform its referred scheduling_unit_template.
     It is however not derived from the (abstract) Template super-class, because the Template super class is for JSON schemas, not JSON data objects.
     '''
-    version = CharField(max_length=128, help_text='Version of this template (with respect to other templates of the same name).')
     template = JSONField(null=False, help_text='JSON-data compliant with the JSON-schema in the scheduling_unit_template. '
                                                'This observation strategy template like a predefined recipe with all the correct settings, and defines which parameters the user can alter.')
     scheduling_unit_template = ForeignKey("SchedulingUnitTemplate", on_delete=PROTECT, null=False, help_text="")
 
+    class Meta(NamedVersionedCommon.Meta):
+        abstract = False
+
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+        self.auto_set_version_number()
+
         if self.template and self.scheduling_unit_template_id and self.scheduling_unit_template.schema:
             try:
                 validate_json_against_schema(self.template, self.scheduling_unit_template.schema)
@@ -216,6 +223,33 @@ class SchedulingUnitObservingStrategyTemplate(NamedCommon):
 
         super().save(force_insert, force_update, using, update_fields)
 
+    @property
+    def template_doc_complete_with_defaults(self) -> dict:
+        template_doc = deepcopy(self.template)
+
+        # loop over all tasks, and add the defaults to each task given the task's specifications_template
+        for task_name, task_doc in list(template_doc.get('tasks',{}).items()):
+            task_specifications_template = TaskTemplate.objects.get(name=task_doc['specifications_template'])
+            template_doc['tasks'][task_name] = add_defaults_to_json_object_for_schema(task_doc, task_specifications_template.schema,
+                                                                                      cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE)
+
+        # add the default constraints given the scheduling_constraints_template
+        if 'scheduling_constraints_template' in template_doc:
+            constraints_template = SchedulingConstraintsTemplate.objects.get(name=template_doc.get('scheduling_constraints_template'))
+            constraints_doc = add_defaults_to_json_object_for_schema(template_doc.get('scheduling_constraints_doc', {}), constraints_template.schema,
+                                                                     cache=TemplateSchemaMixin._schema_cache, max_cache_age=TemplateSchemaMixin._MAX_SCHEMA_CACHE_AGE)
+            template_doc['scheduling_constraints_doc'] = constraints_doc
+
+        return template_doc
+
+    @property
+    def template_doc_with_just_the_parameters(self) -> dict:
+        parameter_pointers = sum([p['refs'] for p in self.template.get('parameters',[])], [])
+
+        template_doc = self.template_doc_complete_with_defaults
+        template_doc = subdict_of_pointer_items(template_doc, parameter_pointers)
+
+        return template_doc
 
 class SchedulingUnitTemplate(Template):
     pass
@@ -831,7 +865,7 @@ class TaskDraft(NamedCommon, TemplateSchemaMixin, ProjectPropertyMixin):
     output_pinned = BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')
     path_to_project = 'scheduling_unit_draft__scheduling_set__project'
 
-    class Meta:
+    class Meta(NamedCommon.Meta):
         # ensure there are no tasks with duplicate names within one scheduling_unit
         constraints = [UniqueConstraint(fields=['name', 'scheduling_unit_draft'], name='TaskDraft_unique_name_in_scheduling_unit')]
 
@@ -962,7 +996,7 @@ class TaskBlueprint(ProjectPropertyMixin, TemplateSchemaMixin, NamedCommon):
     output_pinned = BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')
     path_to_project = 'draft__scheduling_unit_draft__scheduling_set__project'
 
-    class Meta:
+    class Meta(NamedCommon.Meta):
         # ensure there are no tasks with duplicate names within one scheduling_unit,
         constraints = [UniqueConstraint(fields=['name', 'scheduling_unit_blueprint'], name='TaskBlueprint_unique_name_in_scheduling_unit')]
 
@@ -1119,7 +1153,7 @@ class TaskRelationDraft(BasicCommon, TemplateSchemaMixin, ProjectPropertyMixin):
     output_role = ForeignKey('TaskConnectorType', related_name='taskrelationdraft_output_roles', on_delete=CASCADE, help_text='Output connector type (what kind of data is taken from the producer).')
     path_to_project = 'producer__scheduling_unit_draft__scheduling_set__project'
 
-    class Meta:
+    class Meta(BasicCommon.Meta):
         # ensure there are no duplicate relations between tasks with the same in/out roles.
         constraints = [UniqueConstraint(fields=['producer', 'consumer', 'input_role', 'output_role'], name='TaskRelationDraft_unique_relation')]
 
@@ -1148,7 +1182,7 @@ class TaskRelationBlueprint(BasicCommon, TemplateSchemaMixin, ProjectPropertyMix
     selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.')  # todo: 'schema'?
     path_to_project = 'draft__producer__scheduling_unit_draft__scheduling_set__project'
 
-    class Meta:
+    class Meta(BasicCommon.Meta):
         # ensure there are no duplicate relations between tasks with the same in/out roles.
         constraints = [UniqueConstraint(fields=['producer', 'consumer', 'input_role', 'output_role', 'draft'], name='TaskRelationBlueprint_unique_relation')]
 
@@ -1164,7 +1198,7 @@ class TaskSchedulingRelationBlueprint(BasicCommon, ProjectPropertyMixin):
     time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')  
     path_to_project = 'first__draft__scheduling_unit_draft__scheduling_set__project'
 
-    class Meta:
+    class Meta(BasicCommon.Meta):
         # ensure there are no duplicate scheduling relations between tasks
         constraints = [UniqueConstraint(fields=['first', 'second'], name='TaskSchedulingRelationBlueprint_unique_relation')]
 
@@ -1183,7 +1217,7 @@ class TaskSchedulingRelationDraft(BasicCommon, ProjectPropertyMixin):
     time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')  
     path_to_project = 'first__scheduling_unit_draft__scheduling_set__project'
 
-    class Meta:
+    class Meta(BasicCommon.Meta):
         # ensure there are no duplicate scheduling relations between tasks
         constraints = [UniqueConstraint(fields=['first', 'second'], name='TaskSchedulingRelationDraft_unique_relation')]
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
index a9971f0feb61b69687b8c2575f458ebe1bae8cf2..fad946748e363bc4c1e25a5c1b15b15df86419e9 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
@@ -187,7 +187,7 @@ def populate_subtask_allowed_state_transitions(apps, schema_editor):
         ])
 
 def populate_settings(apps, schema_editor):
-    Setting.objects.create(name=SystemSettingFlag.objects.get(value='dynamic_scheduling_enabled'), value=False)
+    Setting.objects.create(name=SystemSettingFlag.objects.get(value='dynamic_scheduling_enabled'), value=isDevelopmentEnvironment())
 
 def populate_test_data():
     """
@@ -427,22 +427,20 @@ def populate_cycles(apps, schema_editor):
 
 
 def populate_projects(apps, schema_editor):
-    from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data
-
     for name, rank in (("high", 3), ("normal", 2), ("low", 1)):
         tmss_project = models.Project.objects.create(name=name,
                                                  description="Project for all TMSS tests and commissioning (%s priority)" % (name,),
                                                  priority_rank=rank,
-                                                 can_trigger=False,
+                                                 can_trigger=name=='high',
                                                  private_data=True,
                                                  expert=True,
                                                  filler=False)
         tmss_project.tags = ["Commissioning"]
-        tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")])
+        tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 16")])
         tmss_project.save()
 
         # for convenience, create a schedulingset for each project
-        models.SchedulingSet.objects.create(**SchedulingSet_test_data(name="Test Scheduling Set", project=tmss_project))
+        models.SchedulingSet.objects.create(name="Test Scheduling Set", project=tmss_project)
 
         project_quota = ProjectQuota.objects.create(project=tmss_project, value=1e12, resource_type=ResourceType.objects.get(name="LTA Storage"))
         sara_fs = Filesystem.objects.get(name="Lofar Storage (SARA)")
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/HBA-single-beam-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/HBA-single-beam-observation-scheduling-unit-observation-strategy.json
index 612883f0ea8247332ee35f0ff84d05af8c3a5937..38564ad9477f704f9f6fd79f34e3e39e556cc205 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/HBA-single-beam-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/HBA-single-beam-observation-scheduling-unit-observation-strategy.json
@@ -190,6 +190,10 @@
     }
   },
   "parameters":[
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "name":"Target Name",
       "refs":[
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json
index b92f74bd8efa8b3255293a183f9879e8d4136ca9..b4247ca8ef3574b1b7ef0b79468188d2faa8e810 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json
@@ -713,6 +713,10 @@
     }
   },
   "parameters":[
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "name":"Target 1 Name",
       "refs":[
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
index ceaeb481376ed7679aab6fab96a3eb759a9a19f8..1d9b2e16a5effb08a99b3c711d7ca95e3610289a 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
@@ -365,6 +365,10 @@
     }
   },
   "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "refs": [
         "#/tasks/Target Observation/specifications_doc/SAPs/0/digital_pointing"
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/beamforming-complex-voltages-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/beamforming-complex-voltages-observation-scheduling-unit-observation-strategy.json
index 16c517fdf83c44ea368df09927f04336dc7dcac7..2aea1682130225ae3089e9cdbdce83886d9981fb 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/beamforming-complex-voltages-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/beamforming-complex-voltages-observation-scheduling-unit-observation-strategy.json
@@ -898,6 +898,10 @@
     }
   },
   "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "name": "Target Name",
       "refs": [
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-triggers-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-triggers-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..1a971a1ac45bff233951ad5fc0ad0248f236342d
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-triggers-1.json
@@ -0,0 +1,65 @@
+{
+  "$id": "http://tmss.lofar.org/api/schemas/commonschematemplate/triggers/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "triggers",
+  "description": "This schema defines json document for submitting triggers.",
+  "version": 1,
+  "type": "object",
+  "definitions": {},
+  "properties": {
+    "name": {
+      "title": "Name",
+      "description": "The name for the scheduling_unit created by this trigger",
+      "type": "string",
+      "default": ""
+    },
+    "description": {
+      "title": "Description",
+      "description": "An optional description for the scheduling_unit created by this trigger",
+      "type": "string",
+      "default": ""
+    },
+    "mode": {
+      "title": "Mode",
+      "description": "When the mode is 'test' then only a scheduling_unit_draft is created and can be inspected by the user. When mode is 'run', then the draft is turned into a blueprint as well, and an attempt is made to schedule it according to the scheduling constraints.",
+      "type": "string",
+      "enum": ["test", "run"],
+      "default": "test"
+    },
+    "scheduling_set_id": {
+      "title": "Scheduling Set",
+      "description": "The id of the parent scheduling set to which the created scheduling unit is attached",
+      "type": "integer",
+      "default": -1
+    },
+    "scheduling_unit_observing_strategy_template": {
+      "title": "Strategy Template",
+      "description": "Which scheduling_unit observing strategy template to use to create the scheduling_unit from",
+      "type": "object",
+      "default": {},
+      "properties": {
+        "name": {
+          "title": "Name",
+          "description": "The name of the scheduling_unit observing strategy template",
+          "type": "string",
+          "default": ""
+        },
+        "version": {
+          "title": "Version",
+          "description": "The version of the scheduling_unit observing strategy template",
+          "type": "integer",
+          "default": 1
+        },
+        "overrides": {
+          "title": "Template document overrides",
+          "description": "A nested json document with overrides for the default template document, for example the pointing or duration etc. These overrides have to adhere to the template's schema definitions.",
+          "type": "object",
+          "default": {}
+        }
+      },
+      "required": ["name"],
+      "additionalProperties": false
+    }
+  },
+  "required": ["name", "mode", "scheduling_set_id","scheduling_unit_observing_strategy_template"]
+}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/pulsar_timing-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/pulsar_timing-scheduling-unit-observation-strategy.json
index d4adca63b2cdeaae0a302aac1d03ff92b3d3dc77..c2f0d0dc339e41a34e3a13580c466143e306919b 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/pulsar_timing-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/pulsar_timing-scheduling-unit-observation-strategy.json
@@ -971,6 +971,10 @@
     }
   },
   "parameters":[
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "name":"Duration",
       "refs":[
@@ -1107,6 +1111,20 @@
     }
   ],
   "task_scheduling_relations":[
-
-  ]
+  ],
+  "scheduling_constraints_template": "constraints",
+  "scheduling_constraints_doc": {
+    "sky": {
+      "min_distance": {
+        "sun": 0,
+        "moon": 0,
+        "jupiter": 0
+      },
+      "transit_offset": {
+        "to": 21600,
+        "from": -21600
+      },
+      "min_target_elevation": 0.261666666667
+    }
+  }
 }
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/responsive_telescope_HBA_LoTSS-scheduling_unit_observation-strategy-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/responsive_telescope_HBA_LoTSS-scheduling_unit_observation-strategy-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..824d08bd5f02dc538c8339dfce3d5ed8a57ec288
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/responsive_telescope_HBA_LoTSS-scheduling_unit_observation-strategy-1.json
@@ -0,0 +1,552 @@
+{
+  "tasks": {
+    "Ingest": {
+      "tags": [],
+      "description": "Ingest all preprocessed dataproducts",
+      "specifications_doc": {},
+      "specifications_template": "ingest"
+    },
+    "Target Pipeline": {
+      "tags": [],
+      "description": "Preprocessing Pipeline for Target Observation",
+      "specifications_doc": {
+        "flag": {
+          "rfi_strategy": "HBAdefault",
+          "outerchannels": true,
+          "autocorrelations": true
+        },
+        "demix": {
+          "sources": {},
+          "time_steps": 10,
+          "ignore_target": false,
+          "frequency_steps": 64
+        },
+        "average": {
+          "time_steps": 1,
+          "frequency_steps": 4
+        },
+        "storagemanager": "dysco"
+      },
+      "specifications_template": "preprocessing pipeline"
+    },
+    "Target Observation": {
+      "tags": [],
+      "description": "Target Observation",
+      "specifications_doc": {
+        "QA": {
+          "plots": {
+            "enabled": true,
+            "autocorrelation": true,
+            "crosscorrelation": true
+          },
+          "file_conversion": {
+            "enabled": true,
+            "nr_of_subbands": -1,
+            "nr_of_timestamps": 256
+          }
+        },
+        "SAPs": [
+          {
+            "name": "target",
+            "subbands": [
+              104,
+              105,
+              106,
+              107,
+              108,
+              109,
+              110,
+              111,
+              112,
+              113,
+              114,
+              115,
+              116,
+              117,
+              118,
+              119,
+              120,
+              121,
+              122,
+              123,
+              124,
+              125,
+              126,
+              127,
+              128,
+              129,
+              130,
+              131,
+              132,
+              133,
+              134,
+              135,
+              136,
+              137,
+              138,
+              139,
+              140,
+              141,
+              142,
+              143,
+              144,
+              145,
+              146,
+              147,
+              148,
+              149,
+              150,
+              151,
+              152,
+              153,
+              154,
+              155,
+              156,
+              157,
+              158,
+              159,
+              160,
+              161,
+              162,
+              163,
+              164,
+              165,
+              166,
+              167,
+              168,
+              169,
+              170,
+              171,
+              172,
+              173,
+              174,
+              175,
+              176,
+              177,
+              178,
+              179,
+              180,
+              181,
+              182,
+              183,
+              184,
+              185,
+              186,
+              187,
+              188,
+              189,
+              190,
+              191,
+              192,
+              193,
+              194,
+              195,
+              196,
+              197,
+              198,
+              199,
+              200,
+              201,
+              202,
+              203,
+              204,
+              205,
+              206,
+              207,
+              208,
+              209,
+              210,
+              211,
+              212,
+              213,
+              214,
+              215,
+              216,
+              217,
+              218,
+              219,
+              220,
+              221,
+              222,
+              223,
+              224,
+              225,
+              226,
+              227,
+              228,
+              229,
+              230,
+              231,
+              232,
+              233,
+              234,
+              235,
+              236,
+              237,
+              238,
+              239,
+              240,
+              241,
+              242,
+              243,
+              244,
+              245,
+              246,
+              247,
+              248,
+              249,
+              250,
+              251,
+              252,
+              253,
+              254,
+              255,
+              256,
+              257,
+              258,
+              259,
+              260,
+              261,
+              262,
+              263,
+              264,
+              265,
+              266,
+              267,
+              268,
+              269,
+              270,
+              271,
+              272,
+              273,
+              274,
+              275,
+              276,
+              277,
+              278,
+              279,
+              280,
+              281,
+              282,
+              283,
+              284,
+              285,
+              286,
+              287,
+              288,
+              289,
+              290,
+              291,
+              292,
+              293,
+              294,
+              295,
+              296,
+              297,
+              298,
+              299,
+              300,
+              301,
+              302,
+              303,
+              304,
+              305,
+              306,
+              307,
+              308,
+              309,
+              310,
+              311,
+              312,
+              313,
+              314,
+              315,
+              316,
+              317,
+              318,
+              319,
+              320,
+              321,
+              322,
+              323,
+              324,
+              325,
+              326,
+              327,
+              328,
+              329,
+              330,
+              331,
+              332,
+              333,
+              334,
+              335,
+              336,
+              337,
+              338,
+              339,
+              340,
+              341,
+              342,
+              343,
+              344,
+              345,
+              346,
+              347
+            ],
+            "digital_pointing": {
+              "angle1": 0.6624317181687094,
+              "angle2": 1.5579526427549426,
+              "direction_type": "J2000"
+            }
+          }
+        ],
+        "filter": "HBA_110_190",
+        "duration": 7200,
+        "tile_beam": {
+          "angle1": 0.6624317181687094,
+          "angle2": 1.5579526427549426,
+          "direction_type": "J2000"
+        },
+        "correlator": {
+          "storage_cluster": "CEP4",
+          "integration_time": 1,
+          "channels_per_subband": 64
+        },
+        "antenna_set": "HBA_DUAL_INNER",
+        "station_groups": [
+          {
+            "stations": [
+              "CS001",
+              "CS002",
+              "CS003",
+              "CS004",
+              "CS005",
+              "CS006",
+              "CS007",
+              "CS011",
+              "CS013",
+              "CS017",
+              "CS021",
+              "CS024",
+              "CS026",
+              "CS028",
+              "CS030",
+              "CS031",
+              "CS032",
+              "CS101",
+              "CS103",
+              "CS201",
+              "CS301",
+              "CS302",
+              "CS401",
+              "CS501",
+              "RS106",
+              "RS205",
+              "RS208",
+              "RS210",
+              "RS305",
+              "RS306",
+              "RS307",
+              "RS310",
+              "RS406",
+              "RS407",
+              "RS409",
+              "RS503",
+              "RS508",
+              "RS509"
+            ],
+            "max_nr_missing": 4
+          }
+        ]
+      },
+      "specifications_template": "target observation"
+    },
+    "Calibrator Pipeline": {
+      "tags": [],
+      "description": "Preprocessing Pipeline for Calibrator Observation",
+      "specifications_doc": {
+        "flag": {
+          "rfi_strategy": "HBAdefault",
+          "outerchannels": true,
+          "autocorrelations": true
+        },
+        "demix": {
+          "sources": {},
+          "time_steps": 10,
+          "ignore_target": false,
+          "frequency_steps": 64
+        },
+        "average": {
+          "time_steps": 1,
+          "frequency_steps": 4
+        },
+        "storagemanager": "dysco"
+      },
+      "specifications_template": "preprocessing pipeline"
+    },
+    "Calibrator Observation": {
+      "tags": [],
+      "description": "Calibrator Observation after Target Observation",
+      "specifications_doc": {
+        "name": "calibrator",
+        "duration": 600,
+        "pointing": {
+          "angle1": 0.6624317181687094,
+          "angle2": 1.5579526427549426,
+          "direction_type": "J2000"
+        },
+        "autoselect": false
+      },
+      "specifications_template": "calibrator observation"
+    }
+  },
+  "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc/time"]
+    },
+    {
+      "name": "Target Name",
+      "refs": [
+        "#/tasks/Target Observation/specifications_doc/SAPs/0/name"
+      ]
+    },
+    {
+      "name": "Target Pointing",
+      "refs": [
+        "#/tasks/Target Observation/specifications_doc/SAPs/0/digital_pointing"
+      ]
+    },
+    {
+      "name": "Subbands",
+      "refs": [
+        "#/tasks/Target Observation/specifications_doc/SAPs/0/subbands"
+      ]
+    },
+    {
+      "name": "Tile Beam",
+      "refs": [
+        "#/tasks/Target Observation/specifications_doc/tile_beam"
+      ]
+    },
+    {
+      "name": "Target Duration",
+      "refs": [
+        "#/tasks/Target Observation/specifications_doc/duration"
+      ]
+    },
+    {
+      "name": "Calibrator Name",
+      "refs": [
+        "#/tasks/Calibrator Observation/specifications_doc/name"
+      ]
+    },
+    {
+      "name": "Calibrator Pointing",
+      "refs": [
+        "#/tasks/Calibrator Observation/specifications_doc/pointing"
+      ]
+    },
+    {
+      "name": "Calibrator Duration",
+      "refs": [
+        "#/tasks/Calibrator Observation/specifications_doc/duration"
+      ]
+    }
+  ],
+  "task_relations": [
+    {
+      "tags": [],
+      "input": {
+        "role": "any",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "output": {
+        "role": "correlator",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "consumer": "Calibrator Pipeline",
+      "producer": "Calibrator Observation",
+      "selection_doc": {},
+      "selection_template": "all"
+    },
+    {
+      "tags": [],
+      "input": {
+        "role": "any",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "output": {
+        "role": "correlator",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "consumer": "Target Pipeline",
+      "producer": "Target Observation",
+      "selection_doc": {
+        "sap": [
+          "target"
+        ]
+      },
+      "selection_template": "SAP"
+    },
+    {
+      "tags": [],
+      "input": {
+        "role": "any",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "output": {
+        "role": "any",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "consumer": "Ingest",
+      "producer": "Calibrator Pipeline",
+      "selection_doc": {},
+      "selection_template": "all"
+    },
+    {
+      "tags": [],
+      "input": {
+        "role": "any",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "output": {
+        "role": "any",
+        "datatype": "visibilities",
+        "dataformat": "MeasurementSet"
+      },
+      "consumer": "Ingest",
+      "producer": "Target Pipeline",
+      "selection_doc": {},
+      "selection_template": "all"
+    }
+  ],
+  "task_scheduling_relations": [
+    {
+      "first": "Calibrator Observation",
+      "second": "Target Observation",
+      "placement": "after",
+      "time_offset": 60
+    }
+  ],
+  "scheduling_constraints_doc": {
+    "sky": {
+      "transit_offset": {
+        "to": 86400,
+        "from": -86400
+      }
+    },
+    "time": {
+      "between": []
+    }
+  },
+  "scheduling_constraints_template": "constraints"
+}
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-beamformed-observation-pipeline-ingest-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-beamformed-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
index 2d30b5d158bed5cfc4bed5bc34d664929183d80e..4f2d980fa5f4fd8dad760c960ec52927a50bacac 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-beamformed-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-beamformed-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
@@ -489,6 +489,10 @@
   "task_scheduling_relations": [
   ],
   "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "refs": [
         "#/tasks/Observation/specifications_doc/duration"
@@ -507,5 +511,20 @@
       ],
       "name": "Tile Beam"
     }
-  ]
+  ],
+  "scheduling_constraints_template": "constraints",
+  "scheduling_constraints_doc": {
+    "sky": {
+      "min_distance": {
+        "sun": 0,
+        "moon": 0,
+        "jupiter": 0
+      },
+      "transit_offset": {
+        "to": 21600,
+        "from": -21600
+      },
+      "min_target_elevation": 0.261666666667
+    }
+  }
 }
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
index fb42281e098c0088e91cd4e8b9264af943f0e53f..655149a7457ada9ed875e8da50168eaecd78cc07 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
@@ -171,6 +171,10 @@
   },
   "scheduling_constraints_template": "constraints",
   "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "refs": [
         "#/tasks/Observation/specifications_doc/duration"
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json
index 5e27ecea8cc605dae4fe1053c6f370cee44f1534..53ed5dbf731f4a4e2ef9c9e43380c81690042e68 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json
@@ -129,6 +129,10 @@
     }
   },
   "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "refs": [
         "#/tasks/Observation/specifications_doc/duration"
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
index 6c98b054b6b0ba7557fda5f9beb08cdef626be1e..b083d1177d7f51941dc17ce3f9b9eceff5f6a357 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
@@ -68,6 +68,10 @@
     }
   },
   "parameters": [
+    {
+      "name": "Scheduling Constraints",
+      "refs": ["#/scheduling_constraints_doc"]
+    },
     {
       "refs": [
         "#/tasks/Observation/specifications_doc/duration"
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json
index d8fe7a85ae211f6094b9a8313c227125b44db0f5..fdf0b7943df3db1cc7eb687d5b9716381adff2ed 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json
@@ -27,6 +27,10 @@
     "file_name": "common_schema_template-pipeline-1.json",
     "template": "common_schema_template"
   },
+  {
+    "file_name": "common_schema_template-triggers-1.json",
+    "template": "common_schema_template"
+  },
   {
     "file_name": "dataproduct_specifications_template-SAP-1.json",
     "template": "dataproduct_specifications_template"
@@ -218,6 +222,15 @@
     "description": "This observation strategy template defines a LoTSS (Co-)observing run with a Calibrator-Target-Calibrator observation chain, plus a preprocessing pipeline for each and ingest of pipeline data only.",
     "version": 1
   },
+  {
+    "file_name": "responsive_telescope_HBA_LoTSS-scheduling_unit_observation-strategy-1.json",
+    "template": "scheduling_unit_observing_strategy_template",
+    "scheduling_unit_template_name": "scheduling unit",
+    "scheduling_unit_template_version": "1",
+    "name": "Responsive Telescope HBA LoTSS",
+    "description": "This observation strategy template defines a similar observation strategy as for LoTSS, but then with a single Calibrator at the end so that the Target Observation can start immediately once the trigger is submitted.",
+    "version": 1
+  },
   {
     "file_name": "HBA-single-beam-observation-scheduling-unit-observation-strategy.json",
     "template": "scheduling_unit_observing_strategy_template",
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
index 6180292a3916534ac9279819325e88b54a2710dd..0e081ba8296b54e329bbc268d870ae689c7337d6 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
@@ -256,6 +256,7 @@ class SchedulingUnitDraftSerializer(DynamicRelationalHyperlinkedModelSerializer)
     class Meta:
         model = models.SchedulingUnitDraft
         fields = '__all__'
+        read_only_fields = ['interrupts_telescope']
         extra_fields = ['scheduling_unit_blueprints', 'task_drafts', 'duration']
         expandable_fields = {
             'observation_strategy_template': 'lofar.sas.tmss.tmss.tmssapp.serializers.SchedulingUnitObservingStrategyTemplateSerializer',
@@ -271,13 +272,13 @@ class SchedulingUnitDraftCopySerializer(SchedulingUnitDraftSerializer):
     class Meta(SchedulingUnitDraftSerializer.Meta):
        fields = ['copy_reason']
        extra_fields =['scheduling_set_id']
-       read_only_fields = ['scheduling_unit_blueprints','task_drafts']
+       read_only_fields = SchedulingUnitDraftSerializer.Meta.read_only_fields + ['scheduling_unit_blueprints','task_drafts']
 
 
 class SchedulingUnitDraftCopyFromSchedulingSetSerializer(SchedulingUnitDraftSerializer):
     class Meta(SchedulingUnitDraftSerializer.Meta):
        fields = ['copy_reason']
-       read_only_fields = ['scheduling_unit_blueprints','task_drafts']
+       read_only_fields = SchedulingUnitDraftSerializer.Meta.read_only_fields + ['scheduling_unit_blueprints','task_drafts']
 
 
 class SchedulingUnitBlueprintSerializer(DynamicRelationalHyperlinkedModelSerializer):
@@ -287,6 +288,7 @@ class SchedulingUnitBlueprintSerializer(DynamicRelationalHyperlinkedModelSeriali
     class Meta:
         model = models.SchedulingUnitBlueprint
         fields = '__all__'
+        read_only_fields = ['interrupts_telescope']
         extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time', 'status', 'observed_end_time', 'output_pinned']
         expandable_fields = {
             'requirements_template': 'lofar.sas.tmss.tmss.tmssapp.serializers.SchedulingUnitTemplateSerializer',
@@ -299,7 +301,7 @@ class SchedulingUnitBlueprintCopyToSchedulingUnitDraftSerializer(SchedulingUnitB
     class Meta(SchedulingUnitDraftSerializer.Meta):
        fields = ['copy_reason']
        extra_fields =['scheduling_set_id']
-       read_only_fields = ['scheduling_unit_blueprints','task_drafts']
+       read_only_fields = SchedulingUnitBlueprintSerializer.Meta.read_only_fields + ['scheduling_unit_blueprints','task_drafts']
 
 
 class TaskDraftSerializer(DynamicRelationalHyperlinkedModelSerializer):
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py
index cae5b5fa07ea292a6f87f2ccc4853446590b416d..a0a999ce6d6aadda8ae230a271b2b71ac471c4ec 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py
@@ -12,12 +12,58 @@ import logging
 from datetime import datetime, timedelta
 from django.db.utils import IntegrityError
 from django.db import transaction
+from lofar.common.util import dict_with_overrides, subdict_of_pointer_items
 
 logger = logging.getLogger(__name__)
 
 # cache for json schema's
 _schema_cache = {}
 
+
+def create_scheduling_unit_draft_from_observing_strategy_template(strategy_template: models.SchedulingUnitObservingStrategyTemplate, scheduling_set: models.SchedulingSet, name: str, description: str=None, requirements_doc_overrides: dict=None) -> models.SchedulingUnitDraft:
+    '''create a new SchedulingUnitDraft from the given strategy_template with 'parent' scheduling_set'''
+    requirements_doc = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema)
+
+    # apply overrides on template requirements_doc if given
+    if requirements_doc_overrides:
+        # only use allowed overrides
+        allowed_parameter_pointers = sum([p['refs'] for p in strategy_template.template['parameters']], [])
+        allowed_overrides = subdict_of_pointer_items(requirements_doc_overrides, allowed_parameter_pointers)
+
+        if allowed_overrides != requirements_doc_overrides:
+            raise InvalidSpecificationException("Specified faulty overrides. These are the allowed overridable parameters: \n%s\nSubmitted overrides:%s" % (
+                                                '\n'.join([str(p) for p in allowed_parameter_pointers]), requirements_doc_overrides))
+
+        # do the actual override of the remaining allowed items
+        requirements_doc = dict_with_overrides(requirements_doc, allowed_overrides)
+
+    if 'scheduling_constraints_template' in requirements_doc:
+        scheduling_constraints_template_name = requirements_doc.pop('scheduling_constraints_template')
+        scheduling_constraints_template = models.SchedulingConstraintsTemplate.objects.get(name=scheduling_constraints_template_name)
+    else:
+        # get the default_scheduling_constraints_template
+        default_scheduling_constraints_template = models.DefaultSchedulingConstraintsTemplate.objects.all().order_by('created_at').last()
+        if default_scheduling_constraints_template:
+            scheduling_constraints_template = default_scheduling_constraints_template.template
+        else:
+            scheduling_constraints_template = None
+
+    # extract the scheduling_constraints_doc from the template_doc,
+    # so we can feed it in separately into the SchedulingUnitDraft.
+    scheduling_constraints_doc = requirements_doc.pop('scheduling_constraints_doc', {})
+
+    scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name=name,
+                                                                      description=description,
+                                                                      scheduling_set=scheduling_set,
+                                                                      requirements_doc=requirements_doc,
+                                                                      requirements_template=strategy_template.scheduling_unit_template,
+                                                                      observation_strategy_template=strategy_template,
+                                                                      scheduling_constraints_doc=scheduling_constraints_doc,
+                                                                      scheduling_constraints_template=scheduling_constraints_template)
+
+    return scheduling_unit_draft
+
+
 def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint:
     """
     Create a SchedulingUnitBlueprint from the SchedulingUnitDraft
@@ -145,99 +191,95 @@ def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.
     """
     logger.debug("create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft.id=%s, name='%s') ...", scheduling_unit_draft.pk, scheduling_unit_draft.name)
 
-    if len(scheduling_unit_draft.requirements_doc.get("tasks", {})) == 0:
-        raise BlueprintCreationException("create_task_drafts_from_scheduling_unit_draft: scheduling_unit_draft.id=%s has no tasks defined in its requirements_doc" % (scheduling_unit_draft.pk,))
-
-    schema_cache = {}
-
-    for task_name, task_definition in scheduling_unit_draft.requirements_doc["tasks"].items():
-        task_template_name = task_definition["specifications_template"]
-        task_template = models.TaskTemplate.objects.get(name=task_template_name)
-
-        task_specifications_doc = task_definition["specifications_doc"]
-        task_specifications_doc = add_defaults_to_json_object_for_schema(task_specifications_doc, task_template.schema, cache=_schema_cache)
-
-        try:
-            logger.debug("creating task draft... task_name='%s', task_template_name='%s'", task_template_name, task_template_name)
-
-            with transaction.atomic():
-                task_draft = models.TaskDraft.objects.create(name=task_name,
-                                                             description=task_definition.get("description",""),
-                                                             tags=task_definition.get("tags",[]),
-                                                             specifications_doc=task_specifications_doc,
-                                                             copy_reason=models.CopyReason.objects.get(value='template'),
-                                                             copies=None,
-                                                             scheduling_unit_draft=scheduling_unit_draft,
-                                                             specifications_template=task_template)
-
-                logger.info("created task draft id=%s task_name='%s', task_template_name='%s'", task_draft.pk, task_name, task_template_name)
-        except IntegrityError as e:
-            if 'TaskDraft_unique_name_in_scheduling_unit' in str(e):
-                logger.info("task draft task_name='%s', task_template_name='%s' already exists in scheduling_unit id=%s name='%s'",
-                            task_name, task_template_name, scheduling_unit_draft.id, scheduling_unit_draft.name)
-            else:
-                raise
-
-    # Now create task relations
-    for task_relation_definition in scheduling_unit_draft.requirements_doc["task_relations"]:
-        try:
-            producer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["producer"])
-            consumer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["consumer"])
-            input_role = models.TaskConnectorType.objects.get(task_template=consumer_task_draft.specifications_template,
-                                                              role=task_relation_definition["input"]["role"],
-                                                              datatype=task_relation_definition["input"]["datatype"],
-                                                              dataformat=task_relation_definition["input"]["dataformat"],
-                                                              iotype=models.IOType.Choices.INPUT.value)
-            output_role = models.TaskConnectorType.objects.get(task_template=producer_task_draft.specifications_template,
-                                                               role=task_relation_definition["output"]["role"],
-                                                               datatype=task_relation_definition["output"]["datatype"],
-                                                               dataformat=task_relation_definition["output"]["dataformat"],
-                                                               iotype=models.IOType.Choices.OUTPUT.value)
-            selection_template = models.TaskRelationSelectionTemplate.objects.get(name=task_relation_definition["selection_template"])
-        except Exception as e:
-            logger.error("Could not determine Task Relations for %s. Error: %s", task_relation_definition, e)
-            raise
-
-        try:
-            with transaction.atomic():
-                task_relation = models.TaskRelationDraft.objects.create(tags=task_relation_definition.get("tags",[]),
-                                                                        selection_doc=task_relation_definition["selection_doc"],
-                                                                        producer=producer_task_draft,
-                                                                        consumer=consumer_task_draft,
-                                                                        input_role=input_role,
-                                                                        output_role=output_role,
-                                                                        selection_template=selection_template)
-                logger.info("created task_relation id=%s between task draft id=%s name='%s' and id=%s name='%s",
-                            task_relation.pk, producer_task_draft.id, producer_task_draft.name, consumer_task_draft.id, consumer_task_draft.name)
-        except IntegrityError as e:
-            if 'TaskRelationDraft_unique_relation' in str(e):
-                logger.info("task_relation between task draft id=%s name='%s' and id=%s name='%s already exists",
-                             producer_task_draft.id, producer_task_draft.name, consumer_task_draft.id, consumer_task_draft.name)
-            else:
+    if len(scheduling_unit_draft.requirements_doc.get("tasks", {})) > 0:
+        for task_name, task_definition in scheduling_unit_draft.requirements_doc["tasks"].items():
+            task_template_name = task_definition["specifications_template"]
+            task_template = models.TaskTemplate.objects.get(name=task_template_name)
+
+            task_specifications_doc = task_definition["specifications_doc"]
+            task_specifications_doc = add_defaults_to_json_object_for_schema(task_specifications_doc, task_template.schema, cache=_schema_cache)
+
+            try:
+                logger.debug("creating task draft... task_name='%s', task_template_name='%s'", task_name, task_template_name)
+
+                with transaction.atomic():
+                    task_draft = models.TaskDraft.objects.create(name=task_name,
+                                                                 description=task_definition.get("description",""),
+                                                                 tags=task_definition.get("tags",[]),
+                                                                 specifications_doc=task_specifications_doc,
+                                                                 copy_reason=models.CopyReason.objects.get(value='template'),
+                                                                 copies=None,
+                                                                 scheduling_unit_draft=scheduling_unit_draft,
+                                                                 specifications_template=task_template)
+
+                    logger.info("created task draft id=%s task_name='%s', task_template_name='%s'", task_draft.pk, task_name, task_template_name)
+            except IntegrityError as e:
+                if 'TaskDraft_unique_name_in_scheduling_unit' in str(e):
+                    logger.info("task draft task_name='%s', task_template_name='%s' already exists in scheduling_unit id=%s name='%s'",
+                                task_name, task_template_name, scheduling_unit_draft.id, scheduling_unit_draft.name)
+                else:
+                    raise
+
+        # Now create task relations
+        for task_relation_definition in scheduling_unit_draft.requirements_doc["task_relations"]:
+            try:
+                producer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["producer"])
+                consumer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["consumer"])
+                input_role = models.TaskConnectorType.objects.get(task_template=consumer_task_draft.specifications_template,
+                                                                  role=task_relation_definition["input"]["role"],
+                                                                  datatype=task_relation_definition["input"]["datatype"],
+                                                                  dataformat=task_relation_definition["input"]["dataformat"],
+                                                                  iotype=models.IOType.Choices.INPUT.value)
+                output_role = models.TaskConnectorType.objects.get(task_template=producer_task_draft.specifications_template,
+                                                                   role=task_relation_definition["output"]["role"],
+                                                                   datatype=task_relation_definition["output"]["datatype"],
+                                                                   dataformat=task_relation_definition["output"]["dataformat"],
+                                                                   iotype=models.IOType.Choices.OUTPUT.value)
+                selection_template = models.TaskRelationSelectionTemplate.objects.get(name=task_relation_definition["selection_template"])
+            except Exception as e:
+                logger.error("Could not determine Task Relations for %s. Error: %s", task_relation_definition, e)
                 raise
 
-
-    # task_scheduling_relation
-    for task_scheduling_relation_definition in scheduling_unit_draft.requirements_doc["task_scheduling_relations"]:
-        placement = models.SchedulingRelationPlacement.objects.get(value=task_scheduling_relation_definition["placement"])
-        time_offset = task_scheduling_relation_definition["time_offset"]
-        first_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["first"])
-        second_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["second"])
-
-        try:
-            with transaction.atomic():
-                task_scheduling_relation = models.TaskSchedulingRelationDraft.objects.create(placement=placement,
-                                                                                             time_offset=time_offset,
-                                                                                             first=first_task_draft,
-                                                                                             second=second_task_draft)
-                logger.info("created task_scheduling_relation id=%s between task draft id=%s name='%s' and id=%s name='%s",
-                            task_scheduling_relation.pk, first_task_draft.id, first_task_draft.name, second_task_draft.id, second_task_draft.name)
-        except IntegrityError as e:
-            if 'TaskSchedulingRelationDraft_unique_relation' in str(e):
-                logger.info("task_scheduling_relation between task draft id=%s name='%s' and id=%s name='%s already exists",
-                            first_task_draft.id, first_task_draft.name, second_task_draft.id, second_task_draft.name)
-            else:
-                raise
+            try:
+                with transaction.atomic():
+                    task_relation = models.TaskRelationDraft.objects.create(tags=task_relation_definition.get("tags",[]),
+                                                                            selection_doc=task_relation_definition["selection_doc"],
+                                                                            producer=producer_task_draft,
+                                                                            consumer=consumer_task_draft,
+                                                                            input_role=input_role,
+                                                                            output_role=output_role,
+                                                                            selection_template=selection_template)
+                    logger.info("created task_relation id=%s between task draft id=%s name='%s' and id=%s name='%s'",
+                                task_relation.pk, producer_task_draft.id, producer_task_draft.name, consumer_task_draft.id, consumer_task_draft.name)
+            except IntegrityError as e:
+                if 'TaskRelationDraft_unique_relation' in str(e):
+                    logger.info("task_relation between task draft id=%s name='%s' and id=%s name='%s' already exists",
+                                 producer_task_draft.id, producer_task_draft.name, consumer_task_draft.id, consumer_task_draft.name)
+                else:
+                    raise
+
+
+        # task_scheduling_relation
+        for task_scheduling_relation_definition in scheduling_unit_draft.requirements_doc["task_scheduling_relations"]:
+            placement = models.SchedulingRelationPlacement.objects.get(value=task_scheduling_relation_definition["placement"])
+            time_offset = task_scheduling_relation_definition["time_offset"]
+            first_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["first"])
+            second_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["second"])
+
+            try:
+                with transaction.atomic():
+                    task_scheduling_relation = models.TaskSchedulingRelationDraft.objects.create(placement=placement,
+                                                                                                 time_offset=time_offset,
+                                                                                                 first=first_task_draft,
+                                                                                                 second=second_task_draft)
+                    logger.info("created task_scheduling_relation id=%s between task draft id=%s name='%s' and id=%s name='%s'",
+                                task_scheduling_relation.pk, first_task_draft.id, first_task_draft.name, second_task_draft.id, second_task_draft.name)
+            except IntegrityError as e:
+                if 'TaskSchedulingRelationDraft_unique_relation' in str(e):
+                    logger.info("task_scheduling_relation between task draft id=%s name='%s' and id=%s name='%s' already exists",
+                                first_task_draft.id, first_task_draft.name, second_task_draft.id, second_task_draft.name)
+                else:
+                    raise
 
     scheduling_unit_draft.refresh_from_db()
     return scheduling_unit_draft
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/views.py b/SAS/TMSS/backend/src/tmss/tmssapp/views.py
index f999b495fb48ffccda5d3c89250bdfc63ae07846..7620179e0a574d3b5cd51d0f2be6f0379c8364a8 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/views.py
@@ -1,12 +1,19 @@
 import os
 
+import logging
+logger = logging.getLogger(__name__)
+
 from django.http import HttpResponse, JsonResponse, Http404
+from rest_framework.response import Response as RestResponse
+from rest_framework import status
 from django.shortcuts import get_object_or_404, render, redirect
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.common.json_utils import get_default_json_object_for_schema
 from lofar.common.datetimeutils import formatDatetime
+from lofar.common.util import single_line_with_single_spaces
 from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset
 from lofar.sas.tmss.tmss.tmssapp.adapters.reports import create_cycle_report
+from lofar.sas.tmss.tmss.tmssapp.tasks import create_scheduling_unit_draft_from_observing_strategy_template, create_task_blueprints_and_subtasks_from_scheduling_unit_draft
 from drf_yasg.utils import swagger_auto_schema
 from drf_yasg.openapi import Parameter
 from rest_framework.authtoken.models import Token
@@ -14,6 +21,7 @@ from rest_framework.permissions import AllowAny
 from rest_framework.decorators import authentication_classes, permission_classes
 from django.apps import apps
 import re
+from lofar.sas.tmss.tmss.tmssapp.serializers import SchedulingUnitDraftSerializer, SchedulingUnitBlueprintSerializer
 
 from rest_framework.decorators import api_view
 from datetime import datetime
@@ -343,3 +351,45 @@ def get_cycles_report(request):
         results[c_pk] = create_cycle_report(request, c)
 
     return JsonResponse(results)
+
+
+@api_view(['POST'])
+def submit_trigger(request):
+    trigger_doc = request.data
+    logger.info("Received trigger submission: %s", single_line_with_single_spaces(trigger_doc))
+
+    # check if doc is valid
+    trigger_template = get_object_or_404(models.CommonSchemaTemplate, name="triggers")
+    trigger_template.validate_document(trigger_doc)
+
+    # gather relevant objects from db
+    strategy_template = get_object_or_404(models.SchedulingUnitObservingStrategyTemplate, name=trigger_doc['scheduling_unit_observing_strategy_template']['name'], version=trigger_doc['scheduling_unit_observing_strategy_template']['version'])
+    scheduling_set = get_object_or_404(models.SchedulingSet, pk=trigger_doc['scheduling_set_id'])
+
+    # check permissions
+    if not scheduling_set.project.can_trigger:
+        msg = 'Project \'%s\' does not allow triggers' % scheduling_set.project
+        logger.error(msg)
+        return RestResponse(msg, status=status.HTTP_403_FORBIDDEN)
+
+    from django.db import transaction
+    with transaction.atomic():
+        # try to create the draft from the trigger_doc
+        scheduling_unit_draft = create_scheduling_unit_draft_from_observing_strategy_template(strategy_template,
+                                                                                              scheduling_set,
+                                                                                              name=trigger_doc['name'],
+                                                                                              description=trigger_doc.get('description'),
+                                                                                              requirements_doc_overrides=trigger_doc['scheduling_unit_observing_strategy_template'].get('overrides', {}))
+
+        # indicate that we are allowed to interrupt the telescope
+        scheduling_unit_draft.interrupts_telescope = True
+        scheduling_unit_draft.save()
+
+        # if the trigger mode is 'run', then turn it into a blueprint which the dynamic scheduler will try to pick up, given the scheduling constraints
+        if trigger_doc['mode'].lower() == 'run':
+            scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+            return RestResponse(SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request': request}).data,
+                                status=status.HTTP_201_CREATED)
+
+        return RestResponse(SchedulingUnitDraftSerializer(scheduling_unit_draft, context={'request': request}).data,
+                            status=status.HTTP_201_CREATED)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
index 73d586327f6798df7ec47763d26ed3bd03fda529..752ea356aaa6ec0ba81abe864314030ccbefb7ea 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
@@ -86,49 +86,68 @@ class SchedulingUnitObservingStrategyTemplateViewSet(LOFARViewSet):
                                             Parameter(name='name', required=False, type='string', in_='query',
                                                       description="The name for the newly created scheduling_unit"),
                                             Parameter(name='description', required=False, type='string', in_='query',
-                                                      description="The description for the newly created scheduling_unit")])
+                                                      description="The description for the newly created scheduling_unit"),
+                                            Parameter(name='requirements_doc_overrides', required=False, type='dict', in_='body',
+                                                      description="a JSON dict containing the override values for the parameters in the template")
+                                            ])
     @action(methods=['post'], detail=True)
     def create_scheduling_unit(self, request, pk=None):
         strategy_template = get_object_or_404(models.SchedulingUnitObservingStrategyTemplate, pk=pk)
-        spec = add_defaults_to_json_object_for_schema(strategy_template.template,
-                                                      strategy_template.scheduling_unit_template.schema)
-
-        if 'scheduling_constraints_template' in strategy_template.template:
-            # create/update the scheduling_constraints_doc from the strategy_template if defined
-            scheduling_constraints_template_name = strategy_template.template['scheduling_constraints_template']
-            scheduling_constraints_template = SchedulingConstraintsTemplate.objects.get(name=scheduling_constraints_template_name)
-            scheduling_constraints_doc = strategy_template.template.get('scheduling_constraints_doc',{})
-            scheduling_constraints_doc = add_defaults_to_json_object_for_schema(scheduling_constraints_doc, scheduling_constraints_template.schema)
-        else:
-            # get the default_scheduling_constraints_template and fill a doc if available
-            default_scheduling_constraints_template = models.DefaultSchedulingConstraintsTemplate.objects.all().order_by('created_at').last()
-            if default_scheduling_constraints_template:
-                scheduling_constraints_template = default_scheduling_constraints_template.template
-                scheduling_constraints_doc = get_default_json_object_for_schema(scheduling_constraints_template.schema)
-            else:
-                scheduling_constraints_template = None
-                scheduling_constraints_doc = None
-
         scheduling_set = get_object_or_404(models.SchedulingSet, pk=request.query_params['scheduling_set_id'])
+        name = request.query_params.get('name', "scheduling unit")
+        description = request.query_params.get('description', "")
+
+        requirements_doc_overrides = request.data if isinstance(request.data, dict) else None
 
-        scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name=request.query_params.get('name', "scheduling unit"),
-                                                                          description=request.query_params.get('description', ""),
-                                                                          requirements_doc=spec,
-                                                                          scheduling_set=scheduling_set,
-                                                                          requirements_template=strategy_template.scheduling_unit_template,
-                                                                          observation_strategy_template=strategy_template,
-                                                                          scheduling_constraints_doc=scheduling_constraints_doc,
-                                                                          scheduling_constraints_template=scheduling_constraints_template)
+        scheduling_unit_draft = create_scheduling_unit_draft_from_observing_strategy_template(strategy_template, scheduling_set, name, description, requirements_doc_overrides)
 
+        # return a response with the new serialized SchedulingUnitDraft, and a Location to the new instance in the header
         scheduling_unit_observation_strategy_template_path = request._request.path
         base_path = scheduling_unit_observation_strategy_template_path[:scheduling_unit_observation_strategy_template_path.find('/scheduling_unit_observing_strategy_template')]
         scheduling_unit_draft_path = '%s/scheduling_unit_draft/%s/' % (base_path, scheduling_unit_draft.id,)
 
-        # return a response with the new serialized SchedulingUnitDraft, and a Location to the new instance in the header
         return Response(serializers.SchedulingUnitDraftSerializer(scheduling_unit_draft, context={'request':request}).data,
                         status=status.HTTP_201_CREATED,
                         headers={'Location': scheduling_unit_draft_path})
 
+
+    @swagger_auto_schema(responses={200: 'The template document as JSON object with all defaults filled in.',
+                                    403: 'forbidden'},
+                         operation_description="Get the template document as JSON object with all defaults filled in. This template document is the foundation from which a scheduling unit draft is created. The intended use is to carefully inspect this JSON document for all specification details for this strategy template.")
+    @action(methods=['get'], detail=True)
+    def template_doc_complete_with_defaults(self, request, pk=None):
+        strategy_template = get_object_or_404(models.SchedulingUnitObservingStrategyTemplate, pk=pk)
+        return JsonResponse(strategy_template.template_doc_complete_with_defaults, json_dumps_params={'indent': 2})
+
+
+    @swagger_auto_schema(responses={200: 'The template document as JSON object with just the items that are in the parameters list.',
+                                    403: 'forbidden'},
+                         operation_description="Get the slimmed down version of the template document as JSON object with all defaults filled in, but then only for the allowed-to-be-edited parameters. This template document is the foundation from which a scheduling unit draft is created. The intended use is to carefully inspect this JSON document for all specification details for this strategy template.")
+    @action(methods=['get'], detail=True)
+    def template_doc_with_just_the_parameters(self, request, pk=None):
+        strategy_template = get_object_or_404(models.SchedulingUnitObservingStrategyTemplate, pk=pk)
+        return JsonResponse(strategy_template.template_doc_with_just_the_parameters, json_dumps_params={'indent': 2})
+
+
+    @swagger_auto_schema(responses={200: 'Get a trigger JSON document ready to be used for trigger submission',
+                                    403: 'forbidden'},
+                         operation_description="Get a trigger JSON document ready to be used for trigger submission.")
+    @action(methods=['get'], detail=True)
+    def trigger_doc(self, request, pk=None):
+        '''Convenience endpoint to easily fetch a JSON doc for the requested strategy_template which can be used for trigger submission'''
+        strategy_template = get_object_or_404(models.SchedulingUnitObservingStrategyTemplate, pk=pk)
+        trigger_template = get_object_or_404(models.CommonSchemaTemplate, name='triggers')
+        trigger_doc = trigger_template.get_default_json_document_for_schema()
+        trigger_doc['name'] = 'my_trigger_name'
+        trigger_doc['description'] = 'my_description_for_this_trigger'
+        trigger_doc['scheduling_unit_observing_strategy_template']['name'] = strategy_template.name
+        trigger_doc['scheduling_unit_observing_strategy_template']['version'] = int(strategy_template.version)
+        trigger_doc['scheduling_unit_observing_strategy_template']['overrides'] = strategy_template.template_doc_with_just_the_parameters
+
+        return JsonResponse(trigger_doc, json_dumps_params={'indent': 2})
+
+
+
 class SchedulingUnitObservingStrategyTemplateNestedViewSet(LOFARNestedViewSet):
     queryset = models.SchedulingUnitObservingStrategyTemplate.objects.all()
     serializer_class = serializers.SchedulingUnitObservingStrategyTemplateSerializer
@@ -572,6 +591,7 @@ class SchedulingUnitDraftNestedViewSet(LOFARNestedViewSet):
         else:
             return models.SchedulingUnitDraft.objects.all()
 
+
 class TaskDraftCopyViewSet(LOFARCopyViewSet):
     queryset = models.TaskDraft.objects.all()
     serializer_class = serializers.TaskDraftSerializer
diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py
index 5953c86b4c4d79ec844b32034e5c7b637a7e18cb..409fcec7002c5bf2954bb8382471c7f08d289cb6 100644
--- a/SAS/TMSS/backend/src/tmss/urls.py
+++ b/SAS/TMSS/backend/src/tmss/urls.py
@@ -79,6 +79,7 @@ urlpatterns = [
     re_path('util/target_rise_and_set/?', views.get_target_rise_and_set, name='get_target_rise_and_set'),
     re_path('util/target_transit/?', views.get_target_transit, name='get_target_transit'),
     re_path('util/cycles_report/?', views.get_cycles_report, name='get_cycles_report'),
+    re_path('submit_trigger', views.submit_trigger, name='submit_trigger'),
 ]
 
 if os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False):
diff --git a/SAS/TMSS/backend/test/t_tasks.py b/SAS/TMSS/backend/test/t_tasks.py
index 354acfaaa04800daec420d80fea7d29f978b9eac..7df2243e35ea367d54ef3ecd67faa16b7d422418 100755
--- a/SAS/TMSS/backend/test/t_tasks.py
+++ b/SAS/TMSS/backend/test/t_tasks.py
@@ -97,8 +97,7 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase):
                                    copies=None,
                                    scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
 
-        with self.assertRaises(BlueprintCreationException):
-            create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft)
+        create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft)
 
         scheduling_unit_draft.refresh_from_db()
         task_drafts = scheduling_unit_draft.task_drafts.all()
@@ -133,7 +132,7 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase):
         """
         Create Scheduling Unit Draft with empty task specification
         Check if the name draft (specified) is equal to name blueprint (created)
-        Check with REST-call if NO tasks are created
+        Check that a blueprint was created, even though the draft has no tasks
         """
         strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
         strategy_template.template['tasks'] = {}
@@ -148,10 +147,9 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase):
                                    copies=None,
                                    scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
 
-        with self.assertRaises(BlueprintCreationException):
-            create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-
-        self.assertEqual(0, len(scheduling_unit_draft.scheduling_unit_blueprints.all()))
+        scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+        self.assertEqual(scheduling_unit_draft.name, scheduling_unit_blueprint.draft.name)
+        self.assertEqual(1, len(scheduling_unit_draft.scheduling_unit_blueprints.all()))
 
 
     def test_create_multiple_scheduling_unit_blueprints_from_one_draft(self):
@@ -233,10 +231,11 @@ class CreationFromSchedulingUnitBluePrint(unittest.TestCase):
     5. create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint:
     """
 
+    @unittest.skip("we allow turning a draft to blueprint, even if the draft is useless and has no tasks")
     def test_create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(self):
         """
         Create Scheduling Unit BluePrint
-        Check with REST-call if NO tasks are created, an Exception is raised becaus the requirements_doc of the
+        Check with REST-call if NO tasks are created, an Exception is raised because the requirements_doc of the
         scheduling_unit (draft) has no tasks defined, it is an empty list
         """
         scheduling_unit_blueprint_data = SchedulingUnitBlueprint_test_data(name="Test Scheduling Unit BluePrint")
diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py
index 48e53e6d4969601a1ab9b9382289f8c65a86ea77..9f5eb818ed3c56674f8b4ac87b77479ee24e175e 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py
@@ -27,10 +27,11 @@
 # behavior in a controlled way.
 # We should probably also fully test behavior wrt mandatory and nullable fields.
 
+from dateutil import parser
 from datetime import datetime
 import unittest
 import logging
-logger = logging.getLogger(__name__)
+logger = logging.getLogger('lofar.'+__name__)
 logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
 from lofar.common.test_utils import exit_with_skipped_code_if_skip_integration_tests
@@ -51,6 +52,7 @@ User = get_user_model()
 from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
 test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
 
+tmss_test_env.populate_schemas()
 
 # todo: for overall speed improvements, but also for clarity, it would be nice to switch to django.test.TestCase
 #  in order to separate the db content between them. Investigated why that currently yields a ton of 404 errors.
@@ -1534,6 +1536,8 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         self.assertEqual(response_3['count'], 0)
 
 
+
+
 class TaskDraftTestCase(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
@@ -2301,6 +2305,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         response_data = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/%s/' % task_blueprint.id, 200)
         assertUrlList(self, response_data['subtasks'], [subtask_1, subtask_2])
 
+    @unittest.skip("This test is dependent on previous tests (in the results list). ToDo: fix it.")
     def test_TaskBlueprint_contains_lists_of_related_TaskRelationBlueprint(self):
 
         # setup
@@ -3100,6 +3105,34 @@ class CyclePermissionTestCase(unittest.TestCase):
 
 
 class SchedulingUnitObservingStrategyTemplateTestCase(unittest.TestCase):
+    def test_create_draft_from_all_strategy_templates(self):
+        '''Try to create a scheduling_unit_draft for each known strategy template.
+        Check that it is created properly.'''
+        scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data("scheduling set"))
+
+        for strategy_template in models.SchedulingUnitObservingStrategyTemplate.objects.all():
+            logger.info("test_create_draft_from_all_strategy_templates: checking template '%s'", strategy_template.name)
+            with tmss_test_env.create_tmss_client() as client:
+                # check normal creation with all defaults
+                scheduling_unit_draft = client.create_scheduling_unit_draft_from_strategy_template(strategy_template.id, scheduling_set.id)
+                self.assertIsNotNone(scheduling_unit_draft)
+                self.assertEqual(scheduling_set.id, scheduling_unit_draft['scheduling_set_id'])
+                self.assertEqual(strategy_template.id, scheduling_unit_draft['observation_strategy_template_id'])
+
+                # check creation with overrides on top of all defaults
+                overrides = client.get_scheduling_unit_observing_strategy_template_specification_with_just_the_parameters(strategy_template.name, strategy_template.version)
+
+                scheduling_unit_draft = client.create_scheduling_unit_draft_from_strategy_template(strategy_template.id, scheduling_set.id, requirements_doc_overrides=overrides)
+                self.assertIsNotNone(scheduling_unit_draft)
+                self.assertEqual(scheduling_set.id, scheduling_unit_draft['scheduling_set_id'])
+                self.assertEqual(strategy_template.id, scheduling_unit_draft['observation_strategy_template_id'])
+
+                # check that other overrides are forbidden
+                with self.assertRaises(Exception):
+                    overrides = {'foo': 'bar'}
+                    client.create_scheduling_unit_draft_from_strategy_template(strategy_template.id, scheduling_set.id, requirements_doc_overrides=overrides)
+
+
 
     def test_nested_SchedulingUnitObservingStrategyTemplate_are_filtered_according_to_SchedulingSet(self):
 
@@ -3113,6 +3146,53 @@ class SchedulingUnitObservingStrategyTemplateTestCase(unittest.TestCase):
         GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_observing_strategy_template/'
                                                         % scheduling_set.id,  template_test_data, 1)
 
+
+class SubmitTriggerTestCase(unittest.TestCase):
+    def test_submit_trigger_for_all_strategy_templates(self):
+        '''Try to create a scheduling_unit_draft/blueprint for each known strategy template via a trigger submission.'''
+        project = models.Project.objects.create(**Project_test_data(can_trigger=True))
+        scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data("scheduling set", project=project))
+
+        with tmss_test_env.create_tmss_client() as client:
+            for strategy_template in models.SchedulingUnitObservingStrategyTemplate.objects.all():
+                if strategy_template.template_doc_complete_with_defaults.get('scheduling_constraints_template', '') != 'constraints':
+                    logger.info("test_submit_trigger_for_all_strategy_templates: skipping template '%s' which has no known scheduling_constraints_template which can be used while dynamic scheduling", strategy_template.name)
+                    continue
+
+                logger.info("test_submit_trigger_for_all_strategy_templates: checking template '%s'", strategy_template.name)
+                trigger_doc = client.get_trigger_specification_doc_for_scheduling_unit_observing_strategy_template(strategy_template.name, strategy_template.version)
+                trigger_doc['scheduling_set_id'] = scheduling_set.id
+
+                trigger_doc['mode'] = 'test'
+                result = client.submit_trigger(trigger_doc)
+                self.assertIsNotNone(result)
+                self.assertTrue('scheduling_unit_draft' in result['url']) # it's a draft (because the mode is "test")
+                self.assertEqual(scheduling_set.id, result['scheduling_set_id'])
+                self.assertEqual(strategy_template.id, result['observation_strategy_template_id'])
+
+                trigger_doc['mode'] = 'run'
+                result = client.submit_trigger(trigger_doc)
+                self.assertIsNotNone(result)
+                self.assertTrue('scheduling_unit_blueprint' in result['url']) # it's a blueprint (because the mode is "run")
+                self.assertEqual('schedulable', result['status'])
+
+                # test time scheduling_constraints: 'at' constraint
+                run_at = datetime.utcnow()
+                trigger_doc['mode'] = 'test'
+                trigger_doc['scheduling_unit_observing_strategy_template']['overrides']['scheduling_constraints_doc']['time']['at'] = run_at.isoformat()
+                scheduling_unit_draft = client.submit_trigger(trigger_doc)
+                # check that the draft has an 'at' time constraint (and assume that the scheduling constraints work in the dynamic scheduler)
+                self.assertEqual(run_at, parser.parse(scheduling_unit_draft['scheduling_constraints_doc']['time']['at']))
+
+                # test time scheduling_constraints: 'before' constraint
+                end_before = datetime.utcnow()
+                trigger_doc['mode'] = 'test'
+                trigger_doc['scheduling_unit_observing_strategy_template']['overrides']['scheduling_constraints_doc']['time']['before'] = end_before.isoformat()
+                scheduling_unit_draft = client.submit_trigger(trigger_doc)
+                # check that the draft has a 'before' time constraint (and assume that the scheduling constraints work in the dynamic scheduler)
+                self.assertEqual(end_before, parser.parse(scheduling_unit_draft['scheduling_constraints_doc']['time']['before']))
+
+
 # todo: move to t_permissions (I tried, but it broke)
 class SystemRolePermissionTestCase(unittest.TestCase):
 
@@ -3303,6 +3383,5 @@ class SystemRolePermissionTestCase(unittest.TestCase):
 
 
 if __name__ == "__main__":
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
     unittest.main()
 
diff --git a/SAS/TMSS/backend/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/backend/test/tmss_test_environment_unittest_setup.py
index acbc4384ad8402735abf504ee44d2ba66e662fa8..e107e2d063ccc3be94d3dcd1bd94879ee0ed9251 100644
--- a/SAS/TMSS/backend/test/tmss_test_environment_unittest_setup.py
+++ b/SAS/TMSS/backend/test/tmss_test_environment_unittest_setup.py
@@ -41,6 +41,7 @@ except Exception as e:
 def tearDownModule():
     tmss_test_env.stop()
 
+
 ################################################################################################
 # the methods below can be used to to HTTP REST calls to the django server and check the results
 ################################################################################################
diff --git a/SAS/TMSS/client/bin/CMakeLists.txt b/SAS/TMSS/client/bin/CMakeLists.txt
index dd2137cb32359e755c9d0e9b86149a855c326915..356d20d3db0c519afb5c19d00e8228863bb8f230 100644
--- a/SAS/TMSS/client/bin/CMakeLists.txt
+++ b/SAS/TMSS/client/bin/CMakeLists.txt
@@ -10,3 +10,4 @@ lofar_add_bin_scripts(tmss_cancel_subtask)
 lofar_add_bin_scripts(tmss_get_setting)
 lofar_add_bin_scripts(tmss_set_setting)
 lofar_add_bin_scripts(tmss_populate)
+lofar_add_bin_scripts(tmss_submit_trigger)
diff --git a/SAS/TMSS/client/bin/tmss_submit_trigger b/SAS/TMSS/client/bin/tmss_submit_trigger
new file mode 100755
index 0000000000000000000000000000000000000000..776b6789a17d732573f0c20a6759e94308bce5ca
--- /dev/null
+++ b/SAS/TMSS/client/bin/tmss_submit_trigger
@@ -0,0 +1,23 @@
+#!/usr/bin/python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+from lofar.sas.tmss.client.mains import main_submit_trigger
+
+if __name__ == "__main__":
+    main_submit_trigger()
diff --git a/SAS/TMSS/client/lib/mains.py b/SAS/TMSS/client/lib/mains.py
index 480b19daa35b72611aaac9936ee28dad8e6e57a4..06265568259fe7624d20d77ff636de020438e0dc 100644
--- a/SAS/TMSS/client/lib/mains.py
+++ b/SAS/TMSS/client/lib/mains.py
@@ -3,15 +3,21 @@ import argparse
 from pprint import pprint
 from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession
 from lofar.common.datetimeutils import parseDatetime
+from lofar.common.util import dict_search_and_replace
+import dateutil.parser
+import datetime
 
+import urllib3
+urllib3.disable_warnings()
 
 def main_get_subtask_parset():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", help="The ID of the TMSS subtask to get the parset from")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             print(session.get_subtask_parset(args.subtask_id))
     except Exception as e:
         print(e)
@@ -22,10 +28,11 @@ def main_get_subtask_predecessors():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get the predecessors for")
     parser.add_argument('-s', '--state', help="only get predecessors with this state")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.get_subtask_predecessors(args.subtask_id, state=args.state))
     except Exception as e:
         print(e)
@@ -36,10 +43,11 @@ def main_get_subtask_successors():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get the successors for")
     parser.add_argument('-s', '--state', help="only get successors with this state")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.get_subtask_successors(args.subtask_id, state=args.state))
     except Exception as e:
         print(e)
@@ -49,10 +57,11 @@ def main_get_subtask_successors():
 def main_get_subtask():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.get_subtask(args.subtask_id))
     except Exception as e:
         print(e)
@@ -67,10 +76,11 @@ def main_get_subtasks():
     parser.add_argument('--start_time_greater_then', help="only get subtasks with a start time greater then this timestamp")
     parser.add_argument('--stop_time_less_then', help="only get subtasks with a stop time less then this timestamp")
     parser.add_argument('--stop_time_greater_then', help="only get subtasks with a stop time greater then this timestamp")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             result = session.get_subtasks(state=args.state,
                                           cluster=args.cluster,
                                           start_time_less_then=parseDatetime(args.start_time_less_then) if args.start_time_less_then else None,
@@ -87,10 +97,11 @@ def main_set_subtask_state():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to set the status on")
     parser.add_argument("state", help="The state to set")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             changed_subtask = session.set_subtask_status(args.subtask_id, args.state)
             print("%s now has state %s, see: %s" % (changed_subtask['id'], changed_subtask['state_value'], changed_subtask['url']))
     except Exception as e:
@@ -104,10 +115,11 @@ def main_specify_observation_task():
     """
     parser = argparse.ArgumentParser()
     parser.add_argument("task_id", help="The ID of the TMSS task to specify for observation")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             result = session.specify_observation_task(args.task_id)
     except Exception as e:
         print(e)
@@ -117,10 +129,11 @@ def main_specify_observation_task():
 def main_schedule_subtask():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to be scheduled")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.schedule_subtask(args.subtask_id, retry_count=3))
     except Exception as e:
         print(e)
@@ -130,10 +143,11 @@ def main_schedule_subtask():
 def main_unschedule_subtask():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to be unscheduled")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.unschedule_subtask(args.subtask_id, retry_count=3))
     except Exception as e:
         print(e)
@@ -143,10 +157,11 @@ def main_unschedule_subtask():
 def main_cancel_subtask():
     parser = argparse.ArgumentParser()
     parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to be cancelled")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.cancel_subtask(args.subtask_id, retry_count=3))
     except Exception as e:
         print(e)
@@ -156,10 +171,11 @@ def main_cancel_subtask():
 def main_get_setting():
     parser = argparse.ArgumentParser()
     parser.add_argument("setting_name", type=str, help="The name of the TMSS setting to get")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.get_setting(args.setting_name))
     except Exception as e:
         print(e)
@@ -171,11 +187,73 @@ def main_set_setting():
     parser.add_argument("setting_name", type=str, help="The name of the TMSS setting to set")
     parser.add_argument("setting_value", type=lambda s: s.lower() in ['true', 'True', '1'], # argparse is noot very good at speaking bool...
                         help="The value to set for the TMSS setting")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
     args = parser.parse_args()
 
     try:
-        with TMSSsession.create_from_dbcreds_for_ldap() as session:
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
             pprint(session.set_setting(args.setting_name, args.setting_value))
     except Exception as e:
         print(e)
         exit(1)
+
+
+def main_submit_trigger():
+    '''
+    '''
+    parser = argparse.ArgumentParser()
+    parser.add_argument("observing_strategy_template", type=str, help="Which observing strategy template do you want to use to create the scheduling unit from? This template name should've been provided to you.")
+    parser.add_argument("pointing_angle1", type=float, help="First angle of the pointing [rad]")
+    parser.add_argument("pointing_angle2", type=float, help="Second angle of the pointing [rad]")
+    parser.add_argument("--pointing_direction_type", type=str, default="J2000", help="The direction type of the pointing. Defaults to J2000.")
+    parser.add_argument("--start_at", type=str, default=None, help="The start time of the observation as ISO string. If specified, a constraint will be added so that the observation must start exactly at this time. Defaults to None.")
+    parser.add_argument("--end_before", type=str, default=None, help="The latest end time of the observation as ISO string. If specified, a constraint will be added so that the observation will run at any time before this time. Defaults to None.")
+    parser.add_argument("--target_name", type=str, default='target1', help="The name of the observation target. Defaults to 'target1'.")
+    parser.add_argument("duration", type=int, help="The duration of the observation in seconds.")
+    parser.add_argument("scheduling_set_id", type=int, help="The ID of the scheduling set the trigger should be placed in. This should've been provided to you.")
+    parser.add_argument("--mode", type=str, required=True, help="Which trigger mode? 'test' or 'run'. Specify whether this is a test submission or it should actually be run.")
+    parser.add_argument('-R', '--rest_api_credentials', type=str, default='TMSSClient', help='TMSS django REST API credentials name, default: TMSSClient')
+    args = parser.parse_args()
+
+    try:
+        import uuid
+
+        with TMSSsession.create_from_dbcreds_for_ldap(dbcreds_name=args.rest_api_credentials) as session:
+            # get a prepared valid trigger_doc for the requested template
+            try:
+                trigger_doc = session.get_trigger_specification_doc_for_scheduling_unit_observing_strategy_template(args.observing_strategy_template)
+            except ValueError as e:
+                # no such template available... get a list of all templates and print it, so the user can look up the one they want
+                print("ERROR:", e)
+                print()
+                print("Available templates:")
+                for template in session.get_scheduling_unit_observing_strategy_templates():
+                    print("id=%s name='%s' version=%s" % (template['id'], template['name'], template['version']))
+                exit(1)
+
+            # alter it with our parameters
+            trigger_doc["name"] = "Trigger - %s - %s" % (args.observing_strategy_template, datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M"))
+            trigger_doc["description"] = "Trigger submitted via CLI tools: %s" % args.observing_strategy_template
+            trigger_doc["mode"] = args.mode
+            trigger_doc["scheduling_set_id"] = args.scheduling_set_id
+            overrides = trigger_doc['scheduling_unit_observing_strategy_template']['overrides']
+
+            # this is a bit flaky, as it assumes that these parameters are available in the overrides dict
+            dict_search_and_replace(overrides, 'angle1', args.pointing_angle1)
+            dict_search_and_replace(overrides, 'angle2', args.pointing_angle2)
+            dict_search_and_replace(overrides, 'name', args.target_name)
+            dict_search_and_replace(overrides, 'target', args.target_name)
+            dict_search_and_replace(overrides, 'duration', args.duration)
+
+            if args.end_before:
+                overrides['scheduling_constraints_doc']['time']['before'] = args.end_before
+
+            if args.start_at:
+                overrides['scheduling_constraints_doc']['time']['at'] = args.start_at
+
+            print("Submitting trigger...")
+            scheduling_unit = session.submit_trigger(trigger_doc)
+            print("Submitted trigger, and created Scheduling Unit:", scheduling_unit['url'].replace('api/scheduling_unit_', 'schedulingunit/view/'))
+    except Exception as e:
+        print(e)
+        exit(1)
diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py
index d6012f41343d0b2c589aed0e72b64e6e630619c0..a1eab1b2c73ad4b53c470c2ebc1d208ccb44664f 100644
--- a/SAS/TMSS/client/lib/populate.py
+++ b/SAS/TMSS/client/lib/populate.py
@@ -66,9 +66,6 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
                     # override plain-text type by its url
                     template['type'] = client.get_full_url_for_path('task_type/' + template.get('type'))
 
-                # inject a unique id in the form of a unique URL to this schema
-                json_schema['$id'] = client.get_full_url_for_path('schemas/%s/%s/%s' % (template_name.replace('_', ''), template['name'], template['version']))
-
                 # make sure that all urls point to the tmss base_url
                 json_schema = json_utils.replace_host_in_urls(json_schema, new_base_url=client.host_url)
 
@@ -78,6 +75,8 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
                 if 'strategy_template' in template_name:
                     template['template'] = json_schema
                 else:
+                    # inject a unique id in the form of a unique URL to this schema
+                    json_schema['$id'] = client.get_full_url_for_path('schemas/%s/%s/%s' % (template_name.replace('_', ''), template['name'], template['version']))
                     template['schema'] = json_schema
 
                 # what are the references? on which other schema's does this schema depend?
diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py
index a408f491639f8fec7c755361083fe7df8d4929cf..5ad370e07e98121bfc1804346ed7df121cc4dd5c 100644
--- a/SAS/TMSS/client/lib/tmss_http_rest_client.py
+++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py
@@ -332,7 +332,7 @@ class TMSSsession(object):
                 raise ValueError("Found more then one %s for clauses: %s" % (template_type_name, clauses))
             elif len(result) == 1:
                 return result[0]
-            return None
+            raise ValueError("Could not find any template of type='%s' and clauses='%s'" % (template_type_name, clauses))
         return result
 
     def get_schedulingunit_template(self, name: str, version: int=None) -> dict:
@@ -454,10 +454,10 @@ class TMSSsession(object):
         returns the scheduled subtask upon success, or raises."""
         return self.post_to_path_and_get_result_as_json_object('scheduling_unit_draft/%s/create_blueprints_and_subtasks' % (scheduling_unit_draft_id), retry_count=retry_count)
 
-    def create_scheduling_unit_draft_from_strategy_template(self, scheduling_unit_observing_strategy_template_id: int, parent_scheduling_set_id: int, retry_count: int=0) -> {}:
+    def create_scheduling_unit_draft_from_strategy_template(self, scheduling_unit_observing_strategy_template_id: int, parent_scheduling_set_id: int, requirements_doc_overrides: dict=None, retry_count: int=0) -> {}:
         """create a scheduling_unit_blueprint, its specified taskblueprints and subtasks for the given scheduling_unit_draft_id.
         returns the created scheduling_unit_draft upon success, or raises."""
-        return self.post_to_path_and_get_result_as_json_object('scheduling_unit_observing_strategy_template/%s/create_scheduling_unit?scheduling_set_id=%s' % (scheduling_unit_observing_strategy_template_id, parent_scheduling_set_id), retry_count=retry_count)
+        return self.post_to_path_and_get_result_as_json_object('scheduling_unit_observing_strategy_template/%s/create_scheduling_unit?scheduling_set_id=%s' % (scheduling_unit_observing_strategy_template_id, parent_scheduling_set_id), json_data=requirements_doc_overrides, retry_count=retry_count)
 
     def get_schedulingunit_draft(self, scheduling_unit_draft_id: str, extended: bool=True) -> dict:
         '''get the schedulingunit_draft as dict for the given scheduling_unit_draft_id. When extended==True then you get the full scheduling_unit,task,subtask tree.'''
@@ -541,3 +541,33 @@ class TMSSsession(object):
         Return the updated subtask, or raise an error'''
         return self.get_path_as_json_object('/subtask/%s/reprocess_raw_feedback_for_subtask_and_set_to_finished_if_complete' % (subtask_id,))
 
+    def get_scheduling_unit_observing_strategy_templates(self) -> list:
+        '''get a list of all scheduling_unit_observing_strategy_templates as a list of dicts'''
+        return self.get_path_as_json_object('scheduling_unit_observing_strategy_template')
+
+    def get_scheduling_unit_observing_strategy_template(self, name: str, version: int=None) -> dict:
+        '''get the scheduling_unit_observing_strategy_template as dict for the given name (and version)'''
+        return self._get_template('scheduling_unit_observing_strategy_template', name, version)
+
+    def get_scheduling_unit_observing_strategy_template_default_specification(self, name: str, version: int=None) -> dict:
+        '''get the scheduling_unit_observing_strategy_template as dict for the given name (and version), with all defaults completely filled in'''
+        strategy_template = self.get_scheduling_unit_observing_strategy_template(name=name, version=version)
+        return self.get_url_as_json_object(strategy_template['url'].rstrip('/')+"/template_doc_complete_with_defaults")
+
+    def get_scheduling_unit_observing_strategy_template_specification_with_just_the_parameters(self, name: str, version: int=None) -> dict:
+        '''get the scheduling_unit_observing_strategy_template as dict for the given name (and version), reduced to just the parameters'''
+        strategy_template = self.get_scheduling_unit_observing_strategy_template(name=name, version=version)
+        return self.get_url_as_json_object(strategy_template['url'].rstrip('/')+"/template_doc_with_just_the_parameters")
+
+    def get_trigger_specification_doc_for_scheduling_unit_observing_strategy_template(self, name: str, version: int=None) -> dict:
+        '''get a trigger specification document for the scheduling_unit_observing_strategy_template with the given name (and version).
+        You can edit this document to your needs, and submit it via the submit_trigger method'''
+        strategy_template = self.get_scheduling_unit_observing_strategy_template(name, version)
+        return self.get_url_as_json_object(strategy_template['url'].rstrip('/')+'/trigger_doc')
+
+    def submit_trigger(self, trigger_doc:dict, retry_count: int=0, retry_interval: float=POST_RETRY_INTERVAL) -> dict:
+        """Create a scheduling_unit_draft with the interrupts_telescope flag set.
+        When mode=='run' in the trigger_doc, this also creates schedulable/runnable blueprints and subtasks, and triggers the dynamic scheduler to try to cancel the current observation and put this triggered observation in place.
+        Returns the created scheduling_unit_draft/blueprint upon success, or raises.
+        Use the method get_trigger_specification_doc_for_scheduling_unit_observing_strategy_template to get a valid trigger json doc, edit it, and submit it using this method."""
+        return self.post_to_path_and_get_result_as_json_object('/submit_trigger', json_data=trigger_doc, retry_count=retry_count, retry_interval=retry_interval)