diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake
index db28a087be704c2b09c85cd66fea45146d617029..d4f6966e12814caac01dda87311fdbea2535433f 100644
--- a/CMake/LofarPackageList.cmake
+++ b/CMake/LofarPackageList.cmake
@@ -1,7 +1,7 @@
 # - Create for each LOFAR package a variable containing the absolute path to
 # its source directory. 
 #
-# Generated by gen_LofarPackageList_cmake.sh at do 29 okt 2020  7:42:34 CET
+# Generated by gen_LofarPackageList_cmake.sh at do 28 mei 2020 11:22:44 CEST
 #
 #                      ---- DO NOT EDIT ----
 #
@@ -207,7 +207,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED)
   set(TaskPrescheduler_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/TaskPrescheduler)
   set(RACommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/Common)
   set(TMSSClient_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/client)
-  set(TMSSSubtaskSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/subtask_scheduling)
+  set(TMSSSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/scheduling)
   set(TMSSFeedbackHandlingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/feedback_handling)
   set(TMSSPostgresListenerService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/tmss_postgres_listener)
   set(TriggerEmailServiceCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Common)
diff --git a/LCS/PyCommon/datetimeutils.py b/LCS/PyCommon/datetimeutils.py
index 93841f255b159cd4d967e208f4eb8b93a6ff2967..fb1df8788220bf9af22515b5903c63c8c6a1147f 100644
--- a/LCS/PyCommon/datetimeutils.py
+++ b/LCS/PyCommon/datetimeutils.py
@@ -140,11 +140,33 @@ def from_milliseconds_since_unix_epoch(nr_of_milliseconds_since_epoch):
     '''
     return from_seconds_since_unix_epoch(nr_of_milliseconds_since_epoch/1000.0)
 
-def round_to_millisecond_precision(timestamp):
+def round_to_millisecond_precision(timestamp: datetime) -> datetime:
     """
     returns the given timestamp rounded to the nearest millisecond
     :param timestamp: datetime a python datetime timestamp
     :return: the given timestamp rounded to the nearest millisecond
     """
     diff_to_rounded_millisecond = timestamp.microsecond - 1000*round(timestamp.microsecond/1000)
-    return timestamp - timedelta(microseconds=diff_to_rounded_millisecond)
\ No newline at end of file
+    return timestamp - timedelta(microseconds=diff_to_rounded_millisecond)
+
+def round_to_second_precision(timestamp: datetime) -> datetime:
+    """
+    returns the given timestamp rounded to the nearest second
+    :param timestamp: datetime a python datetime timestamp
+    :return: the given timestamp rounded to the nearest second
+    """
+    if timestamp.microsecond < 500000:
+        return timestamp + timedelta(microseconds=-timestamp.microsecond)
+    else:
+        return timestamp + timedelta(microseconds=-timestamp.microsecond, seconds=1)
+
+def round_to_minute_precision(timestamp: datetime) -> datetime:
+    """
+    returns the given timestamp rounded to the nearest minute
+    :param timestamp: datetime a python datetime timestamp
+    :return: the given timestamp rounded to the nearest minute
+    """
+    if timestamp.second < 30:
+        return timestamp + timedelta(seconds=-timestamp.second, microseconds=-timestamp.microsecond)
+    else:
+        return timestamp + timedelta(minutes=1, seconds=-timestamp.second, microseconds=-timestamp.microsecond)
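+
+# illustrative examples of the rounding helpers above (values assumed):
+#   round_to_millisecond_precision(datetime(2020, 1, 1, 12, 0, 0, 1500))  -> datetime(2020, 1, 1, 12, 0, 0, 2000)
+#   round_to_second_precision(datetime(2020, 1, 1, 12, 0, 30, 600000))    -> datetime(2020, 1, 1, 12, 0, 31)
+#   round_to_minute_precision(datetime(2020, 1, 1, 12, 0, 30))            -> datetime(2020, 1, 1, 12, 1)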
diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py
index 956fd3b0a29c34bc25bc3e204ff877943e266ca1..232ba7841c4d588378ab01fb192d8d25b59577bc 100644
--- a/LCS/PyCommon/json_utils.py
+++ b/LCS/PyCommon/json_utils.py
@@ -72,17 +72,38 @@ def _extend_with_required(validator_class):
 _DefaultValidatingDraft6Validator = _extend_with_default(jsonschema.Draft6Validator)
 _DefaultValidatingDraft6Validator = _extend_with_required(_DefaultValidatingDraft6Validator)
 
+# cache of validators, keyed by schema '$id', so resolved (remote) $refs can be reused.
+_schema_validators = {}
+_schema_defaults_adding_validators = {}
+
+def get_validator_for_schema(schema: dict, add_defaults: bool=False):
+    '''get a json validator for the given schema.
+    If the schema is already known in the cache by its $id, then the cached validator is returned.
+    This saves many lookups and ref resolving round trips.
+    The 'add_defaults' parameter indicates whether the validator should add defaults while validating.'''
+    if isinstance(schema, str):
+        schema = json.loads(schema)
+
+    validators_cache = _schema_defaults_adding_validators if add_defaults else _schema_validators
+
+    if '$id' in schema:
+        if schema['$id'] not in validators_cache:
+            validators_cache[schema['$id']] = _DefaultValidatingDraft6Validator(schema) if add_defaults else jsonschema.Draft6Validator(schema=schema)
+        validator = validators_cache[schema['$id']]
+    else:
+        validator = _DefaultValidatingDraft6Validator(schema) if add_defaults else jsonschema.Draft6Validator(schema=schema)
+
+    validator.schema = schema
+    return validator
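+
+# illustrative usage (assuming both calls pass a schema with the same '$id'):
+#   validator1 = get_validator_for_schema({'$id': 'http://example.com/foo', 'type': 'object'})
+#   validator2 = get_validator_for_schema({'$id': 'http://example.com/foo', 'type': 'object'})
+#   assert validator1 is validator2  # same cached instance; only validator.schema was refreshed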
 
 def get_default_json_object_for_schema(schema: str) -> dict:
     '''return a valid json object for the given schema with all properties with their default values'''
     return add_defaults_to_json_object_for_schema({}, schema)
 
-
 def add_defaults_to_json_object_for_schema(json_object: dict, schema: str) -> dict:
     '''return a copy of the json object with defaults filled in according to the schema for all the missing properties'''
     copy_of_json_object = deepcopy(json_object)
-    #TODO: investigate if we want to use a 'common'/singleton validator and use (remote) schema caching for faster validation
-    _DefaultValidatingDraft6Validator(schema).validate(copy_of_json_object)
+    get_validator_for_schema(schema, add_defaults=True).validate(copy_of_json_object)
     return copy_of_json_object
 
 def replace_host_in_urls(schema, new_base_url: str, keys=['$id', '$ref', '$schema']):
@@ -202,7 +223,7 @@ def validate_json_object_with_schema(json_object, schema):
     """
     Validate the given json_object with schema
     """
-    jsonschema.Draft6Validator(schema=schema).validate(json_object)
+    get_validator_for_schema(schema, add_defaults=False).validate(json_object)
 
 
 
diff --git a/LCS/PyCommon/postgres.py b/LCS/PyCommon/postgres.py
index 9c6d36e6e4369f722c807b198ae07b34b0924d06..b04e99e4cadcea254e8fb4925edfc1aea508798f 100644
--- a/LCS/PyCommon/postgres.py
+++ b/LCS/PyCommon/postgres.py
@@ -40,28 +40,29 @@ from lofar.common.dbcredentials import DBCredentials
 
 logger = logging.getLogger(__name__)
 
-def makePostgresNotificationQueries(schema, table, action, column_name='id'):
+def makePostgresNotificationQueries(schema, table, action, column_name=None, quote_column_value:bool=True, id_column_name='id', quote_id_value:bool=False):
     action = action.upper()
     if action not in ('INSERT', 'UPDATE', 'DELETE'):
         raise ValueError('''trigger_type '%s' not in ('INSERT', 'UPDATE', 'DELETE')''' % action)
 
     change_name = '''{table}_{action}'''.format(table=table, action=action)
-    if column_name != 'id':
+    if column_name is not None and column_name != id_column_name:
         change_name += '_column_' + column_name
     function_name = '''NOTIFY_{change_name}'''.format(change_name=change_name)
 
-    if action == 'UPDATE':
-        if column_name == 'id':
-            select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;'''
-        else:
-            select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || ', "''' + column_name + '''": "' || CAST(NEW.''' + column_name + ''' AS text)  || '"}' INTO payload;'''
-    elif action == 'INSERT':
-        select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;'''
-    elif action == 'DELETE':
-        select_payload = '''SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;'''
+    # build a query string which selects the id:value (and optionally column:column_value) into a json-formatted object string
+    select_payload = '''SELECT '{po}"{id_column_name}": {id_value_quote}' || CAST({new_or_old}.{id_column_name} AS text) || '{id_value_quote}{column_key_value}{pc}' INTO payload;'''.format(
+        po="{",
+        id_column_name=id_column_name,
+        id_value_quote='"' if quote_id_value else '',
+        new_or_old='OLD' if action=='DELETE' else 'NEW',
+        column_key_value=''', "{column_name}": {column_value_quote}' || CAST(NEW.{column_name} AS text) || '{column_value_quote}'''.format(
+            column_name=column_name,
+            column_value_quote='"' if quote_column_value else '') if column_name else '',
+        pc = "}")
 
     if action == 'UPDATE':
-        begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name == 'id' else column_name)
+        begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name is None or column_name == id_column_name else column_name)
         end_update_check = 'END IF;'
     else:
         begin_update_check = ''
@@ -83,9 +84,8 @@ def makePostgresNotificationQueries(schema, table, action, column_name='id'):
                 function_name=function_name,
                 table=table,
                 action=action,
-                old_or_new=('OLD' if action == 'DELETE' else 'NEW') + '.' + column_name,
                 value='OLD' if action == 'DELETE' else 'NEW',
-                change_name=change_name.lower(),
+                change_name=change_name[:63].lower(), # postgres limits channel names to 63 chars
                 begin_update_check=begin_update_check,
                 select_payload=select_payload,
                 end_update_check=end_update_check)
@@ -441,7 +441,7 @@ class PostgresListener(PostgresDatabaseConnection):
 
         self.connect()
 
-        logger.info("Started listening to %s" % ', '.join([str(x) for x in list(self.__callbacks.keys())]))
+        logger.info("Started listening to %s on database %s", ', '.join([str(x) for x in list(self.__callbacks.keys())]), self.dbcreds.stringWithHiddenPassword())
 
         def eventLoop():
             while self.isListening():
@@ -477,7 +477,7 @@ class PostgresListener(PostgresDatabaseConnection):
         self.__thread.join()
         self.__thread = None
 
-        logger.info("Stopped listening")
+        logger.info("Stopped listening for notifications on database %s", self.dbcreds.stringWithHiddenPassword())
         self.stopWaiting()
         self.disconnect()
 
diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py
index 51e3be001e05424dea7358c5aa4f239e02140faf..104a43a7508372829b25ddce531534b2cf3fce90 100755
--- a/LCS/PyCommon/test/postgres.py
+++ b/LCS/PyCommon/test/postgres.py
@@ -70,7 +70,7 @@ class PostgresTestDatabaseInstance():
 
     def create(self):
         '''instantiate the isolated postgres server'''
-        logger.info('creating test-database instance...')
+        logger.info('%s creating test-database instance...', self.__class__.__name__)
 
         with self._named_lock:
             start_time = datetime.utcnow()
@@ -90,9 +90,9 @@ class PostgresTestDatabaseInstance():
                     # make the user known in the new test database
                     self._create_superuser(dsn)
 
-                    logger.info('Created test-database instance. It is available at: %s', self.dbcreds.stringWithHiddenPassword())
+                    logger.info('%s created test-database instance. It is available at: %s', self.__class__.__name__, self.dbcreds.stringWithHiddenPassword())
 
-                    logger.info('Applying test-database schema...')
+                    logger.info('%s applying test-database schema...', self.__class__.__name__)
                     self.apply_database_schema()
                     return
                 except Exception as e:
@@ -117,9 +117,9 @@ class PostgresTestDatabaseInstance():
         '''destroy the running postgres server'''
         try:
             if self._postgresql:
-                logger.info('removing test-database instance at %s', self.dbcreds.stringWithHiddenPassword())
+                logger.info('%s removing test-database instance at %s', self.__class__.__name__, self.dbcreds.stringWithHiddenPassword())
                 self._postgresql.stop()
-                logger.info('test-database instance removed')
+                logger.info('%s test-database instance removed', self.__class__.__name__)
         except Exception as e:
             logger.info('error while removing test-database instance at %s: %s', self.dbcreds.stringWithHiddenPassword(), e)
 
diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc
index a2462a0717583c1669d07e6ead0ab81c00c3c330..5a9bc3f4bea1cadc352584deeb3ff09fba52e036 100644
--- a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc
+++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc
@@ -127,9 +127,17 @@ std::string TMSSBridge::getParsetAsText(int subtask_id)
 bool TMSSBridge::setSubtaskState(int subtask_id, const string& state)
 {
     string queryStr = "/api/subtask/" + to_string(subtask_id) + "/";
+    string json_doc = "{ \"state\": \"/api/subtask_state/" + state +"/\"";
+    if(state == "finishing") {
+        // set stop_time to 'now' upon finishing, so we get an actual record of when the observation stopped
+        ptime now = from_time_t(time(0));
+        json_doc += ", \"stop_time\": \"" + to_iso_extended_string(now) + "\"";
+    }
+    json_doc += " }";
+
     string result;
-    if(httpQuery(queryStr, result, "PATCH", "{ \"state\": \"/api/subtask_state/" + state +"/\" }")) {
-        LOG_INFO_STR("Updated subtask id=" << subtask_id << " to status=" << state);
+    if(httpQuery(queryStr, result, "PATCH", json_doc)) {
+        LOG_INFO_STR("Updated subtask state id=" << subtask_id << " with patch: " << json_doc);
         return true;
     }
 
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py
index 75fe6059ed1fc2f9098c774c600d3439e7810960..f5711b4db9753551debdda209e418df0c022cdc2 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py
@@ -30,7 +30,7 @@ to assign resources to these tasks.
 import logging
 logger = logging.getLogger(__name__)
 
-from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
+from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME
 from lofar.messaging.rpc import RPCClientContextManagerMixin, RPCClient
 
 
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py
index e97993b95a5533e282a1c2b106dbd514abc9b071..6875b8004895b7302e338e31d3a1e32df31aefe2 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py
@@ -158,7 +158,7 @@ class ResourceAssigner(object):
         if spec.status == 'approved': # Only needed to send misc field info (storagemanager) to OTDB
             logger.info('Task otdb_id=%s tmss_id=%s is only approved, no resource assignment needed yet' % (otdb_id, tmss_id))
             self._send_task_status_notification(spec, 'approved')
-            return
+            return True
         #TODO have Specification propagate to the estimator?
         if self._schedule_resources(spec, specification_tree):
             # Cleanup the data of any previous run of the task
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py
index ea374250236b38524742631147c1c98879f7867b..a2d282ce204f6d1361443d231c8f13b0865a26df 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py
@@ -283,13 +283,21 @@ class BasicScheduler(object):
             # up more resources as a by-product, in which case other conflicts can simply be shifted to those newly freed
             # resources.
             conflict_claims = self.radb.getResourceClaims(task_ids=[self.task_id], status="conflict", extended=True)
-            logger.info("Resulting claims in conflict before resolution: %s", conflict_claims)
-
-            if conflict_claims and not any([self._resolve_conflict(c) for c in conflict_claims]):
-                if need_all or len(conflict_claims) == len(tentative_claims):
-                    # Could not resolve any conflict
-                    raise ScheduleException("Could not resolve one or more conflicting claims: #tentative_claims=%s #conflict_claims=%s conflict_claims=%s" % (
-                                            len(tentative_claims), len(conflict_claims), conflict_claims))
+            if conflict_claims:
+                for conflict_claim in conflict_claims:
+                    logger.warning("conflicting_claim: %s\nresource:%s\noverlapping_claims:%s\noverlapping_tasks:%s", conflict_claim,
+                                   self.radb.getResources(resource_ids=[conflict_claim['resource_id']],
+                                                          include_availability=True,
+                                                          claimable_capacity_lower_bound=conflict_claim['starttime'],
+                                                          claimable_capacity_upper_bound=conflict_claim['endtime'])[0],
+                                   self.radb.get_overlapping_claims(conflict_claim['id']),
+                                   self.radb.get_overlapping_tasks(conflict_claim['id']))
+
+                if not any([self._resolve_conflict(c) for c in conflict_claims]):
+                    if need_all or len(conflict_claims) == len(tentative_claims):
+                        # Could not resolve any conflict
+                        raise ScheduleException("Could not resolve one or more conflicting claims: #tentative_claims=%s #conflict_claims=%s conflict_claims=%s" % (
+                                                len(tentative_claims), len(conflict_claims), conflict_claims))
 
             # remove conflicting claims (allowing the next iteration to propose alternatives). Note that _handle_conflicts
             # could have reduced the number of conflicting claims.
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py
index 5046b4eb97f63c4354418a1352c9e4803c641054..fc053cfb2674659aac93c62c0861fa436d109ca9 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py
@@ -41,7 +41,7 @@ class RATestEnvironment:
                  exchange: str=os.environ.get("RA_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("RA_BROKER", DEFAULT_BROKER)):
         self.radb_test_instance = RADBTestDatabaseInstance()
         self.radb = self.radb_test_instance.create_database_connection()
-        self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker)
+        self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker, num_threads=1)
         self.re_service = createEstimatorService(exchange=exchange, broker=broker)
         self.ra_service = RAService(radbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker)
 
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py
index 29358de27d660b822a48c48a705f5dd0ec6ff135..285933e110ad53c2a94c719bc15bb8386932059d 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py
@@ -659,7 +659,7 @@ class RADatabase(PostgresDatabaseConnection):
 
         return self._cursor.rowcount > 0
 
-    def _to_fields_and_value_placeholders_strings(self, fields: collections.Iterable) -> (str, str):
+    def _to_fields_and_value_placeholders_strings(self, fields: collections.abc.Iterable) -> (str, str):
         """convert a list of fields (column names) into a tuple of a comma-seperated string and a comma-seperated placeholder string
         For usage with prepared statements (postgres mogrify)"""
         fields_str = ', '.join(fields)
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py
index c0412d60c699d267af1993a0e185c43904ff2165..7a833e499e9449b557333504e3bf394e5515353d 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py
@@ -124,8 +124,12 @@ if __name__ == '__main__':
     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                         level=logging.INFO)
 
+    class ExampleRADBEventMessageHandler(RADBEventMessageHandler):
+        def onTaskUpdated(self, updated_task):
+            logger.info("Example task updated: %s", updated_task)
+
     from lofar.messaging import BusListenerJanitor
-    with BusListenerJanitor(RADBEventMessageBusListener()):
+    with BusListenerJanitor(RADBEventMessageBusListener(handler_type=ExampleRADBEventMessageHandler)):
         waitForInterrupt()
 
 __all__ = ["RADBEventMessageBusListener", "RADBEventMessageHandler"]
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py
index 6a1786252db82892e650c2e24899cc6836046570..e0e853db3c9267aa8e46e3a12263ab24f36ee671 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py
@@ -122,14 +122,13 @@ class RADBPGListener(PostgresListener):
         r = {k:r[k] for k in ['id', 'total_capacity', 'available_capacity', 'used_capacity']}
         self._sendNotification('ResourceCapacityUpdated', r)
 
-    def __enter__(self):
-        super(RADBPGListener, self).__enter__()
+    def start(self):
+        super(RADBPGListener, self).start()
         self.radb.connect()
         self.event_bus.open()
-        return self
 
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        super(RADBPGListener, self).__exit__(exc_type, exc_val, exc_tb)
+    def stop(self):
+        super(RADBPGListener, self).stop()
         self.radb.disconnect()
         self.event_bus.close()
 
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py
index 8ecc6fb4b4ebbb380d168e83e695431d4db9af91..422174462c3ddb222fc7437c38c3548c7eefb5ed 100755
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py
@@ -55,8 +55,14 @@ class RADBTestDatabaseInstance(PostgresTestDatabaseInstance):
             for sql_path in sql_createdb_paths:
                 logger.debug("setting up database. applying sql file: %s", sql_path)
                 with open(sql_path) as sql:
+                    # temporarily suppress logging of queries to prevent the log from being spammed with the entire sql schema
+                    logging.getLogger('lofar.common.postgres').disabled = True
+
                     db.executeQuery(sql.read())
 
+                    # revert temporarily suppressed logging
+                    logging.getLogger('lofar.common.postgres').disabled = False
+
     def create_database_connection(self) -> RADatabase:
         self.radb = RADatabase(self.dbcreds)
         return self.radb
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py
index 399b826974ae0275845fc6f639a66be40dddd980..f1ec6d530f2797ee805874072688562d4f103c21 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py
@@ -161,12 +161,12 @@ class ResourceEstimatorHandler(ServiceMessageHandler):
         return self.get_subtree_estimate(specification_tree)
 
 
-def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER):
+def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, num_threads=1):
     return RPCService(service_name=DEFAULT_RESOURCEESTIMATOR_SERVICENAME,
                       handler_type=ResourceEstimatorHandler,
                       exchange=exchange,
                       broker=broker,
-                      num_threads=1)
+                      num_threads=num_threads)
 
 
 def main():
diff --git a/SAS/ResourceAssignment/ResourceAssignmentService/service.py b/SAS/ResourceAssignment/ResourceAssignmentService/service.py
index 732404dbadbe236fb8668ae75ed32b62e021a6c9..0ec23d0a9638704c53a74677b4599fc6f91605cf 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentService/service.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentService/service.py
@@ -386,13 +386,13 @@ class RADBServiceMessageHandler(ServiceMessageHandler):
         return { 'resource_claimable_capacity': resource_claimable_capacity}
 
 
-def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, dbcreds=None):
+def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, dbcreds=None, num_threads=4):
     return RPCService(DEFAULT_RADB_SERVICENAME,
                       RADBServiceMessageHandler,
                       handler_kwargs={'dbcreds': dbcreds},
                       exchange=exchange,
                       broker=broker,
-                      num_threads=4)
+                      num_threads=num_threads)
 
 def main():
     # make sure we run in UTC timezone
diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py
index ed05353790cb9db1ccdeeed71f0b5589201ca502..53f50b67a7a6dc12b0786de8359499e7660883e8 100644
--- a/SAS/TMSS/client/lib/populate.py
+++ b/SAS/TMSS/client/lib/populate.py
@@ -5,6 +5,7 @@ import json
 from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession
 from lofar.common import json_utils
 import os
+from concurrent.futures import ThreadPoolExecutor
 
 def populate_schemas_main():
     from optparse import OptionParser
@@ -34,7 +35,9 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
 
         with TMSSsession.create_from_dbcreds_for_ldap() as client:
             base_url = client.base_url.rstrip('/').rstrip('api').rstrip('/')
-            for template in templates:
+
+            # define upload method for parallel execution (see below)
+            def upload_template(template):
                 try:
                     with open(os.path.join(schema_dir, template.pop('file_name'))) as schema_file:
                         try:
@@ -69,7 +72,7 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
                             else:
                                 template['schema'] = json_schema
 
-                            logger.info("Uploading template template='%s' name='%s' version='%s'", template, name, version)
+                            logger.info("Uploading template name='%s' version='%s'", name, version)
 
                             client.post_template(template_path=template_name,
                                                   name=name,
@@ -81,3 +84,7 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
                 except Exception as e:
                     logger.error(e)
 
+            # do parallel upload
+            with ThreadPoolExecutor() as executor:
+                executor.map(upload_template, templates)
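+            # note: ThreadPoolExecutor() without max_workers uses a cpu-count based default;
+            # pass max_workers=<n> (illustrative) to bound the number of concurrent uploads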
+
diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py
index 7b5cf54ac67c77ff8a4a8e3e8f6ecb7e1cd7b7f8..a23c4ba587d35ef7fea7f4ac2aadbe038bf9c2f6 100644
--- a/SAS/TMSS/client/lib/tmss_http_rest_client.py
+++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py
@@ -5,7 +5,7 @@ import requests
 from http.client import responses
 import os
 import json
-from datetime import datetime
+from datetime import datetime, timedelta
 from lofar.common.datetimeutils import formatDatetime
 from lofar.common.dbcredentials import DBCredentials
 
@@ -102,8 +102,12 @@ class TMSSsession(object):
 
     def set_subtask_status(self, subtask_id: int, status: str) -> {}:
         '''set the status for the given subtask, and return the subtask with its new state, or raise on error'''
+        json_doc = {'state': "%s/subtask_state/%s/" % (self.base_url, status)}
+        if status == 'finishing':
+            json_doc['stop_time'] = datetime.utcnow().isoformat()
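+        # e.g. json_doc (illustrative values): {'state': 'http://localhost:8000/api/subtask_state/finishing/',
+        #                                       'stop_time': '2020-05-28T11:22:44.123456'}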
+
         response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id),
-                                      json={'state': "%s/subtask_state/%s/" % (self.base_url, status)},
+                                      json=json_doc,
                                       params={'format':'json'})
 
         if response.status_code >= 200 and response.status_code < 300:
@@ -114,7 +118,8 @@ class TMSSsession(object):
 
     def get_subtask_parset(self, subtask_id) -> str:
         '''get the lofar parameterset (as text) for the given subtask'''
-        result = self.session.get(url='%s/subtask/%s/parset' % (self.base_url, subtask_id))
+        result = self.session.get(url=self.get_full_url_for_path('/subtask/%s/parset' % (subtask_id,)),
+                                  headers={'Accept': 'text/plain'})
         if result.status_code >= 200 and result.status_code < 300:
             return result.content.decode('utf-8')
         raise Exception("Could not get parameterset for subtask %s.\nResponse: %s" % (subtask_id, result))
@@ -175,7 +180,9 @@ class TMSSsession(object):
     def get_url_as_json_object(self, full_url: str, params={}) -> object:
         '''get resource at the given full url (including http://<base_url>, interpret it as json, and return it as as native object (usually a dict or a list of dicts)'''
         response = self.session.get(url=full_url, params=params, timeout=100000)
-        logger.info("%s %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url)
+        logger.info("%s %s %s in %.1fms%s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code),
+                                                  response.elapsed.total_seconds()*1000, ' SLOW!' if response.elapsed > timedelta(seconds=1) else '',
+                                                  response.request.url)
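+        # example log line (illustrative): "GET 200 OK in 12.3ms on http://localhost:8000/api/subtask/1/"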
 
         if response.status_code >= 200 and response.status_code < 300:
             result = json.loads(response.content.decode('utf-8'))
@@ -247,7 +254,7 @@ class TMSSsession(object):
 
     def specify_observation_task(self, task_id: int) -> requests.Response:
         """specify observation for the given draft task by just doing a REST API call """
-        result = self.session.get(url='%s/api/task/%s/specify_observation' % (self.base_url, task_id))
+        result = self.session.get(url=self.get_full_url_for_path('/task/%s/specify_observation' % (task_id,)))
         if result.status_code >= 200 and result.status_code < 300:
             return result.content.decode('utf-8')
         raise Exception("Could not specify observation for task %s.\nResponse: %s" % (task_id, result))
@@ -265,7 +272,7 @@ class TMSSsession(object):
     def get_setting(self, setting_name: str) -> {}:
         """get the value of a TMSS setting.
         returns the setting value upon success, or raises."""
-        response = self.session.get(url='%s/setting/%s/' % (self.base_url, setting_name),
+        response = self.session.get(url=self.get_full_url_for_path('/setting/%s/' % (setting_name,)),
                                     params={'format': 'json'})
 
         if response.status_code >= 200 and response.status_code < 300:
@@ -277,7 +284,7 @@ class TMSSsession(object):
     def set_setting(self, setting_name: str, setting_value: bool) -> {}:
         """set a value for a TMSS setting.
         returns the setting value upon success, or raises."""
-        response = self.session.patch(url='%s/setting/%s/' % (self.base_url, setting_name),
+        response = self.session.patch(url=self.get_full_url_for_path('/setting/%s/' % (setting_name,)),
                                       json={'value': setting_value})
 
         if response.status_code >= 200 and response.status_code < 300:
@@ -297,7 +304,7 @@ class TMSSsession(object):
             json_data['template'] = json.loads(template) if isinstance(template, str) else template
         json_data.update(**kwargs)
 
-        response = self.session.post(url='%s/%s/' % (self.base_url, template_path), json=json_data)
+        response = self.session.post(url=self.get_full_url_for_path(template_path), json=json_data)
         if response.status_code == 201:
             logger.info("created new template: %s", json.loads(response.text)['url'])
         else:
@@ -310,7 +317,7 @@ class TMSSsession(object):
             new_feedback = feedback
         else:
             new_feedback = "%s\n%s" % (existing_feedback, feedback)
-        response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id),
+        response = self.session.patch(url=self.get_full_url_for_path('/subtask/%s/' % (subtask_id,)),
                                       json={'raw_feedback': new_feedback},
                                       params={'format': 'json'})
 
@@ -324,7 +331,7 @@ class TMSSsession(object):
     def process_subtask_feedback_and_set_finished(self, subtask_id: int) -> {}:
         '''process the raw_feedback of a given subtask and set the subtask to finished on succes. Return the subtask
         with its new state, or raise an error'''
-        response = self.session.post(url='%s/subtask/%s/process_feedback_and_set_finished' % (self.base_url, subtask_id),
+        response = self.session.post(url=self.get_full_url_for_path('/subtask/%s/process_feedback_and_set_finished' % (subtask_id,)),
                                      params={'format': 'json'})
 
         if response.status_code >= 200 and response.status_code < 300:
diff --git a/SAS/TMSS/client/lib/tmssbuslistener.py b/SAS/TMSS/client/lib/tmssbuslistener.py
index 81448e9a16c97e4cfb5f91213a218dde91f9edaf..75d63297e8d5dfff5403d560c6cbc3843ffcd71e 100644
--- a/SAS/TMSS/client/lib/tmssbuslistener.py
+++ b/SAS/TMSS/client/lib/tmssbuslistener.py
@@ -45,6 +45,7 @@ TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX               = _TMSS_EVENT_PREFIX_TEMPLATE %
 TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Object'
 TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Status'
 TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX     = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitDraft.Object'
+TMSS_SETTING_OBJECT_EVENT_PREFIX                 = _TMSS_EVENT_PREFIX_TEMPLATE % 'Setting.Object'
 TMSS_ALL_OBJECT_EVENTS_FILTER                    = _TMSS_EVENT_PREFIX_TEMPLATE % '.*.Object.#'
 TMSS_ALL_STATUS_EVENTS_FILTER                    = _TMSS_EVENT_PREFIX_TEMPLATE % '.*.Status.#'
 TMSS_ALL_EVENTS_FILTER                           = _TMSS_EVENT_PREFIX_TEMPLATE % '#'
@@ -55,13 +56,20 @@ class TMSSEventMessageHandler(AbstractMessageHandler):
     Base-type messagehandler for handling all TMSS event messages.
     Typical usage is to derive your own subclass from TMSSEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in.
     '''
+
+    def __init__(self, log_event_messages: bool=False) -> None:
+        self.log_event_messages = log_event_messages
+        super().__init__()
+
+
     def handle_message(self, msg: EventMessage):
         if not isinstance(msg, EventMessage):
             raise ValueError("%s: Ignoring non-EventMessage: %s" % (self.__class__.__name__, msg))
 
         stripped_subject = msg.subject.replace(_TMSS_EVENT_PREFIX_TEMPLATE%('',), '')
 
-        logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content)))
+        if self.log_event_messages:
+            logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content)))
 
         # sorry, very big if/elif/else tree.
         # it just maps all possible event subjects for all possible objects and statuses onto handler methods.
@@ -93,6 +101,8 @@ class TMSSEventMessageHandler(AbstractMessageHandler):
             self.onSchedulingUnitDraftCreated(**msg.content)
         elif stripped_subject == 'SchedulingUnitDraft.Object.Updated':
             self.onSchedulingUnitDraftUpdated(**msg.content)
+        elif stripped_subject == 'SchedulingUnitDraft.Object.Constraints.Updated':
+            self.onSchedulingUnitDraftConstraintsUpdated(**msg.content)
         elif stripped_subject == 'SchedulingUnitDraft.Object.Deleted':
             self.onSchedulingUnitDraftDeleted(**msg.content)
         elif stripped_subject.startswith('SubTask.Status.'):
@@ -101,6 +111,8 @@ class TMSSEventMessageHandler(AbstractMessageHandler):
             self.onTaskBlueprintStatusChanged(**msg.content)
         elif stripped_subject.startswith('SchedulingUnitBlueprint.Status.'):
             self.onSchedulingUnitBlueprintStatusChanged(**msg.content)
+        elif stripped_subject == 'Setting.Object.Updated':
+            self.onSettingUpdated(**msg.content)
         else:
             raise MessageHandlerUnknownSubjectError("TMSSBusListener.handleMessage: unknown subject: %s" %  msg.subject)
 
@@ -192,6 +204,12 @@ class TMSSEventMessageHandler(AbstractMessageHandler):
         '''
         pass
 
+    def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict):
+        '''onSchedulingUnitDraftConstraintsUpdated is called upon receiving a SchedulingUnitDraft.Object.Constraints.Updated message, which is sent when the constraints on a SchedulingUnitDraft are updated.
+        :param id: the TMSS id of the SchedulingUnitDraft
+        :param scheduling_constraints_doc: the updated scheduling constraints document
+        '''
+        pass
+
     def onSchedulingUnitDraftDeleted(self, id: int):
         '''onSchedulingUnitDraftDeleted is called upon receiving a SchedulingUnitDraft.Object.Deleted message, which is sent when a SchedulingUnitDrafts was created.
         :param id: the TMSS id of the SchedulingUnitDraft
@@ -216,6 +234,13 @@ class TMSSEventMessageHandler(AbstractMessageHandler):
         '''
         pass
 
+    def onSettingUpdated(self, name: str, value):
+        '''onSettingUpdated is called upon receiving a Setting.Object.Updated message, which is sent when a Setting was updated.
+        :param name: the name of the Setting
+        :param value: the new value of the Setting
+        '''
+        pass
+
+
 
 class TMSSBusListener(BusListener):
     def __init__(self,
diff --git a/SAS/TMSS/docker-compose-scu199.yml b/SAS/TMSS/docker-compose-scu199.yml
index 0778331fa0f4cbdbc15cf49c1c3c88273b98b4db..85cfd2d27d6fd292129294551405937b511a07bf 100644
--- a/SAS/TMSS/docker-compose-scu199.yml
+++ b/SAS/TMSS/docker-compose-scu199.yml
@@ -7,7 +7,7 @@ services:
     env_file:
       - ./.env
     network_mode: "host"
-    command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P `hostname -f` -p 8008 --data'
+    command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P `hostname -f` -p 8008 -sSd'
     ports:
       - "8008:8008"
   testprovider:
diff --git a/SAS/TMSS/services/CMakeLists.txt b/SAS/TMSS/services/CMakeLists.txt
index b1cdad1bc8906d3ba0302fe6c867a6eb8bff9df1..7ca90e1a5220ba1c278a45e986029e408c2506d6 100644
--- a/SAS/TMSS/services/CMakeLists.txt
+++ b/SAS/TMSS/services/CMakeLists.txt
@@ -1,4 +1,4 @@
-lofar_add_package(TMSSSubtaskSchedulingService subtask_scheduling)
+lofar_add_package(TMSSSchedulingService scheduling)
 lofar_add_package(TMSSFeedbackHandlingService feedback_handling)
 lofar_add_package(TMSSPostgresListenerService tmss_postgres_listener)
 
diff --git a/SAS/TMSS/services/scheduling/CMakeLists.txt b/SAS/TMSS/services/scheduling/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..34de269349de481543af911fa1ad28162fb07b2f
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/CMakeLists.txt
@@ -0,0 +1,11 @@
+lofar_package(TMSSSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging)
+
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+
+include(FindPythonModule)
+find_python_module(astroplan REQUIRED)            # pip3 install astroplan
+
+add_subdirectory(lib)
+add_subdirectory(bin)
+add_subdirectory(test)
+
diff --git a/SAS/TMSS/services/scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/scheduling/bin/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a84d2f43814392f07e0b938b47c91e386e95fe4f
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/bin/CMakeLists.txt
@@ -0,0 +1,4 @@
+lofar_add_bin_scripts(tmss_scheduling_service)
+
+# supervisord config files
+lofar_add_sysconf_files(tmss_scheduling_service.ini DESTINATION supervisord.d)
diff --git a/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service
new file mode 100755
index 0000000000000000000000000000000000000000..5f4d206b4a453635cb8f5ffcab9234b5b468da30
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service
@@ -0,0 +1,57 @@
+#!/usr/bin/python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+
+import os
+from optparse import OptionParser
+import logging
+logger = logging.getLogger(__name__)
+
+from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME
+
+def main():
+    # make sure we run in UTC timezone
+    os.environ['TZ'] = 'UTC'
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+    # Check the invocation arguments
+    parser = OptionParser('%prog [options]',
+                          description='run the tmss_scheduling_service which automatically schedules the defined successor tasks for finished subtasks, and computes the dynamic schedule')
+    parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the messaging broker, default: %default')
+    parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the exchange on the messaging broker, default: %default')
+    parser.add_option('-t', '--tmss_client_credentials_id', dest='tmss_client_credentials_id', type='string',
+                      default=os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient"),
+                      help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default')
+    (options, args) = parser.parse_args()
+
+    os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings"
+    import django
+    django.setup()
+
+    from lofar.common.util import waitForInterrupt
+    from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service
+    from lofar.sas.tmss.services.scheduling.dynamic_scheduling import create_dynamic_scheduling_service
+
+    with create_subtask_scheduling_service(options.exchange, options.broker, options.tmss_client_credentials_id):
+        with create_dynamic_scheduling_service(options.exchange, options.broker):
+            waitForInterrupt()
+
+if __name__ == '__main__':
+    main()
diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service.ini
similarity index 100%
rename from SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini
rename to SAS/TMSS/services/scheduling/bin/tmss_scheduling_service.ini
diff --git a/SAS/TMSS/services/scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/scheduling/lib/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f4808987873979c7d600174fca802f167d1689a6
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/lib/CMakeLists.txt
@@ -0,0 +1,13 @@
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+include(PythonInstall)
+
+set(_py_files
+    dynamic_scheduling.py
+    subtask_scheduling.py
+    constraints/__init__.py
+    constraints/template_constraints_v1.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/services/scheduling)
+
diff --git a/SAS/TMSS/services/scheduling/lib/constraints/__init__.py b/SAS/TMSS/services/scheduling/lib/constraints/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..49f9857f8f2630dee58271dd8b59596fe168f702
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/lib/constraints/__init__.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2020
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it
+# and/or modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+
+"""
+This __init__ module for this constraints python package defines the 'API' to:
+ - filter a list of schedulable scheduling_units by checking their constraints: see method filter_scheduling_units_using_constraints
+ - sort a (possibly filtered) list of schedulable scheduling_units by evaluating their constraints and computing a 'fitness' score: see method sort_scheduling_units_scored_by_constraints
+These main methods are used in the dynamic_scheduler to pick the next best scheduling unit, and compute the midterm schedule.
+
+Currently we have only one SchedulingConstraintsTemplate in TMSS, named 'constraints', version 1.
+But it is envisioned that we will get more templates.
+So, based on the template, the actual filter and score methods are selected from a template-specific module.
+By convention we use one module per template. Currently we have and use only the module template_constraints_v1.py.
+
+If/when we add a new SchedulingConstraintsTemplate, then we should add a new module with the specific filter and score methods,
+and add an extra 'if' in the strategy pattern used here. (see below for the implementation)
+"""
+
+import logging
+logger = logging.getLogger(__name__)
+from datetime import datetime
+from typing import NamedTuple
+
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.exceptions import *
+
+################## main data struct and methods ##################
+
+class ScoredSchedulingUnit(NamedTuple):
+    '''struct for collecting scores per constraint and a weighted_score for a scheduling_unit at the given start_time
+    '''
+    scheduling_unit: models.SchedulingUnitBlueprint
+    scores: dict
+    start_time: datetime
+    weighted_score: float
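+
+    # e.g. (illustrative, field values assumed):
+    #   ScoredSchedulingUnit(scheduling_unit=<a SchedulingUnitBlueprint>, scores={'daily': 1.0, 'sky': 0.75},
+    #                        start_time=datetime(2020, 5, 28, 12, 0), weighted_score=0.875)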
+
+
+def filter_scheduling_units_using_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime, upper_bound: datetime) -> [models.SchedulingUnitBlueprint]:
+    """
+    Filter the given scheduling_units by whether their constraints are met within the given timewindow.
+    If one or more scheduling units can run only within this time window and not after it, then only these exclusively runnable scheduling units are returned.
+    :param lower_bound: evaluate and score the constraints at and after lower_bound. The returned units have a start_time guaranteed at or after lower_bound.
+    :param upper_bound: evaluate and score the constraints before upper_bound. The returned units have a stop_time guaranteed before upper_bound.
+    :param scheduling_units: evaluate/filter these scheduling_units.
+    Returns a list of scheduling_units whose constraints are met within the given timewindow.
+    """
+    runnable_scheduling_units = []
+    runnable_exclusive_in_this_window_scheduling_units = []
+
+    for scheduling_unit in scheduling_units:
+        try:
+            if can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound):
+                runnable_scheduling_units.append(scheduling_unit)
+
+                # if a schedulingunit cannot run after this window, then apparently it is limited to run exclusively in this time window.
+                earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound)
+                if not can_run_after(scheduling_unit, earliest_possible_start_time+scheduling_unit.duration):
+                    runnable_exclusive_in_this_window_scheduling_units.append(scheduling_unit)
+        except UnknownTemplateException as e:
+            # TODO: how do we notify the user that we cannot dynamically schedule this sub due to an unknown template?
+            # current pragmatic solution: log warning, and set sub state to error via its schedulable subtasks.
+            # This ensures that the unit is not schedulable anymore, and forces the user to take action.
+            # For example, the user can choose a different template,
+            # or submit a feature request to implement constraint solvers for this new template.
+            logger.warning(e)
+            for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all():
+                subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value)
+                subtask.save()
+
+    # if we have schedulingunit(s) that can run exclusively in this time window (and not afterwards), then return only these.
+    if runnable_exclusive_in_this_window_scheduling_units:
+        return runnable_exclusive_in_this_window_scheduling_units
+
+    # there are no exclusive units, so return all runnable_scheduling_units
+    return runnable_scheduling_units
+
+
+def get_best_scored_scheduling_unit_scored_by_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound_start_time:datetime, upper_bound_stop_time:datetime) -> ScoredSchedulingUnit:
+    """
+    get the best scored schedulable scheduling_unit which can run within the given time window from the given scheduling_units.
+    :param lower_bound_start_time: evaluate and score the constraints at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time.
+    :param upper_bound_stop_time: evaluate and score the constraints before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time.
+    :param scheduling_units: evaluate these scheduling_units.
+    Returns a ScoredSchedulingUnit struct with the best next schedulable scheduling unit and its proposed start_time where it best fits its constraints.
+    """
+    sorted_scored_scheduling_units = sort_scheduling_units_scored_by_constraints(scheduling_units, lower_bound_start_time, upper_bound_stop_time)
+
+    if sorted_scored_scheduling_units:
+        # they are sorted best to worst, so return/use first.
+        best_scored_scheduling_unit = sorted_scored_scheduling_units[0]
+        return best_scored_scheduling_unit
+
+    return None
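+
+# illustrative usage in the dynamic scheduler (the 'schedule' call below is a placeholder, not a TMSS API):
+#   best = get_best_scored_scheduling_unit_scored_by_constraints(units, now, now + timedelta(hours=12))
+#   if best is not None:
+#       schedule(best.scheduling_unit, at=best.start_time)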
+
+
+def sort_scheduling_units_scored_by_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound_start_time: datetime, upper_bound_stop_time: datetime) -> [ScoredSchedulingUnit]:
+    """
+    Compute the score and proposed start_time for all given scheduling_units. Return them sorted by their weighted_score.
+    :param lower_bound_start_time: evaluate and score the constraints at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time.
+    :param upper_bound_stop_time: evaluate and score the constraints before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time.
+    :param scheduling_units: evaluate these scheduling_units.
+    Returns a list of ScoredSchedulingUnit structs with the score details, a weighted_score and a proposed start_time where it best fits its constraints.
+    """
+
+    scored_scheduling_units = []
+    for scheduling_unit in scheduling_units:
+        try:
+            scored_scheduling_unit = compute_scores(scheduling_unit, lower_bound_start_time, upper_bound_stop_time)
+
+            # check and ensure that the proposed start_time is within the required [lower_bound_start_time, upper_bound_stop_time] window.
+            schedulable_unit = scored_scheduling_unit.scheduling_unit
+            proposed_start_time = scored_scheduling_unit.start_time
+            proposed_stop_time = proposed_start_time + schedulable_unit.duration
+
+            if proposed_start_time < lower_bound_start_time:
+                raise DynamicSchedulingException("The best next schedulable scheduling_unit id=%s has a proposed start_time '%s' before the given lower bound '%s'" % (
+                    schedulable_unit.id, proposed_start_time, lower_bound_start_time))
+
+            if proposed_stop_time > upper_bound_stop_time:
+                raise DynamicSchedulingException("The best next schedulable scheduling_unit id=%s has a proposed stop_time '%s' after the given upper bound '%s'" % (
+                    schedulable_unit.id, proposed_stop_time, upper_bound_stop_time))
+
+            scored_scheduling_units.append(scored_scheduling_unit)
+        except (UnknownTemplateException, DynamicSchedulingException) as e:
+            # TODO: how do we notify the user that we cannot dynamically schedule this sub due to an unknown template?
+            # current pragmatic solution: log warning, and set sub state to error via its schedulable subtasks.
+            # This ensures that the unit is not schedulable anymore, and forces the user to take action.
+            # For example, the user can choose a different template,
+            # or submit a feature request to implement constraint solvers for this new template.
+            logger.warning(e)
+            for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all():
+                subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value)
+                subtask.save()
+
+    return sorted(scored_scheduling_units, key=lambda x: x.weighted_score, reverse=True)
+
+
+################## helper methods #################################################################
+#                                                                                                 #
+# these helper methods are selected by a strategy pattern based on the template name and version  #
+# The actual implementation can be found in the other module(s) in this package                   #
+# Currently we only have one template with one implementation in template_constraints_v1.py       #
+#                                                                                                 #
+###################################################################################################
+
+def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''Check if the given scheduling_unit can run somewhere within the given time window, depending on the sub's constraints-template/doc.'''
+    constraints_template = scheduling_unit.draft.scheduling_constraints_template
+
+    # choose appropriate method based on template (strategy pattern), or raise
+    if constraints_template.name == 'constraints' and constraints_template.version == 1:
+        # import here to prevent circular imports. No need to worry about performance: Python imports a module only once and then uses the cached module.
+        from . import template_constraints_v1
+        return template_constraints_v1.can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound)
+
+    # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern)
+
+    raise UnknownTemplateException("Cannot check if scheduling_unit id=%s can run between '%s' and '%s', because we have no constraint checker for scheduling constraints template '%s' version=%s" % (
+                                    scheduling_unit.id, lower_bound, upper_bound, constraints_template.name, constraints_template.version))
+
+
+def can_run_after(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> bool:
+    '''Check if the given scheduling_unit can run somewhere after the given lower bound timestamp, depending on the sub's constraints-template/doc.'''
+    constraints_template = scheduling_unit.draft.scheduling_constraints_template
+
+    # choose appropriate method based on template (strategy pattern), or raise
+    if constraints_template.name == 'constraints' and constraints_template.version == 1:
+        # import here to prevent circular imports. No need to worry about performance: Python imports a module only once and then uses the cached module.
+        from . import template_constraints_v1
+        return template_constraints_v1.can_run_after(scheduling_unit, lower_bound)
+
+    # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern)
+
+    raise UnknownTemplateException("Cannot check if scheduling_unit id=%s can run after '%s', because we have no constraint checker for scheduling constraints template '%s' version=%s" % (
+                                    scheduling_unit.id, lower_bound, constraints_template.name, constraints_template.version))
+
+
+
+def compute_scores(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> ScoredSchedulingUnit:
+    '''Compute the "fitness" scores per constraint for the given scheduling_unit within the given [lower_bound, upper_bound] window, depending on the sub's constraints-template/doc.'''
+    constraints_template = scheduling_unit.draft.scheduling_constraints_template
+
+    # choose appropriate method based on template (strategy pattern), or raise
+    if constraints_template.name == 'constraints' and constraints_template.version == 1:
+        # import here to prevent circular imports. No need to worry about performance: Python imports a module only once and then uses the cached module.
+        from . import template_constraints_v1
+        return template_constraints_v1.compute_scores(scheduling_unit, lower_bound, upper_bound)
+
+    # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern)
+
+    raise UnknownTemplateException("Cannot compute scores for scheduling_unit id=%s, because we have no score computation method for scheduling constraints template '%s' version=%s" % (
+                                    scheduling_unit.id, constraints_template.name, constraints_template.version))
+
+
+def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime:
+    '''determine the earliest possible start_time for the given scheduling unit, taking into account all its constraints'''
+    constraints_template = scheduling_unit.draft.scheduling_constraints_template
+
+    # choose appropriate method based on template (strategy pattern), or raise
+    if constraints_template.name == 'constraints' and constraints_template.version == 1:
+        # import here to prevent circular imports. No need to worry about performance: Python imports a module only once and then uses the cached module.
+        from . import template_constraints_v1
+        return template_constraints_v1.get_earliest_possible_start_time(scheduling_unit, lower_bound)
+
+    # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern)
+
+    raise UnknownTemplateException("Cannot compute earliest possible start_time for scheduling_unit id=%s, because we have no constraint checker for scheduling constraints template '%s' version=%s" % (
+                                    scheduling_unit.id, constraints_template.name, constraints_template.version))
+
+
+def get_min_earliest_possible_start_time(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime) -> datetime:
+    '''determine the earliest possible start_time over all given scheduling units, taking into account all their constraints'''
+    try:
+        return min(get_earliest_possible_start_time(scheduling_unit, lower_bound) for scheduling_unit in scheduling_units)
+    except ValueError:
+        return lower_bound
+
+
+
+
diff --git a/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py
new file mode 100644
index 0000000000000000000000000000000000000000..247f89851ccdda58cdb07b98639c1349c45825fc
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python3
+
+# template_constraints_v1.py
+#
+# Copyright (C) 2020
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it
+# and/or modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id:  $
+
+"""
+"""
+
+import logging
+logger = logging.getLogger(__name__)
+from datetime import datetime, timedelta
+from dateutil import parser
+
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.tmssapp.conversions import create_astroplan_observer_for_station, Time, timestamps_and_stations_to_sun_rise_and_set
+
+from . import ScoredSchedulingUnit
+
+def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''determine if the given scheduling_unit can run within the given time window, evaluating all constraints from the "constraints" version 1 template'''
+    if has_manual_scheduler_constraint(scheduling_unit):
+        return False
+
+    if not can_run_within_timewindow_with_time_constraints(scheduling_unit, lower_bound, upper_bound):
+        return False
+
+    if not can_run_within_timewindow_with_sky_constraints(scheduling_unit, lower_bound, upper_bound):
+        return False
+
+    if not can_run_within_timewindow_with_daily_constraints(scheduling_unit, lower_bound, upper_bound):
+        return False
+
+    return True
+
+
+def can_run_after(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> bool:
+    '''Check if the given scheduling_unit can run somewhere after the given lower bound timestamp, depending on the sub's constraints-template/doc.'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    if 'before' in constraints['time']:
+        before = parser.parse(constraints['time']['before'], ignoretz=True)
+        return before > lower_bound
+
+    return True
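+
+# e.g. (illustrative) with constraints['time']['before'] = '2020-01-02T00:00:00':
+#   can_run_after(unit, datetime(2020, 1, 1)) -> True
+#   can_run_after(unit, datetime(2020, 1, 3)) -> False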
+
+# only expose the can_run_within_timewindow and can_run_after methods, and keep the details hidden from this module's importers, who do not need these implementation details
+__all__ = ['can_run_within_timewindow', 'can_run_after']
+
+
+def has_manual_scheduler_constraint(scheduling_unit: models.SchedulingUnitBlueprint) -> bool:
+    '''evaluate the scheduler constraint. Should this unit be manually scheduled?'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    return constraints.get('scheduler', '') == 'manual'
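+
+# For reference, a version-1 constraints doc looks roughly like this (illustrative
+# values; the authoritative structure is defined by the template's JSON schema):
+#   {'scheduler': 'dynamic',
+#    'time': {'after': '2020-01-01T00:00:00', 'before': '2020-01-02T00:00:00'},
+#    'daily': {'require_day': False, 'require_night': True, 'avoid_twilight': False},
+#    'sky': {'min_target_elevation': ..., 'min_calibrator_elevation': ..., 'transit_offset': ..., 'min_distance': ...}}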
+
+
+def can_run_within_timewindow_with_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''evaluate the daily constraint'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    if not (constraints['daily']['require_day'] or constraints['daily']['require_night']):
+        # no day/night restrictions, can run any time
+        return True
+
+    if constraints['daily']['require_day'] or constraints['daily']['require_night']:
+        # TODO: TMSS-254 and TMSS-255
+        # TODO: take avoid_twilight into account
+        # Please note that this first crude proof of concept treats sunset/sunrise as 'events',
+        # whereas in our definition they are transition periods. See: TMSS-435
+
+        # Ugly code. Should be improved. Works for demo.
+        # create a series of timestamps in the window of opportunity, and evaluate whether they all fall during day or all during night
+        possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound)
+
+        # ToDo: use specified total observation duration, and ignore pipelines which don't care about day/night
+        possible_stop_time = possible_start_time + scheduling_unit.duration
+        timestamps = [possible_start_time]
+        while timestamps[-1] < possible_stop_time - timedelta(hours=8):
+            timestamps.append(timestamps[-1] + timedelta(hours=8))
+        timestamps.append(possible_stop_time)
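+        # e.g. (illustrative) a 10h window starting at 2020-01-01T00:00 is sampled at 00:00, 08:00 and 10:00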
+
+        LOFAR_CENTER_OBSERVER = create_astroplan_observer_for_station('CS002')
+        if constraints['daily']['require_night'] and all(LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps):
+            return True
+
+        if constraints['daily']['require_day'] and all(not LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps):
+            return True
+
+    return False
+
+
+def can_run_within_timewindow_with_time_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''evaluate the time constraint(s)'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    # TODO: TMSS-244 (and more?), evaluate the constraints in constraints['time']
+    if has_manual_scheduler_constraint(scheduling_unit) and 'at' in constraints['time']:
+        at = parser.parse(constraints['time']['at'], ignoretz=True)
+        return at >= lower_bound and at+scheduling_unit.duration <= upper_bound
+
+    if 'before' in constraints['time']:
+        before = parser.parse(constraints['time']['before'], ignoretz=True)
+        # the unit must be able to start at lower_bound and still finish before the 'before' deadline
+        return before >= lower_bound + scheduling_unit.duration
+
+    if 'after' in constraints['time']:
+        after = parser.parse(constraints['time']['after'], ignoretz=True)
+        return lower_bound >= after
+
+    # TODO: evaluate 'between' constraints, along these lines:
+    # if 'between' in constraints['time']:
+    #     betweens = [parser.parse(between, ignoretz=True) for between in constraints['time']['between']]
+    #     ... check whether the unit fits within one of these windows ...
+
+    return True # for now, ignore the remaining time constraints.
+
+
+def can_run_within_timewindow_with_sky_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''evaluate the sky constraint(s)'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    # TODO: TMSS-245 TMSS-250 (and more?), evaluate the constraints in constraints['sky']
+    # maybe even split this method into sub methods for the very distinct sky constraints: min_calibrator_elevation, min_target_elevation, transit_offset & min_distance
+    return True # for now, ignore sky constraints.
+
+
+def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime:
+    '''determine the earliest possible start_time for the given scheduling unit, taking into account all its constraints'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+
+    try:
+        if has_manual_scheduler_constraint(scheduling_unit) and 'at' in constraints['time']:
+            at = parser.parse(constraints['time']['at'], ignoretz=True)
+            return at
+
+        if 'after' in constraints['time']:
+            return parser.parse(constraints['time']['after'], ignoretz=True)
+
+        if constraints['daily']['require_day'] or constraints['daily']['require_night']:
+
+            # TODO: TMSS-254 and TMSS-255
+            # TODO: take avoid_twilight into account
+            # for now, use the incorrect proof of concept which works for the demo
+            # but... this should be rewritten completely using Joern's new sun_events
+            LOFAR_CENTER_OBSERVER = create_astroplan_observer_for_station('CS002')
+            sun_events = timestamps_and_stations_to_sun_rise_and_set(timestamps=[lower_bound], stations=['CS002'])['CS002']
+            sun_set = sun_events['sunset'][0]['start']
+            sun_rise = sun_events['sunrise'][0]['end']
+            if constraints['daily']['require_day']:
+                if lower_bound+scheduling_unit.duration > sun_set:
+                    return LOFAR_CENTER_OBSERVER.sun_rise_time(time=Time(sun_set), which='next').to_datetime()
+                if lower_bound >= sun_rise:
+                    return lower_bound
+                return sun_rise
+
+            if constraints['daily']['require_night']:
+                if lower_bound+scheduling_unit.duration < sun_rise:
+                    return lower_bound
+                if lower_bound >= sun_set:
+                    return lower_bound
+                return sun_set
+    except Exception as e:
+        logger.exception(str(e))
+
+    # no constraints dictating starttime? make a guesstimate.
+    return lower_bound
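+
+# e.g. (illustrative) for a unit with only constraints['time']['after'] = '2020-01-02T00:00:00':
+#   get_earliest_possible_start_time(unit, datetime(2020, 1, 1)) -> datetime(2020, 1, 2)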
+
+
+def compute_scores(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> ScoredSchedulingUnit:
+    '''Compute the "fitness" scores per constraint for the given scheduling_unit within the given [lower_bound, upper_bound] window, depending on the sub's constraints-template/doc.'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+
+    # TODO: add compute_scores methods for each type of constraint
+    # TODO: take start_time into account. For example, an LST constraint yields a better score when the start_time is such that the center of the observation is at the desired LST.
+    # TODO: TMSS-??? (and more?), compute score using the constraints in constraints['daily']
+    # TODO: TMSS-244 (and more?), compute score using the constraints in constraints['time']
+    # TODO: TMSS-245 TMSS-250 (and more?),  compute score using the constraints in constraints['sky']
+
+    # for now (as a proof of concept and sort of example), just return 1's
+    scores = {'daily': 1.0,
+              'time': 1.0,
+              'sky': 1.0 }
+
+    # add "common" scores which do not depend on constraints, such as project rank and creation date
+    # TODO: should be normalized!
+    scores['project_rank'] = scheduling_unit.draft.scheduling_set.project.priority_rank
+    #scores['age'] = (datetime.utcnow() - scheduling_unit.created_at).total_seconds()
+
+    try:
+        # TODO: apply weights. Needs some new weight model in django, probably linked to constraints_template.
+        # for now, just average the scores
+        weighted_score = sum(scores.values())/len(scores)
+    except ZeroDivisionError:
+        weighted_score = 1
+
+    return ScoredSchedulingUnit(scheduling_unit=scheduling_unit,
+                                scores=scores,
+                                weighted_score=weighted_score,
+                                start_time=get_earliest_possible_start_time(scheduling_unit, lower_bound))
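+
+# Illustrative result for a unit in a project with priority_rank=2, given the placeholder
+# scores above: scores={'daily': 1.0, 'time': 1.0, 'sky': 1.0, 'project_rank': 2},
+# weighted_score=(1.0+1.0+1.0+2)/4=1.25, start_time=<earliest possible start_time>.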
+
diff --git a/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py b/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py
new file mode 100644
index 0000000000000000000000000000000000000000..a15475960a3e94e18d3dbe0afbf2bd7c93dc3fc5
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python3
+
+# dynamic_scheduling.py
+#
+# Copyright (C) 2020
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it
+# and/or modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id:  $
+
+"""
+"""
+
+import os
+import logging
+logger = logging.getLogger(__name__)
+from datetime import datetime, timedelta, time
+
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.tmssapp.tasks import schedule_independent_subtasks_in_scheduling_unit_blueprint, unschedule_subtasks_in_scheduling_unit_blueprint
+from lofar.sas.tmss.tmss.tmssapp.subtasks import update_subtasks_start_times_for_scheduling_unit, clear_defined_subtasks_start_stop_times_for_scheduling_unit
+from lofar.sas.tmss.tmss.exceptions import SubtaskSchedulingException  # raised below when a subtask cannot be scheduled
+from lofar.sas.tmss.client.tmssbuslistener import *
+from lofar.common.datetimeutils import round_to_second_precision
+from threading import Thread, Event
+
+from lofar.sas.tmss.services.scheduling.constraints import *
+
+# LOFAR needs to have a gap in between observations to (re)initialize hardware.
+DEFAULT_INTER_OBSERVATION_GAP = timedelta(seconds=60)
+
+################## core dynamic scheduling methods ################################################
+#                                                                                                 #
+# This module starts with the core dynamic scheduling methods which are used in the dynamic       #
+# scheduling service. These high level methods only filter/score/sort in a generic way.           #
+# The detailed concrete filter/score/sort methods are picked by a strategy pattern in the         #
+# constraints package based on each scheduling unit's scheduling_constraints template.            #
+#                                                                                                 #
+###################################################################################################
+
+def find_best_next_schedulable_unit(scheduling_units:[models.SchedulingUnitBlueprint], lower_bound_start_time: datetime, upper_bound_stop_time: datetime) -> ScoredSchedulingUnit:
+    """
+    find the best schedulable scheduling_unit which can run within the given time window from the given scheduling_units.
+    :param lower_bound_start_time: evaluate the constraints at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time.
+    :param upper_bound_stop_time: evaluate the constraints before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time.
+    :param scheduling_units: evaluate these scheduling_units.
+    Returns a ScoredSchedulingUnit struct with the best next schedulable scheduling unit and its proposed start_time where it best fits its constraints.
+    """
+    # ensure upper is greater than or equal to lower
+    upper_bound_stop_time = max(lower_bound_start_time, upper_bound_stop_time)
+
+    filtered_scheduling_units = filter_scheduling_units_using_constraints(scheduling_units, lower_bound_start_time, upper_bound_stop_time)
+
+    if filtered_scheduling_units:
+        best_scored_scheduling_unit = get_best_scored_scheduling_unit_scored_by_constraints(filtered_scheduling_units, lower_bound_start_time, upper_bound_stop_time)
+        return best_scored_scheduling_unit
+
+    # no filtered scheduling units found...
+    logger.debug("No schedulable scheduling units found which meet the requirements between '%s' and '%s'", lower_bound_start_time, upper_bound_stop_time)
+    return None
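+
+# Illustrative usage (a sketch; assumes there are schedulable units):
+#   candidates = get_schedulable_scheduling_units()
+#   best = find_best_next_schedulable_unit(candidates, datetime.utcnow(), datetime.utcnow() + timedelta(days=1))
+#   if best:
+#       schedule_independent_subtasks_in_scheduling_unit_blueprint(best.scheduling_unit, start_time=best.start_time)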
+
+
+def schedule_next_scheduling_unit() -> models.SchedulingUnitBlueprint:
+    '''find the best next schedulable scheduling unit and try to schedule it.
+    Overlapping existing scheduled units are unscheduled if their score is lower.
+    :return: the scheduled scheduling unit.'''
+
+    # --- setup of needed variables ---
+    schedulable_units = get_schedulable_scheduling_units()
+
+    # estimate the lower_bound_start_time
+    lower_bound_start_time = get_min_earliest_possible_start_time(schedulable_units, datetime.utcnow())
+
+    # estimate the upper_bound_stop_time, which may give us a small timewindow before any next scheduled unit, or a default window of a day
+    try:
+        upper_bound_stop_time = max(su.start_time for su in get_scheduled_scheduling_units(lower=lower_bound_start_time, upper=lower_bound_start_time + timedelta(days=1)))
+    except ValueError:
+        upper_bound_stop_time = lower_bound_start_time + timedelta(days=1)
+
+    # no need to clutter the log files (and irritate the user) with subsecond scheduling precision
+    lower_bound_start_time = round_to_second_precision(lower_bound_start_time)
+    upper_bound_stop_time = max(round_to_second_precision(upper_bound_stop_time), lower_bound_start_time)
+
+    # --- core routine ---
+    while lower_bound_start_time < upper_bound_stop_time:
+        try:
+            # try to find the best next scheduling_unit
+            logger.info("schedule_next_scheduling_unit: searching for best scheduling unit to schedule between '%s' and '%s'", lower_bound_start_time, upper_bound_stop_time)
+            best_scored_scheduling_unit = find_best_next_schedulable_unit(schedulable_units, lower_bound_start_time, upper_bound_stop_time)
+            if best_scored_scheduling_unit:
+                best_scheduling_unit = best_scored_scheduling_unit.scheduling_unit
+                best_scheduling_unit_score = best_scored_scheduling_unit.weighted_score
+                best_start_time = best_scored_scheduling_unit.start_time
+
+                # make start_time "look nice" for us humans
+                best_start_time = round_to_second_precision(best_start_time)
+
+                logger.info("schedule_next_scheduling_unit: found best candidate id=%s '%s' weighted_score=%s start_time=%s",
+                            best_scheduling_unit.id, best_scheduling_unit.name, best_scheduling_unit_score, best_start_time)
+
+                if unschedule_blocking_scheduled_units_if_needed_and_possible(best_scored_scheduling_unit):
+                    # no (old) scheduled scheduling_units in the way, so schedule our candidate!
+                    scheduled_scheduling_unit = schedule_independent_subtasks_in_scheduling_unit_blueprint(best_scheduling_unit, start_time=best_start_time)
+
+                    logger.info("schedule_next_scheduling_unit: scheduled best candidate id=%s '%s' score=%s start_time=%s",
+                                best_scheduling_unit.id, best_scheduling_unit.name, best_scheduling_unit_score, best_start_time)
+                    return scheduled_scheduling_unit
+
+        except SubtaskSchedulingException as e:
+            logger.error("Could not schedule scheduling_unit id=%s name='%s'. Error: %s", best_scheduling_unit.id, best_scheduling_unit.name, e)
+
+        # nothing was found, or an error occurred.
+        # search again... (loop) with the remaining schedulable_units and new lower_bound_start_time
+        schedulable_units = get_schedulable_scheduling_units()
+        lower_bound_start_time = get_min_earliest_possible_start_time(schedulable_units, lower_bound_start_time + timedelta(hours=1))
+
+
+def assign_start_stop_times_to_schedulable_scheduling_units(lower_bound_start_time: datetime):
+    '''assign estimated start/stop times to the remaining schedulable scheduling units, creating a tentative mid-term schedule'''
+    logger.info("Estimating mid-term schedule...")
+
+    scheduling_units = get_schedulable_scheduling_units()
+
+    upper_bound_stop_time = lower_bound_start_time + timedelta(days=365)
+
+    # update the start_times of the remaining ones (so they form a queue, and can be visualized in a timeline)
+    while scheduling_units and lower_bound_start_time < upper_bound_stop_time:
+        best_scored_scheduling_unit = find_best_next_schedulable_unit(scheduling_units, lower_bound_start_time, upper_bound_stop_time)
+
+        if best_scored_scheduling_unit:
+            scheduling_unit = best_scored_scheduling_unit.scheduling_unit
+            start_time = round_to_second_precision(best_scored_scheduling_unit.start_time)
+            logger.info("mid-term schedule: next scheduling unit id=%s '%s' start_time=%s", scheduling_unit.id, scheduling_unit.name, start_time)
+            update_subtasks_start_times_for_scheduling_unit(scheduling_unit, start_time)
+
+            # keep track of the lower_bound_start_time based on the last sub's stop_time and the inter-observation gap
+            lower_bound_start_time = scheduling_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP
+
+            scheduling_units.remove(scheduling_unit)
+        else:
+            # search again in a later timeslot
+            min_earliest_possible_start_time = get_min_earliest_possible_start_time(scheduling_units, lower_bound_start_time+timedelta(minutes=10))
+            if min_earliest_possible_start_time > lower_bound_start_time:
+                lower_bound_start_time = min_earliest_possible_start_time
+            else:
+                # cannot advance the lower bound anymore, so give up on the remaining units
+                logger.warning("Cannot assign start/stop times to remaining scheduling units for mid-term schedule...")
+                for su in scheduling_units:
+                    logger.warning("Remaining scheduling unit: id=%s '%s'", su.id, su.name)
+
+                    # clear start/stop times, so they don't show up in the timeline,
+                    # and we can filter/show them in a separate list where the user can tweak the constraints
+                    clear_defined_subtasks_start_stop_times_for_scheduling_unit(su)
+                break
+
+    logger.info("Estimating mid-term schedule... finished")
+
+
+def do_dynamic_schedule() -> models.SchedulingUnitBlueprint:
+    '''do a full update of the schedule: schedule the next scheduling unit and assign start/stop times to the remaining schedulable scheduling units'''
+    logger.info("Updating dynamic schedule....")
+    scheduled_unit = schedule_next_scheduling_unit()
+
+    # determine next possible start time for remaining scheduling_units
+    if scheduled_unit:
+        lower_bound_start_time = scheduled_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP
+    else:
+        try:
+            scheduled_units = get_scheduled_scheduling_units(datetime.utcnow(), datetime.utcnow())
+            lower_bound_start_time = max([s.stop_time for s in scheduled_units if s.stop_time is not None]) + DEFAULT_INTER_OBSERVATION_GAP
+        except ValueError:  # raised by max() when there are no scheduled units with a stop_time
+            lower_bound_start_time = datetime.utcnow()
+
+    # round up to the next whole second
+    lower_bound_start_time += timedelta(microseconds=1000000-lower_bound_start_time.microsecond)
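+    # e.g. (illustrative) 12:00:00.250000 -> 12:00:01.000000; note that an exact second is also bumped to the next whole second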
+
+    # determine mid-term schedule by assigning start/stop times to remaining schedulable units using the same search strategy
+    assign_start_stop_times_to_schedulable_scheduling_units(lower_bound_start_time)
+    logger.info("Finished updating dynamic schedule")
+
+    return scheduled_unit
+
+
+################## service/messagebus handler class ###############################################
+
+class TMSSDynamicSchedulingMessageHandler(TMSSEventMessageHandler):
+    '''
+    The TMSSDynamicSchedulingMessageHandler reacts to TMSS EventMessages by triggering a new full update of the dynamic
+    schedule.
+    The actual schedule-update method runs on a background thread, and can take some time to complete, ranging from a
+    few seconds to several minutes. In the meantime new EventMessages may be received. These are handled by raising a flag
+    that signals the schedule-update-thread that a new full update is needed. This way, a burst of Events results in
+    a single update, and it also ensures that we always compute the schedule with the latest data.
+    '''
+
+    def __init__(self):
+        super().__init__(log_event_messages=True)
+        self._scheduling_thread = None
+        self._scheduling_thread_running = False
+        self._do_schedule_event = Event()
+
+    def start_handling(self):
+        # start the background thread which waits until the _do_schedule_event event is set upon receiving the relevant TMSS EventMessages.
+        self._scheduling_thread = Thread(target=TMSSDynamicSchedulingMessageHandler._scheduling_loop, kwargs={'self':self})
+        self._scheduling_thread.daemon = True
+        self._scheduling_thread_running = True
+        self._scheduling_thread.start()
+        super().start_handling()
+
+    def stop_handling(self):
+        self._scheduling_thread_running = False
+        self._scheduling_thread.join()
+        self._scheduling_thread = None
+        super().stop_handling()
+
+    def onSchedulingUnitBlueprintStatusChanged(self, id: int, status: str):
+        if status in ["schedulable", "observed", "finished", "cancelled"]:
+            logger.info("onSchedulingUnitBlueprintStatusChanged(id=%s, status=%s): triggering update of dynamic schedule...", id, status)
+            # scheduling takes a long time, longer than creating many scheduling units in bulk
+            # so, we do not create a complete new schedule for each new unit,
+            # but we only trigger a new schedule update.
+            # This way we are sure that the latest units are always taken into account while scheduling, but we do not waste CPU cycles.
+            self._do_schedule_event.set()
+
+    def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict):
+        affected_scheduling_units = models.SchedulingUnitBlueprint.objects.filter(draft__id=id).all()
+        for scheduling_unit in affected_scheduling_units:
+            if scheduling_unit.status == 'scheduled':
+                unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit)
+
+        self._do_schedule_event.set()
+
+    def onSettingUpdated(self, name: str, value: bool):
+        if name == models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value and value:
+            logger.info("%s was set to %s: triggering update of dynamic schedule...", name, value)
+            self._do_schedule_event.set()
+
+    def _scheduling_loop(self):
+        while self._scheduling_thread_running:
+            if self._do_schedule_event.wait(timeout=10):
+                self._do_schedule_event.clear()
+                try:
+                    if models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value:
+                        do_dynamic_schedule()
+                    else:
+                        logger.warning("Skipping update of dynamic schedule because the setting %s=%s", models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value, models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value)
+                except Exception as e:
+                    logger.exception(str(e))
+                    # just continue processing events. better luck next time...
+
+
+def create_dynamic_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER):
+    return TMSSBusListener(handler_type=TMSSDynamicSchedulingMessageHandler,
+                           handler_kwargs=None,
+                           exchange=exchange,
+                           broker=broker)
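+
+# Illustrative usage (a sketch, mirroring the subtask scheduling service's main):
+#   with create_dynamic_scheduling_service():
+#       waitForInterrupt()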
+
+
+
+
+
+################## helper methods #################################################################
+
+def get_schedulable_scheduling_units() -> [models.SchedulingUnitBlueprint]:
+    '''get a list of all schedulable scheduling_units'''
+    defined_independent_subtasks = models.Subtask.independent_subtasks().filter(state__value='defined')
+    defined_independent_subtask_ids = defined_independent_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct().all()
+    scheduling_units = models.SchedulingUnitBlueprint.objects.filter(id__in=defined_independent_subtask_ids).select_related('draft', 'draft__scheduling_constraints_template').all()
+    return [su for su in scheduling_units if su.status == 'schedulable']
+
+
+def get_scheduled_scheduling_units(lower:datetime=None, upper:datetime=None) -> [models.SchedulingUnitBlueprint]:
+    '''get a list of all scheduled scheduling_units whose scheduled time overlaps the given (optional) [lower, upper] window'''
+    scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled')
+    if lower is not None:
+        scheduled_subtasks = scheduled_subtasks.filter(stop_time__gte=lower)
+    if upper is not None:
+        scheduled_subtasks = scheduled_subtasks.filter(start_time__lte=upper)
+    return list(models.SchedulingUnitBlueprint.objects.filter(id__in=scheduled_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct()).all())
+
+
+def unschedule_blocking_scheduled_units_if_needed_and_possible(candidate: ScoredSchedulingUnit) -> bool:
+    '''check if there are any already scheduled units in the way, and unschedule them if allowed. Return True if nothing is blocking anymore.'''
+    # check any previously scheduled units, and unschedule if needed/allowed
+    scheduled_scheduling_units = get_scheduled_scheduling_units(lower=candidate.start_time,
+                                                                upper=candidate.start_time + candidate.scheduling_unit.duration)
+
+    # check if we can and need to unschedule the blocking units
+    for scheduled_scheduling_unit in scheduled_scheduling_units:
+        scheduled_score = compute_scores(scheduled_scheduling_unit, candidate.start_time, candidate.start_time + candidate.scheduling_unit.duration)
+
+        if candidate.weighted_score > scheduled_score.weighted_score:
+            # ToDo: also check if the scheduled_scheduling_unit is manually/dynamically scheduled
+            logger.info("unscheduling id=%s '%s' because it is in the way and has a lower score than the best candidate id=%s '%s' score=%s start_time=%s",
+                scheduled_scheduling_unit.id, scheduled_scheduling_unit.name,
+                candidate.scheduling_unit.id, candidate.scheduling_unit.name, candidate.weighted_score, candidate.scheduling_unit.start_time)
+
+            unschedule_subtasks_in_scheduling_unit_blueprint(scheduled_scheduling_unit)
+
+    # check again... are there still any scheduled_scheduling_units in the way?
+    scheduled_scheduling_units = get_scheduled_scheduling_units(lower=candidate.start_time,
+                                                                upper=candidate.start_time + candidate.scheduling_unit.duration)
+    if scheduled_scheduling_units:
+        # accept current solution with current scheduled_scheduling_units
+        logger.info("keeping current scheduled unit(s) which have a better (or equal) score: %s", "; ".join(
+            "id=%s '%s' start_time='%s'" % (su.id, su.name, su.start_time) for su in scheduled_scheduling_units))
+
+        # indicate there are still blocking units
+        return False
+
+    # all clear, nothing is blocking anymore
+    return True
+
+
+
diff --git a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py b/SAS/TMSS/services/scheduling/lib/subtask_scheduling.py
similarity index 95%
rename from SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py
rename to SAS/TMSS/services/scheduling/lib/subtask_scheduling.py
index 524a616a86fa35fca2351278a1d69b1df46d882f..af80ff8c94b1576407ede4b51df456d52cb0a495 100644
--- a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py
+++ b/SAS/TMSS/services/scheduling/lib/subtask_scheduling.py
@@ -77,7 +77,7 @@ class TMSSSubTaskSchedulingEventMessageHandler(TMSSEventMessageHandler):
                 except Exception as e:
                     logger.error(e)
 
-def create_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None):
+def create_subtask_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None):
     return TMSSBusListener(handler_type=TMSSSubTaskSchedulingEventMessageHandler,
                                   handler_kwargs={'tmss_client_credentials_id': tmss_client_credentials_id},
                                   exchange=exchange,
@@ -99,7 +99,7 @@ def main():
                       help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default')
     (options, args) = parser.parse_args()
 
-    with create_service(options.exchange, options.broker, options.tmss_client_credentials_id):
+    with create_subtask_scheduling_service(options.exchange, options.broker, options.tmss_client_credentials_id):
         waitForInterrupt()
 
 if __name__ == '__main__':
diff --git a/SAS/TMSS/services/scheduling/test/CMakeLists.txt b/SAS/TMSS/services/scheduling/test/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e3547f4fe0a484e2a395b50411e3e7d8b7486879
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/CMakeLists.txt
@@ -0,0 +1,11 @@
+# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
+
+if(BUILD_TESTING)
+    include(LofarCTest)
+
+    lofar_add_test(t_subtask_scheduling_service)
+    lofar_add_test(t_dynamic_scheduling)
+
+    set_tests_properties(t_subtask_scheduling_service PROPERTIES TIMEOUT 300)
+    set_tests_properties(t_dynamic_scheduling PROPERTIES TIMEOUT 300)
+endif()
diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py
new file mode 100755
index 0000000000000000000000000000000000000000..81acf398781285a91fefad08e53db84778fc256e
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import uuid
+
+import logging
+logger = logging.getLogger(__name__)
+
+from lofar.common.test_utils import skip_integration_tests
+if skip_integration_tests():
+    exit(3)
+
+TEST_UUID = uuid.uuid1()
+
+from datetime import datetime, timedelta
+from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema
+from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
+
+tmp_exchange = TemporaryExchange("t_dynamic_scheduling_%s" % (TEST_UUID,))
+tmp_exchange.open()
+
+# override DEFAULT_BUSNAME
+import lofar
+lofar.messaging.config.DEFAULT_BUSNAME = tmp_exchange.address
+
+from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+tmss_test_env = TMSSTestEnvironment(exchange=tmp_exchange.address,
+                                    populate_schemas=True, populate_test_data=False,
+                                    start_postgres_listener=True, start_subtask_scheduler=False,
+                                    start_ra_test_environment=True, enable_viewflow=False,
+                                    start_dynamic_scheduler=False)  # do not start the dynamic scheduler in the testenv, because it is the object-under-test.
+tmss_test_env.start()
+
+def tearDownModule():
+    tmss_test_env.stop()
+    tmp_exchange.close()
+
+from lofar.sas.tmss.test.tmss_test_data_django_models import *
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft
+from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtask
+from lofar.common.postgres import PostgresDatabaseConnection
+
+# the module under test
+from lofar.sas.tmss.services.scheduling.dynamic_scheduling import *
+
+
+class TestDynamicScheduling(unittest.TestCase):
+    '''
+    Tests for the dynamic scheduling service
+    '''
+    @classmethod
+    def setUpClass(cls) -> None:
+        # make some re-usable projects with low/medium/high priority
+        cls.project_low = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=1))
+        cls.project_medium = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=2))
+        cls.project_high = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=3))
+        cls.scheduling_set_low = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_low))
+        cls.scheduling_set_medium = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_medium))
+        cls.scheduling_set_high = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_high))
+
+    def setUp(self) -> None:
+        # wipe all radb entries (via cascading deletes) in between tests, so the tests don't influence each other
+        with PostgresDatabaseConnection(tmss_test_env.ra_test_environment.radb_test_instance.dbcreds) as radb:
+            radb.executeQuery('DELETE FROM resource_allocation.specification;')
+            radb.executeQuery('TRUNCATE resource_allocation.resource_usage;')
+            radb.commit()
+
+        # wipe all scheduling_unit_drafts in between tests, so the tests don't influence each other
+        for scheduling_set in [self.scheduling_set_low, self.scheduling_set_medium, self.scheduling_set_high]:
+            for scheduling_unit_draft in scheduling_set.scheduling_unit_drafts.all():
+                for scheduling_unit_blueprint in scheduling_unit_draft.scheduling_unit_blueprints.all():
+                    for task_blueprint in scheduling_unit_blueprint.task_blueprints.all():
+                        for subtask in task_blueprint.subtasks.all():
+                            try:
+                                if subtask.state.value == models.SubtaskState.Choices.SCHEDULED.value:
+                                    unschedule_subtask(subtask)
+                            except Exception as e:
+                                logger.exception(e)
+                            for output in subtask.outputs.all():
+                                for dataproduct in output.dataproducts.all():
+                                    dataproduct.delete()
+                                for consumer in output.consumers.all():
+                                    consumer.delete()
+                                output.delete()
+                            for input in subtask.inputs.all():
+                                input.delete()
+                            subtask.delete()
+                        task_blueprint.draft.delete()
+                        task_blueprint.delete()
+                    scheduling_unit_blueprint.delete()
+                scheduling_unit_draft.delete()
+
+    @staticmethod
+    def create_simple_observation_scheduling_unit(name:str=None, scheduling_set=None,
+                                                  obs_duration:int=60,
+                                                  constraints=None):
+        constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints")
+        constraints = add_defaults_to_json_object_for_schema(constraints or {}, constraints_template.schema)
+
+        strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation")
+        scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template,
+                                                                      strategy_template.scheduling_unit_template.schema)
+        scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = obs_duration
+
+        # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and we're ready to use it!
+        return models.SchedulingUnitDraft.objects.create(name=name,
+                                                         scheduling_set=scheduling_set,
+                                                         requirements_template=strategy_template.scheduling_unit_template,
+                                                         requirements_doc=scheduling_unit_spec,
+                                                         observation_strategy_template=strategy_template,
+                                                         scheduling_constraints_doc=constraints,
+                                                         scheduling_constraints_template=constraints_template)
+
+
+    def test_three_simple_observations_no_constraints_different_project_priority(self):
+        scheduling_unit_draft_low = self.create_simple_observation_scheduling_unit("scheduling unit low", scheduling_set=self.scheduling_set_low)
+        scheduling_unit_blueprint_low = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_low)
+
+        scheduling_unit_draft_medium = self.create_simple_observation_scheduling_unit("scheduling unit medium", scheduling_set=self.scheduling_set_medium)
+        scheduling_unit_blueprint_medium = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_medium)
+
+        scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit high", scheduling_set=self.scheduling_set_high)
+        scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high)
+
+        # call the method-under-test.
+        scheduled_scheduling_unit = do_dynamic_schedule()
+
+        # we expect the scheduling_unit with the highest project rank to be scheduled first
+        self.assertIsNotNone(scheduled_scheduling_unit)
+        self.assertEqual(scheduling_unit_blueprint_high.id, scheduled_scheduling_unit.id)
+
+        # check the results
+        # we expect the sub_high to be scheduled
+        scheduling_unit_blueprint_low.refresh_from_db()
+        scheduling_unit_blueprint_medium.refresh_from_db()
+        scheduling_unit_blueprint_high.refresh_from_db()
+        self.assertEqual(scheduling_unit_blueprint_low.status, 'schedulable')
+        self.assertEqual(scheduling_unit_blueprint_medium.status, 'schedulable')
+        self.assertEqual(scheduling_unit_blueprint_high.status, 'scheduled')
+
+        # check the scheduled subtask
+        upcoming_scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled',
+                                                                    task_blueprint__scheduling_unit_blueprint__in=(scheduling_unit_blueprint_low,
+                                                                                                                   scheduling_unit_blueprint_medium,
+                                                                                                                   scheduling_unit_blueprint_high)).all()
+        self.assertEqual(1, upcoming_scheduled_subtasks.count())
+        self.assertEqual(scheduling_unit_blueprint_high.id, upcoming_scheduled_subtasks[0].task_blueprint.scheduling_unit_blueprint.id)
+
+        # check that the scheduling units are queued in priority order: high, then medium, then low
+        self.assertGreater(scheduling_unit_blueprint_low.start_time, scheduling_unit_blueprint_medium.start_time)
+        self.assertGreater(scheduling_unit_blueprint_medium.start_time, scheduling_unit_blueprint_high.start_time)
+
+        # ensure DEFAULT_INTER_OBSERVATION_GAP between them
+        self.assertGreaterEqual(scheduling_unit_blueprint_medium.start_time - scheduling_unit_blueprint_high.stop_time, DEFAULT_INTER_OBSERVATION_GAP)
+        self.assertGreaterEqual(scheduling_unit_blueprint_low.start_time - scheduling_unit_blueprint_medium.stop_time, DEFAULT_INTER_OBSERVATION_GAP)
+
+
+    def test_time_bound_unit_wins_even_at_lower_priority(self):
+        # create two scheduling units, one with high and one with low priority.
+        # first create them without any further constraints, and check that high prio wins.
+        scheduling_unit_draft_low = self.create_simple_observation_scheduling_unit("scheduling unit low", scheduling_set=self.scheduling_set_low)
+        scheduling_unit_blueprint_low = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_low)
+
+        scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit high", scheduling_set=self.scheduling_set_high)
+        scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high)
+
+        now = datetime.utcnow()
+        tomorrow = now+timedelta(days=1)
+
+        # call the method-under-test.
+        best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow)
+
+        # we expect the scheduling_unit with the highest project rank to be scheduled first
+        self.assertEqual(scheduling_unit_blueprint_high.id, best_scored_scheduling_unit.scheduling_unit.id)
+
+        # now update the low prio unit with a time constraint, "forcing" it to be run in a very tight upcoming time window.
+        scheduling_unit_draft_low.scheduling_constraints_doc['time'] = { 'before': (now+scheduling_unit_draft_low.duration).isoformat()+'Z' }
+        scheduling_unit_draft_low.save()
+        scheduling_unit_blueprint_low.refresh_from_db()
+
+        # call the method-under-test.
+        best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow)
+
+        # now we expect the scheduling_unit with the lowest project rank to be scheduled first because it can only run within this limited timewindow
+        self.assertEqual(scheduling_unit_draft_low.id, best_scored_scheduling_unit.scheduling_unit.id)
+
+
+        # update the low prio unit again: enlarge the time window constraint a bit, so both low and high prio units can fit.
+        # this should result in the high prio unit going first, and the low prio unit (which now fits as well) going second.
+        scheduling_unit_draft_low.scheduling_constraints_doc['time'] = { 'before': (now+scheduling_unit_draft_low.duration+scheduling_unit_draft_high.duration).isoformat()+'Z' }
+        scheduling_unit_draft_low.save()
+        scheduling_unit_blueprint_low.refresh_from_db()
+
+        # call the method-under-test.
+        best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow)
+
+        # now we expect the scheduling_unit with the highest project rank to be scheduled first, because both units fit within the enlarged time window
+        self.assertEqual(scheduling_unit_blueprint_high.id, best_scored_scheduling_unit.scheduling_unit.id)
+
+        # call the method-under-test again but search after first unit (should return low prio unit)
+        stop_time_of_first = best_scored_scheduling_unit.start_time + best_scored_scheduling_unit.scheduling_unit.duration
+        best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], stop_time_of_first, tomorrow)
+        self.assertEqual(scheduling_unit_blueprint_low.id, best_scored_scheduling_unit.scheduling_unit.id)
+
+
+    def test_manual_constraint_is_preventing_scheduling_unit_from_being_scheduled_dynamically(self):
+        scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low,
+                                                                                      constraints={'scheduler': 'manual'})
+        scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual)
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable")
+
+        # call the method-under-test.
+        scheduled_scheduling_unit = do_dynamic_schedule()
+
+        # we expect no scheduling_unit to be scheduled, because the only unit has the 'manual' scheduler constraint
+        self.assertIsNone(scheduled_scheduling_unit)
+
+        # check the results
+        scheduling_unit_blueprint_manual.refresh_from_db()
+        self.assertEqual(scheduling_unit_blueprint_manual.status, 'schedulable')
+
+
+    def test_manually_scheduled_blocking_dynamically_scheduled(self):
+        scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low,
+                                                                                      constraints={'scheduler': 'manual'})
+        scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual)
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable")
+
+        schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint_manual, datetime.utcnow())
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "scheduled")
+
+        scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit online high", scheduling_set=self.scheduling_set_high)
+        scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high)
+
+        # call the method-under-test.
+        scheduled_scheduling_unit = do_dynamic_schedule()
+
+        # we expect no scheduling_unit to be scheduled, because the manually scheduled unit is in the way
+        self.assertIsNone(scheduled_scheduling_unit)
+
+        # check the results
+        # we expect sub_high to remain schedulable, because the manually scheduled unit blocks it
+        scheduling_unit_blueprint_high.refresh_from_db()
+        self.assertEqual(scheduling_unit_blueprint_high.status, 'schedulable')
+
+        # check that scheduling_unit_blueprint_high starts after the scheduled scheduling_unit_blueprint_manual
+        self.assertGreater(scheduling_unit_blueprint_high.start_time, scheduling_unit_blueprint_manual.start_time)
+
+        # ensure DEFAULT_INTER_OBSERVATION_GAP between them
+        self.assertGreaterEqual(scheduling_unit_blueprint_high.start_time - scheduling_unit_blueprint_manual.stop_time, DEFAULT_INTER_OBSERVATION_GAP)
+
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+if __name__ == '__main__':
+    # run the unit tests
+    unittest.main()
diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run
new file mode 100755
index 0000000000000000000000000000000000000000..d0831a318c2949b8a6990c0cef62fa6ea3bac68b
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+python3 t_dynamic_scheduling.py
+
diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ee5a97caed28fae29660df70d067fd9170658d70
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_dynamic_scheduling
\ No newline at end of file
diff --git a/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py
new file mode 100755
index 0000000000000000000000000000000000000000..57d3ca6f86bbc6ab3b9e5d5a7de7c051e75e2650
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import uuid
+
+import logging
+logger = logging.getLogger('lofar.'+__name__)
+
+from lofar.common.test_utils import skip_integration_tests
+if skip_integration_tests():
+    exit(3)
+
+from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
+from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service
+from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema
+
+from time import sleep
+from datetime import datetime, timedelta
+
+class TestSubtaskSchedulingService(unittest.TestCase):
+    '''
+    Tests for the SubtaskSchedulingService
+    '''
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.TEST_UUID = uuid.uuid1()
+
+        cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID))
+        cls.tmp_exchange.open()
+
+        # override DEFAULT_BUSNAME
+        import lofar
+        lofar.messaging.config.DEFAULT_BUSNAME = cls.tmp_exchange.address
+
+        # import here, and not at top of module, because DEFAULT_BUSNAME needs to be set before importing
+        from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment
+        from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+        from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+
+        cls.ra_test_env = RATestEnvironment(exchange=cls.tmp_exchange.address)
+        cls.ra_test_env.start()
+
+        cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, populate_schemas=True, populate_test_data=False,
+                                                start_subtask_scheduler=True, start_postgres_listener=True, start_ra_test_environment=True,
+                                                start_dynamic_scheduler=False, enable_viewflow=False)
+        cls.tmss_test_env.start()
+
+        cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
+                                                        (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password))
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+        cls.ra_test_env.stop()
+        cls.tmp_exchange.close()
+
+    @staticmethod
+    def wait_for_subtask_to_get_status(tmss_client, subtask_id, expected_status, timeout=30):
+        '''helper method to poll for a subtask's status.
+        raises a TimeoutError if expected_status is not met within timeout seconds.
+        returns subtask when expected_status is met.'''
+        start = datetime.utcnow()
+        subtask = tmss_client.get_subtask(subtask_id)
+        while subtask['state_value'] != expected_status:
+            sleep(0.5)
+            logger.info("Waiting for subtask id=%s to get status '%s'. Current status='%s'. Polling...", subtask_id, expected_status, subtask['state_value'])
+            subtask = tmss_client.get_subtask(subtask_id)
+            if datetime.utcnow() - start > timedelta(seconds=timeout):
+                raise TimeoutError("timeout while waiting for subtask id=%s to get status '%s'. It currently has status '%s'" % (
+                    subtask_id, expected_status, subtask['state_value']))
+        return subtask
+
+    def test_01_for_expected_behaviour_of_two_connected_subtasks(self):
+        '''
+        This test starts a scheduling service and tmss, creates a chain of subtasks, finishes the first, and checks if the successors are then scheduled.
+        '''
+        # this test is currently disabled; skip explicitly so the test runner reports it as skipped instead of silently passing
+        self.skipTest("test_01_for_expected_behaviour_of_two_connected_subtasks is currently disabled")
+
+        logger.info(' -- test_01_for_expected_behaviour_of_two_connected_subtasks -- ')
+
+        # create and start the service (the object under test)
+        service = create_subtask_scheduling_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
+        with BusListenerJanitor(service):
+            # -------------------------
+            # long setup of objects....
+
+            # setup proper template
+            subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/')
+
+            # create two subtasks
+            subtask1 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/')
+            subtask2 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url, task_blueprint_url=subtask1['task_blueprint']), '/subtask/')
+
+            # connect them
+            output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1['url']), '/subtask_output/')
+            input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2['url'], subtask_output_url=output_url), '/subtask_input/')
+
+            # ... end of long setup of objects
+            # --------------------------------
+
+            # now for the real test: set subtask1_id status to finished, and check that subtask2 is then properly scheduled
+            with self.tmss_test_env.create_tmss_client() as tmss_client:
+                subtask1 = tmss_client.get_subtask(subtask1['id'])
+                subtask2 = tmss_client.get_subtask(subtask2['id'])
+
+                self.assertEqual(subtask1['state_value'], 'defined')
+                self.assertEqual(subtask2['state_value'], 'defined')
+
+                # the first subtask ran, and is now finished... set its status. This should trigger the scheduling service to schedule the second subtask.
+                tmss_client.set_subtask_status(subtask1['id'], 'finished')
+
+                subtask2 = self.wait_for_subtask_to_get_status(tmss_client, subtask2['id'], 'scheduled')
+
+                # subtask2 should now be scheduled
+                self.assertEqual(subtask2['state_value'], 'scheduled')
+
+    def test_02_for_expected_behaviour_of_UC1_scheduling_unit(self):
+        '''
+        This test starts a scheduling service and tmss, creates the UC1 scheduling unit, and then walks its subtask graph in data-flow order, checking that finishing each subtask triggers the scheduling of its successors.
+        '''
+
+        logger.info(' -- test_02_for_expected_behaviour_of_UC1_scheduling_unit -- ')
+
+        # import here, and not at top of module, because the tmsstestenv needs to be running before importing
+        from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft
+        from lofar.sas.tmss.tmss.tmssapp.subtasks import update_subtasks_start_times_for_scheduling_unit
+        from lofar.sas.tmss.tmss.tmssapp import models
+        from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data
+
+        # create and start the service (the object under test)
+        service = create_subtask_scheduling_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
+        with BusListenerJanitor(service):
+            # -------------------------
+            # setup of objects: create the UC1 scheduling unit, and then select the first runnable subtasks
+            strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
+            spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema)
+            scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 CTC+pipelines",
+                                                                              scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()),
+                                                                              requirements_template=strategy_template.scheduling_unit_template,
+                                                                              requirements_doc=spec,
+                                                                              observation_strategy_template=strategy_template,
+                                                                              scheduling_constraints_doc=get_default_json_object_for_schema(models.SchedulingConstraintsTemplate.objects.get(name="constraints").schema),
+                                                                              scheduling_constraints_template=models.SchedulingConstraintsTemplate.objects.get(name="constraints"))
+
+            scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+
+            # assign some non-overlapping start times, so the tasks can be scheduled
+            update_subtasks_start_times_for_scheduling_unit(scheduling_unit_blueprint, datetime.utcnow())
+
+            # scheduling_unit_blueprint now has task_blueprints and subtasks
+            # "unpack" the whole graph, so we can "walk" it and see if the correct subtasks are scheduled once its predecessors are finished
+            obs_cal1 = scheduling_unit_blueprint.task_blueprints.get(name="Calibrator Observation 1")
+            obs_cal1_st_obs = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value)))
+            obs_cal1_st_qa1 = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value)))
+            obs_cal1_st_qa2 = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value)))
+
+            pl_cal1 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline 1")
+            pl_cal1_st = pl_cal1.subtasks.first()
+
+            obs_tgt = scheduling_unit_blueprint.task_blueprints.get(name="Target Observation")
+            obs_tgt_st_obs = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value)))
+            obs_tgt_st_qa1 = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value)))
+            obs_tgt_st_qa2 = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value)))
+
+            pl_tgt1 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline target1")
+            pl_tgt1_st = pl_tgt1.subtasks.first()
+
+            pl_tgt2 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline target2")
+            pl_tgt2_st = pl_tgt2.subtasks.first()
+
+            obs_cal2 = scheduling_unit_blueprint.task_blueprints.get(name="Calibrator Observation 2")
+            obs_cal2_st_obs = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value)))
+            obs_cal2_st_qa1 = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value)))
+            obs_cal2_st_qa2 = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value)))
+
+            pl_cal2 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline 2")
+            pl_cal2_st = pl_cal2.subtasks.first()
+
+            # define the graph in an iterable way: as tuples of a subtask-successors-pair
+            # the graph is also ordered in a 'data-flow' direction
+            graph = (# calibrator1 obs, qa and pipeline
+                     (obs_cal1_st_obs, (obs_cal1_st_qa1, pl_cal1_st)),
+                     (obs_cal1_st_qa1, (obs_cal1_st_qa2,)),
+                     (obs_cal1_st_qa2, tuple()),
+                     (pl_cal1_st, tuple()),
+                     #target obs, qa and pipelines
+                     (obs_tgt_st_obs, (obs_tgt_st_qa1, pl_tgt1_st, pl_tgt2_st)),
+                     (obs_tgt_st_qa1, (obs_tgt_st_qa2,)),
+                     (obs_tgt_st_qa2, tuple()),
+                     (pl_tgt1_st, tuple()),
+                     (pl_tgt2_st, tuple()),
+                     # calibrator2 obs, qa and pipeline
+                     (obs_cal2_st_obs, (obs_cal2_st_qa1, pl_cal2_st)),
+                     (obs_cal2_st_qa1, (obs_cal2_st_qa2,)),
+                     (obs_cal2_st_qa2, tuple()),
+                     (pl_cal2_st, tuple()) )
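+            # schematically, the tuples above encode the following successor chains (data flows left to right):
+            #   obs_cal1_st_obs -> obs_cal1_st_qa1 -> obs_cal1_st_qa2
+            #   obs_cal1_st_obs -> pl_cal1_st
+            #   obs_tgt_st_obs  -> obs_tgt_st_qa1  -> obs_tgt_st_qa2
+            #   obs_tgt_st_obs  -> pl_tgt1_st / pl_tgt2_st
+            #   obs_cal2_st_obs -> obs_cal2_st_qa1 -> obs_cal2_st_qa2
+            #   obs_cal2_st_obs -> pl_cal2_st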
+
+            logger.info(" --- test_02_for_expected_behaviour_of_UC1_scheduling_unit setup finished. starting actual test ---")
+            # ... end of long setup of objects
+            # --------------------------------
+
+            # now for the real test: use only the http rest api to check statuses and call schedule methods
+            with self.tmss_test_env.create_tmss_client() as tmss_client:
+                # walk the graph in a "data-flow" direction
+                for subtask, successors in graph:
+                    # get up-to-date subtask via the rest client
+                    subtask1 = tmss_client.get_subtask(subtask.id)
+                    logger.info("subtask id=%s status=%s successors: %s", subtask1['id'], subtask1['state_value'], ','.join(str(s.id) for s in successors))
+
+                    if subtask1['state_value'] == 'defined':
+                        for successor in successors:
+                            # get up-to-date subtask via the rest client
+                            subtask2 = tmss_client.get_subtask(successor.id)
+                            self.assertEqual(subtask2['state_value'], 'defined')
+
+                        # simulate that some scheduler schedules the first subtask (which does not depend on predecessors)...
+                        if len(tmss_client.get_subtask_predecessors(subtask1['id'])) == 0:
+                            subtask1 = tmss_client.schedule_subtask(subtask1['id'])
+                            self.assertEqual(subtask1['state_value'], 'scheduled')
+
+                    if subtask1['state_value'] == 'scheduled':
+                        # simulate that the first subtask ran, and is now finished...
+                        # cycle over the 'run time' statuses, concluding with status to finished.
+                        # The finished status should trigger the scheduling service to schedule the successor subtask(s).
+                        for status in ['queueing', 'queued', 'starting', 'started', 'finishing', 'finished']:
+                            tmss_client.set_subtask_status(subtask1['id'], status)
+
+                        for successor in successors:
+                            # get up-to-date subtask via the rest client
+                            subtask2 = self.wait_for_subtask_to_get_status(tmss_client, successor.id, 'scheduled')
+                            self.assertEqual(subtask2['state_value'], 'scheduled')
+
+
+if __name__ == '__main__':
+    #run the unit tests
+    unittest.main()
diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.run
similarity index 100%
rename from SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run
rename to SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.run
diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.sh
similarity index 100%
rename from SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh
rename to SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.sh
diff --git a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt
deleted file mode 100644
index 460e356bc2c99121eb41a48fc27fad7d20a51fac..0000000000000000000000000000000000000000
--- a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-lofar_package(TMSSSubtaskSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging)
-
-lofar_find_package(PythonInterp 3.4 REQUIRED)
-
-add_subdirectory(lib)
-add_subdirectory(bin)
-add_subdirectory(test)
-
diff --git a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt
deleted file mode 100644
index 07e30a532f710dd1242ba026ad12e9ce014f1125..0000000000000000000000000000000000000000
--- a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-lofar_add_bin_scripts(tmss_subtask_scheduling_service)
-
-# supervisord config files
-lofar_add_sysconf_files(tmss_subtask_scheduling_service.ini DESTINATION supervisord.d)
diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service
deleted file mode 100755
index 2ecd686a25fd88e45094bf4cda143e41de1fb61d..0000000000000000000000000000000000000000
--- a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
-# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
-#
-# This file is part of the LOFAR software suite.
-# The LOFAR software suite is free software: you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# The LOFAR software suite is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
-
-
-from lofar.sas.tmss.services.subtask_scheduling import main
-
-if __name__ == "__main__":
-    main()
diff --git a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt
deleted file mode 100644
index 7cf0b591612ccb75bc2a73c1a6f9d1d8a2c2d9da..0000000000000000000000000000000000000000
--- a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-lofar_find_package(PythonInterp 3.4 REQUIRED)
-include(PythonInstall)
-
-set(_py_files
-    subtask_scheduling.py
-    )
-
-python_install(${_py_files}
-    DESTINATION lofar/sas/tmss/services)
-
diff --git a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt
deleted file mode 100644
index b9da06a5dc6b27fde81e26c6cc5ba027cae2d821..0000000000000000000000000000000000000000
--- a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
-
-if(BUILD_TESTING)
-    include(LofarCTest)
-
-    lofar_add_test(t_subtask_scheduling_service)
-endif()
diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py
deleted file mode 100755
index 84d85d879019b0a5d09832d7cf5815f53ef12a2b..0000000000000000000000000000000000000000
--- a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
-# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
-#
-# This file is part of the LOFAR software suite.
-# The LOFAR software suite is free software: you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# The LOFAR software suite is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
-
-import unittest
-import uuid
-
-import logging
-logger = logging.getLogger(__name__)
-logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
-
-from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
-from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
-
-from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
-from lofar.sas.tmss.services.subtask_scheduling import create_service
-from lofar.common.test_utils import integration_test
-from time import sleep
-from datetime import datetime, timedelta
-
-@integration_test
-class TestSubtaskSchedulingService(unittest.TestCase):
-    '''
-    Tests for the SubtaskSchedulingService
-    '''
-    @classmethod
-    def setUpClass(cls) -> None:
-        cls.TEST_UUID = uuid.uuid1()
-
-        cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID))
-        cls.tmp_exchange.open()
-
-        cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address)
-        cls.tmss_test_env.start()
-
-        cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
-                                                        (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password))
-
-    @classmethod
-    def tearDownClass(cls) -> None:
-        cls.tmss_test_env.stop()
-        cls.tmp_exchange.close()
-
-    def test_01_for_expected_behaviour(self):
-        '''
-        This test starts a scheduling service and tmss, creates a chain of subtasks, finishes the first, and checks if the successors are then scheduled.
-        '''
-
-        logger.info(' -- test_01_for_expected_behaviour -- ')
-
-        # create and start the service (the object under test)
-        service = create_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
-        with BusListenerJanitor(service):
-            # -------------------------
-            # long setup of objects....
-
-            # setup proper template
-            subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/')
-
-            # create two subtasks
-            subtask1_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/')
-            subtask2_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/')
-
-            # ugly
-            subtask1_id = subtask1_url.split('/')[subtask1_url.split('/').index('subtask') + 1]
-            subtask2_id = subtask2_url.split('/')[subtask2_url.split('/').index('subtask') + 1]
-
-            # connect them
-            output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1_url), '/subtask_output/')
-            input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2_url, subtask_output_url=output_url), '/subtask_input/')
-
-            # ... end of long setup of objects
-            # --------------------------------
-
-            # now for the real test: set subtask1_id status to finished, and check that subtask2 is then properly scheduled
-            with self.tmss_test_env.create_tmss_client() as tmss_client:
-                subtask1 = tmss_client.get_subtask(subtask1_id)
-                subtask2 = tmss_client.get_subtask(subtask2_id)
-
-                self.assertEqual(subtask1['state_value'], 'defined')
-                self.assertEqual(subtask2['state_value'], 'defined')
-
-                # the first subtask ran, and is now finished... set it's status. This should trigger the scheduling service to schedule the second subtask.
-                tmss_client.set_subtask_status(subtask1_id, 'finished')
-
-                # allow some time for the scheduling service to do its thing...
-                start = datetime.utcnow()
-                while subtask2['state_value'] != 'scheduled':
-                    subtask2 = tmss_client.get_subtask(subtask2_id)
-                    sleep(0.5)
-                    if datetime.utcnow() - start >  timedelta(seconds=2):
-                        raise TimeoutError()
-
-                # subtask2 should now be scheduled
-                self.assertEqual(subtask2['state_value'], 'scheduled')
-
-if __name__ == '__main__':
-    #run the unit tests
-    unittest.main()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
index 3cf20c24ec7ed26321f2c8acc85e09a14961b6eb..51532b9390cc3e2b54a2f637f4bc26faf992b4e7 100644
--- a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
+++ b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
@@ -54,7 +54,7 @@ class TMSSPGListener(PostgresListener):
         self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'delete'))
         self.subscribe('tmssapp_subtask_delete', self.onSubTaskDeleted)
 
-        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', 'state_id'))
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', column_name='state_id', quote_column_value=True))
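+        # note: subtask state_id values are strings (the SubtaskState choice values), so the generated notification SQL presumably needs them quoted, hence quote_column_value=True above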
         self.subscribe('tmssapp_subtask_update_column_state_id', self.onSubTaskStateUpdated)
 
 
@@ -107,6 +107,13 @@ class TMSSPGListener(PostgresListener):
         self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'delete'))
         self.subscribe('tmssapp_schedulingunitdraft_delete', self.onSchedulingUnitDraftDeleted)
 
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'update', column_name='scheduling_constraints_doc', quote_column_value=False))
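+        # note: Postgres truncates identifiers, including NOTIFY channel names, to 63 characters, hence the [:63] below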
+        self.subscribe('tmssapp_schedulingunitdraft_update_column_scheduling_constraints_doc'[:63], self.onSchedulingUnitDraftConstraintsUpdated)
+
+        # Settings
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_setting', 'update', id_column_name='name_id', quote_id_value=True, column_name='value', quote_column_value=True))
+        self.subscribe('tmssapp_setting_update_column_value', self.onSettingUpdated)
+
         return super().start()
 
     def __exit__(self, exc_type, exc_val, exc_tb):
@@ -191,6 +198,17 @@ class TMSSPGListener(PostgresListener):
     def onSchedulingUnitDraftDeleted(self, payload = None):
         self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Deleted', payload)
 
+    def onSchedulingUnitDraftConstraintsUpdated(self, payload = None):
+        # TODO: convert the payload string to a nested json doc; for now the payload is forwarded as-is
+        self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Constraints.Updated', payload)
+
+    def onSettingUpdated(self, payload = None):
+        payload = json.loads(payload)
+        payload['name'] = payload['name_id']
+        del payload['name_id']
+        payload['value'] = payload['value'] in ('true', 'True', 't')
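+        # illustrative example (hypothetical setting name): an incoming payload '{"name_id": "dynamic_scheduling_enabled", "value": "true"}'
+        # is sent out as {'name': 'dynamic_scheduling_enabled', 'value': True}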
+        self._sendNotification(TMSS_SETTING_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
 
 def create_service(dbcreds, exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER):
     '''create a TMSSPGListener instance'''
@@ -216,7 +234,7 @@ def main():
     parser.add_option_group(group)
 
     parser.add_option_group(dbcredentials.options_group(parser))
-    parser.set_defaults(dbcredentials=os.environ.get('TMSS_CLIENT_DBCREDENTIALS', 'TMSS'))
+    parser.set_defaults(dbcredentials=os.environ.get('TMSS_DBCREDENTIALS', 'TMSS'))
     (options, args) = parser.parse_args()
 
     dbcreds = dbcredentials.parse_options(options)
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
old mode 100644
new mode 100755
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.run b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.run
old mode 100644
new mode 100755
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.sh b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.sh
old mode 100644
new mode 100755
diff --git a/SAS/TMSS/src/CMakeCache.txt b/SAS/TMSS/src/CMakeCache.txt
deleted file mode 100644
index 0b2dc14cb11f159cf34cbf5f5ad840ce0aaab7d0..0000000000000000000000000000000000000000
--- a/SAS/TMSS/src/CMakeCache.txt
+++ /dev/null
@@ -1,326 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: /lofar/SAS/TMSS/src
-# It was generated by CMake: /usr/bin/cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//No help, variable specified on the command line.
-BUILD_PACKAGES:UNINITIALIZED=TMSS
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/cc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=' '
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Enable/Disable output of compile commands during generation.
-CMAKE_EXPORT_COMPILE_COMMANDS:BOOL=OFF
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=/usr/local
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=' '
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=/usr/bin/objcopy
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=/usr/bin/objdump
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=Project
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=' '
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//If set, runtime paths are not added when installing shared libraries,
-// but are added when building.
-CMAKE_SKIP_INSTALL_RPATH:BOOL=NO
-
-//If set, runtime paths are not added when using shared libraries.
-CMAKE_SKIP_RPATH:BOOL=NO
-
-//Flags used by the linker during the creation of static libraries.
-CMAKE_STATIC_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_STATIC_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_STATIC_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:PATH=
-
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:PATH=
-
-//Value Computed by CMake
-Project_BINARY_DIR:STATIC=/lofar/SAS/TMSS/src
-
-//Value Computed by CMake
-Project_SOURCE_DIR:STATIC=/lofar/SAS
-
-
-########################
-# INTERNAL cache entries
-########################
-
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=/lofar/SAS/TMSS/src
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=12
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=/usr/bin/cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=/usr/bin/cpack
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=/usr/bin/ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=/usr/bin/ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=ELF
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXPORT_COMPILE_COMMANDS
-CMAKE_EXPORT_COMPILE_COMMANDS-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Name of generator toolset.
-CMAKE_GENERATOR_TOOLSET:INTERNAL=
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=/lofar/SAS
-//Install .so files without execute permission.
-CMAKE_INSTALL_SO_NO_EXE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=/usr/share/cmake
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SKIP_INSTALL_RPATH
-CMAKE_SKIP_INSTALL_RPATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS
-CMAKE_STATIC_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_DEBUG
-CMAKE_STATIC_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL
-CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELEASE
-CMAKE_STATIC_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-
diff --git a/SAS/TMSS/src/CMakeLists.txt b/SAS/TMSS/src/CMakeLists.txt
index fd5a8389a74c27f43c3def1fadb5a87813d9212f..d86760922e83a89788d24c0ba54299d120dc4cf4 100644
--- a/SAS/TMSS/src/CMakeLists.txt
+++ b/SAS/TMSS/src/CMakeLists.txt
@@ -17,6 +17,7 @@ find_python_module(django_jsonforms REQUIRED)   # pip3 install django-jsonforms
 find_python_module(django_json_widget REQUIRED) # pip3 install django-json-widget
 find_python_module(jsoneditor REQUIRED)         # pip3 install django-jsoneditor
 find_python_module(jsonschema REQUIRED)         # pip3 install jsonschema
+find_python_module(astropy REQUIRED)            # pip3 install astropy
 
 # modules for swagger API export
 find_python_module(drf_yasg REQUIRED)           # pip install drf-yasg
diff --git a/SAS/TMSS/src/migrate_momdb_to_tmss.py b/SAS/TMSS/src/migrate_momdb_to_tmss.py
index 13efa43bbc7759f453875c51cdbfb3f9b5734fb9..e2d0c8102979755204db98ddc326c00a62a44230 100755
--- a/SAS/TMSS/src/migrate_momdb_to_tmss.py
+++ b/SAS/TMSS/src/migrate_momdb_to_tmss.py
@@ -512,8 +512,7 @@ def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
                    "priority": project.priority_rank,  # todo: correct to derive from project?
                    # optional:
                    "start_time": start_time,
-                   "stop_time": stop_time,
-                   "schedule_method": models.ScheduleMethod.objects.get(value="manual"),  # todo: correct? Or leave None?
+                   "stop_time": stop_time
                    # "created_or_updated_by_user" = None,
                    # "raw_feedback" = None,
                    # "do_cancel": None,
diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py
index a320dbd527a5a58a0d7274836beb66f9f5387c1c..e45ba40745dbfac84a842d9334b3fd687ad2cc23 100644
--- a/SAS/TMSS/src/tmss/exceptions.py
+++ b/SAS/TMSS/src/tmss/exceptions.py
@@ -22,3 +22,11 @@ class SubtaskSchedulingException(SchedulingException):
 
 class TaskSchedulingException(SchedulingException):
     pass
+
+class DynamicSchedulingException(SchedulingException):
+    pass
+
+class UnknownTemplateException(TMSSException):
+    '''raised when TMSS tries to base its processing routines on the chosen template, but this specific template is unknown.'''
+    pass
+
diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py
index 7f160668b40ac7164efdfaea77f44fb018e32d7d..9ba919e02252205cd5b2d7c0e83565bd2cf088c4 100644
--- a/SAS/TMSS/src/tmss/settings.py
+++ b/SAS/TMSS/src/tmss/settings.py
@@ -24,52 +24,21 @@ logger = logging.getLogger(__name__)
 LOGGING = {
     'version': 1,
     'disable_existing_loggers': False,
-    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse',
-        },
-        'require_debug_true': {
-            '()': 'django.utils.log.RequireDebugTrue',
-        },
-    },
     'formatters': {
-        'django.server': {
-            '()': 'django.utils.log.ServerFormatter',
-            'format': '%(asctime)s %(levelname)s %(message)s',
-        },
-        'lofar': {
+        'lofar_formatter': {
             'format': '%(asctime)s %(levelname)s %(message)s',
         },
     },
     'handlers': {
         'console': {
             'level': 'DEBUG',
-            'filters': ['require_debug_true'],
             'class': 'logging.StreamHandler',
-        },
-        'django.server': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'django.server',
-        },
-        'mail_admins': {
-            'level': 'ERROR',
-            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
-        },
-        'lofar': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'lofar',
+            'formatter': 'lofar_formatter',
         },
     },
     'loggers': {
-        'django': {
-            'handlers': ['console', 'mail_admins'],
-            'level': 'INFO',
-        },
         'django.server': {
-            'handlers': ['django.server'],
+            'handlers': ['console'],
             'level': 'INFO',
             'propagate': False,
         },
@@ -82,13 +51,15 @@ LOGGING = {
             'level': 'DEBUG',  # change debug level as appropiate
             'propagate': False,
         },
-        'django.db.backends': {
-            'level': 'INFO',
-            'handlers': ['console'],
-        },
+        # 'django.db.backends': { # uncomment to enable logging of each db query. Very spammy and slow, but also useful for performance analysis. Gives even more detail/insight than the django debug toolbar.
+        #     'level': 'DEBUG',
+        #     'handlers': ['console'],
+        #     'propagate': False,
+        # },
         'lofar': {
-            'handlers': ['lofar'],
+            'handlers': ['console'],
             'level': 'INFO',
+            'propagate': False,
         },
     }
 }
@@ -121,20 +92,9 @@ INSTALLED_APPS = [
     'drf_yasg',
     'django_filters',
     'material',
-    'material.frontend',
-    'viewflow',
-    'viewflow.frontend',
-    'lofar.sas.tmss.tmss.workflowapp',
+    'material.frontend'
 ]
 
-
-def show_debug_toolbar(*args, **kwargs):
-    return os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False)
-
-DEBUG_TOOLBAR_CONFIG = {
-    'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar
-}
-
 MIDDLEWARE = [
     'django.middleware.gzip.GZipMiddleware',
     'django.middleware.security.SecurityMiddleware',
@@ -146,16 +106,25 @@ MIDDLEWARE = [
     'django.middleware.clickjacking.XFrameOptionsMiddleware'
 ]
 
+def show_debug_toolbar(*args, **kwargs):
+    # parse the env var value explicitly: bool() on any non-empty string (e.g. 'False') would be True
+    return os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', 'False').lower() in ('true', '1', 'yes', 'on')
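+# e.g. export SHOW_DJANGO_DEBUG_TOOLBAR=true (accepted truthy values: 'true', '1', 'yes', 'on') to enable the debug toolbar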
+
 if show_debug_toolbar():
+    DEBUG_TOOLBAR_CONFIG = { 'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar }
     INSTALLED_APPS.append('debug_toolbar')
     MIDDLEWARE.insert(MIDDLEWARE.index('django.middleware.gzip.GZipMiddleware')+1, 'debug_toolbar.middleware.DebugToolbarMiddleware')
 
+
+if os.environ.get('TMSS_ENABLE_VIEWFLOW', 'False').lower() in ('true', '1', 'yes', 'on'):
+    INSTALLED_APPS.extend(['viewflow', 'viewflow.frontend', 'lofar.sas.tmss.tmss.workflowapp'])
+
+
 ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls'
 
 TEMPLATES = [
     {
         'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp')],
+        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp')],
         'APP_DIRS': True,
         'OPTIONS': {
             'context_processors': [
@@ -169,7 +138,7 @@ TEMPLATES = [
 ]
 
 STATICFILES_DIRS = [
-    os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/static')
+    os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp/build/static')
 ]
 
 WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application'
diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py
index ce112f7b30b8f697baf91d4da9202899703715ba..ec85e15186b4ce899544024fe550a895cbac01cf 100644
--- a/SAS/TMSS/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py
@@ -1,6 +1,5 @@
 from astropy.time import Time
 import astropy.units
-from lofar.lta.sip import station_coordinates
 from datetime import datetime
 from astropy.coordinates.earth import EarthLocation
 from astropy.coordinates import Angle
@@ -13,7 +12,7 @@ def create_astroplan_observer_for_station(station: str) -> Observer:
     :param station: a station name, e.g. "CS002"
     :return: astroplan.observer.Observer object
     '''
-
+    from lofar.lta.sip import station_coordinates
     coords = station_coordinates.parse_station_coordinates()["%s_LBA" % station.upper()]
     location = EarthLocation.from_geocentric(x=coords['x'], y=coords['y'], z=coords['z'],  unit=astropy.units.m)
     observer = Observer(location, name="LOFAR", timezone="UTC")
@@ -71,6 +70,8 @@ def local_sidereal_time_for_utc_and_station(timestamp: datetime = None,
     :param kind: 'mean' or 'apparent'
     :return:
     """
+    from lofar.lta.sip import station_coordinates
+
     if timestamp is None:
         timestamp = datetime.utcnow()
     station_coords = station_coordinates.parse_station_coordinates()
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
index 7faae82d9b3a694ab79ebddd5b3d0c5676d8ffac..f375f739ae5a435eff01474107753925b5b4208f 100644
--- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.7 on 2020-10-29 16:37
+# Generated by Django 3.0.9 on 2020-11-17 13:44
 
 from django.conf import settings
 import django.contrib.postgres.fields
@@ -17,34 +17,6 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.CreateModel(
-            name='SchedulingUnitBlueprintSummary',
-            fields=[
-                ('id', models.IntegerField(primary_key=True, serialize=False)),
-                ('sub_id', models.IntegerField()),
-                ('taskblueprint_id', models.IntegerField()),
-                ('task_type', models.CharField(max_length=128)),
-                ('derived_task_status', models.CharField(max_length=128)),
-            ],
-            options={
-                'db_table': 'tmssapp_schedulingunitblueprintsummary',
-                'managed': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='TaskBlueprintSummary',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('taskblueprint_id', models.IntegerField()),
-                ('subtask_id', models.IntegerField()),
-                ('substate', models.CharField(max_length=128)),
-                ('subtask_type', models.CharField(max_length=128)),
-            ],
-            options={
-                'db_table': 'tmssapp_taskblueprintsummary',
-                'managed': False,
-            },
-        ),
         migrations.CreateModel(
             name='Algorithm',
             fields=[
@@ -483,15 +455,6 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
-        migrations.CreateModel(
-            name='ScheduleMethod',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
         migrations.CreateModel(
             name='SchedulingConstraintsTemplate',
             fields=[
@@ -630,7 +593,6 @@ class Migration(migrations.Migration):
                 ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).', null=True)),
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')),
                 ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)),
-                ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')),
                 ('raw_feedback', models.CharField(help_text='The raw feedback for this Subtask', max_length=1048576, null=True)),
             ],
             options={
@@ -852,9 +814,9 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')),
-                ('first', models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskDraft')),
+                ('first', models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_scheduling_relation', to='tmssapp.TaskDraft')),
                 ('placement', models.ForeignKey(help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')),
-                ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskDraft')),
+                ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskDraft')),
             ],
             options={
                 'abstract': False,
@@ -868,9 +830,9 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')),
-                ('first', models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskBlueprint')),
+                ('first', models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_scheduling_relation', to='tmssapp.TaskBlueprint')),
                 ('placement', models.ForeignKey(default='after', help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')),
-                ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskBlueprint')),
+                ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskBlueprint')),
             ],
             options={
                 'abstract': False,
@@ -1075,11 +1037,6 @@ class Migration(migrations.Migration):
             name='global_identifier',
             field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
         ),
-        migrations.AddField(
-            model_name='subtask',
-            name='schedule_method',
-            field=models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod'),
-        ),
         migrations.AddField(
             model_name='subtask',
             name='specifications_template',
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
index 6e58f28a9dcd373dc38be715dd609274e2e6deb1..188b5c3086547549a8f527febaf37f6749044238 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
@@ -47,6 +47,7 @@ class SubtaskState(AbstractChoice):
         DEFINED = "defined"
         SCHEDULING = "scheduling"
         SCHEDULED = "scheduled"
+        UNSCHEDULING = "unscheduling"
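+        # (transient state used by unschedule_subtask: scheduled -> unscheduling -> defined)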
         QUEUEING = "queueing"
         QUEUED = "queued"
         STARTING = "starting"
@@ -92,16 +93,6 @@ class Algorithm(AbstractChoice):
         AES256 = 'aes256'
 
 
-class ScheduleMethod(AbstractChoice):
-    """Defines the model and predefined list of possible Algorithm's for DataproductHash.
-    The items in the Choices class below are automagically populated into the database via a data migration."""
-
-    class Choices(Enum):
-        MANUAL = 'manual'
-        BATCH = 'batch'
-        DYNAMIC = 'dynamic'
-
-
 #
 # Templates
 #
@@ -152,8 +143,6 @@ class Subtask(BasicCommon):
     task_blueprint = ForeignKey('TaskBlueprint', related_name='subtasks', null=True, on_delete=SET_NULL, help_text='Task Blueprint to which this Subtask belongs.')
     specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.')
     do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')
-    priority = IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')
-    schedule_method = ForeignKey('ScheduleMethod', null=False, on_delete=PROTECT, help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).')
     cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).')
     # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work?
     created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.')
@@ -166,6 +155,34 @@ class Subtask(BasicCommon):
         # keep original state for logging
         self.__original_state_id = self.state_id
 
+    @property
+    def duration(self) -> timedelta:
+        '''the duration of this subtask (stop-start), or 0 if start/stop are None'''
+        if self.start_time is None or self.stop_time is None:
+            return timedelta(seconds=0)
+        return self.stop_time - self.start_time
+
+    @property
+    def specified_duration(self) -> timedelta:
+        '''get the specified (or estimated) duration of this subtask based on the specified task duration and the subtask type'''
+        if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
+            # observations have a specified duration, so grab it from the spec.
+            return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', 0))
+
+        if self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
+            # pipelines usually do not have a specified duration, so make a guess (half the max predecessor duration).
+            # note: don't pass the max() as dict.get's default; it would be evaluated (and raise on an empty predecessors queryset) even when 'duration' is present.
+            if 'duration' in self.task_blueprint.specifications_doc:
+                return timedelta(seconds=self.task_blueprint.specifications_doc['duration'])
+            return timedelta(seconds=max(p.specified_duration.total_seconds() for p in self.predecessors)/2)
+
+        # other subtasktypes usually depend on cpu/data/network etc. So, make a guess (for now)
+        return timedelta(minutes=5)
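+
+    # Illustrative values (not executed here): for an observation whose task spec contains
+    # {"duration": 600}, specified_duration yields timedelta(seconds=600); a pipeline fed
+    # only by that observation would yield the guessed timedelta(seconds=300).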
+
+    @staticmethod
+    def independent_subtasks() -> QuerySet:
+        '''return a QuerySet of all subtasks with no input (i.e. which are "independent" because they have no predecessors)
+           If you want the result, add .all() like so: Subtask.independent_subtasks().all()
+        '''
+        return Subtask.objects.filter(inputs=None)
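+        # e.g. (illustrative): Subtask.independent_subtasks().filter(state__value='defined')
+        # selects the schedulable "root" subtasks; the same inputs=None filter recurs in subtasks.py.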
+
     @property
     def successors(self) -> QuerySet:
-        '''return the connect successor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
+        '''return the connected successor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
@@ -188,6 +205,20 @@ class Subtask(BasicCommon):
                                                     "INNER JOIN tmssapp_subtaskinput as st_input on st_input.producer_id = st_output.id\n"
                                                     "WHERE st_input.subtask_id = %s", params=[self.id]))
 
+    @property
+    def input_dataproducts(self) -> QuerySet:
+        '''return the input dataproduct(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
+        If you want the result, add .all() like so: my_subtask.input_dataproducts.all()
+        '''
+        return Dataproduct.objects.filter(subtaskinput__subtask_id=self.id)
+
+    @property
+    def output_dataproducts(self) -> QuerySet:
+        '''return the output dataproduct(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
+        If you want the result, add .all() like so: my_subtask.output_dataproducts.all()
+        '''
+        return Dataproduct.objects.filter(producer__subtask_id=self.id)
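+
+    # Both properties return QuerySets, so further filtering happens in the database,
+    # e.g. (illustrative): my_subtask.output_dataproducts.filter(dataformat__value='MeasurementSet').count()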
+
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         creating = self._state.adding  # True on create, False on update
 
@@ -204,16 +235,10 @@ class Subtask(BasicCommon):
             if duplicate_names:
                 raise ValidationError("Pointings defined in the same Subtask must have unique names. Duplicate names %s in subtask id=%s." % (duplicate_names, self.pk))
 
-        # check if we have a start time or there were predecessors
+        # check if we have a start time when scheduling
         if self.state.value == SubtaskState.Choices.SCHEDULED.value and self.__original_state_id == SubtaskState.Choices.SCHEDULING.value:
             if self.start_time is None:
-                if self.predecessors.all().count() == 0:
-                    raise SubtaskSchedulingException("Cannot schedule subtask id=%s when start time is 'None'." % (self.pk, ))
+                raise SubtaskSchedulingException("Cannot schedule subtask id=%s when start time is 'None'." % (self.pk, ))
-                else:
-                    self.start_time = datetime.utcnow()
-
-        if self.state.value == SubtaskState.Choices.FINISHING.value:
-            self.stop_time = datetime.utcnow()
 
         super().save(force_insert, force_update, using, update_fields)
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
index 7ec6f980a09fb9dd3d765efb164611a5d898b8a6..f2f04f15009885ff04473a3601256dd878108803 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
@@ -21,6 +21,7 @@ from django.urls import reverse as revese_url
 from collections import Counter
 from django.utils.functional import cached_property
 
+
 #
 # Common
 #
@@ -150,12 +151,11 @@ class SchedulingRelationPlacement(AbstractChoice):
         BEFORE = "before"
         PARALLEL = "parallel"
 
-
 class Flag(AbstractChoice):
     """Defines the model and predefined list of possible Flags to be used in Setting.
-    The items in the Choises class below are automagically populated into the database via a data migration."""
+    The items in the Choices class below are automagically populated into the database via a data migration."""
     class Choices(Enum):
-        AUTOSCHEDULE = "allow_scheduling_observations"
+        DYNAMIC_SCHEDULING_ENABLED = "dynamic_scheduling_enabled"
 
 
 
@@ -375,35 +375,6 @@ class DefaultTaskRelationSelectionTemplate(BasicCommon):
     name = CharField(max_length=128, unique=True)
     template = ForeignKey("TaskRelationSelectionTemplate", on_delete=PROTECT)
 
-
-#
-# DatabaseView  objects
-#
-class TaskBlueprintSummary(Model):
-    taskblueprint_id = IntegerField()
-    subtask_id = IntegerField()
-    substate = CharField(max_length=128)
-    subtask_type = CharField(max_length=128)
-
-    class Meta:
-        managed = False
-        db_table = 'tmssapp_taskblueprintsummary'
-
-
-class SchedulingUnitBlueprintSummary(Model):
-    # Using in an id and ForeignKey is not common for a view BUT the id is a 'dummy' to be able to use in Django
-    # https://resources.rescale.com/using-database-views-in-django-orm/
-    # otherwise an exception will be thrown
-    id = IntegerField(primary_key=True)
-    sub_id = IntegerField()
-    taskblueprint_id = IntegerField()
-    task_type = CharField(max_length=128)
-    derived_task_status = CharField(max_length=128)
-
-    class Meta:
-        managed = False
-        db_table = 'tmssapp_schedulingunitblueprintsummary'
-
 #
 # Instance Objects
 #
@@ -813,7 +784,7 @@ class TaskDraft(NamedCommon):
     def relative_start_time(self) -> datetime.timedelta:
         '''return the earliest relative start time of all subtasks of this task
         '''
-        scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
+        scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all())
         for scheduling_relation in scheduling_relations:
             # sometimes self._id does not exist so use self.id instead to avoid Exception
             if hasattr(self, '_id'):
@@ -934,9 +905,11 @@ class TaskBlueprint(NamedCommon):
 
     @cached_property
     def relative_start_time(self) -> datetime.timedelta:
-        '''return the earliest relative start time of all subtasks of this task
+        '''The relative start time is relative to the start of the parent scheduling unit.
+        It is based on the scheduling_relations in the scheduling_unit's specification,
+        and hence determines the order in which the tasks within the unit should be executed.
         '''
-        scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
+        scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all())
         for scheduling_relation in scheduling_relations:
             # sometimes self._id does not exist so use self.id instead to avoid Exception
             if hasattr(self, '_id'):
@@ -960,7 +933,8 @@ class TaskBlueprint(NamedCommon):
 
     @cached_property
     def relative_stop_time(self) -> datetime.timedelta:
-        '''return the latest relative stop time of all subtasks of this task
+        '''The relative_stop_time is the relative_start_time + duration.
+        See relative_start_time for an explanation of its intended usage.
         '''
         # todo: when it was added, check if subtask.specifications_template.type.value == TaskType.Choices.OBSERVATION.value:
         try:
@@ -1010,7 +984,7 @@ class TaskBlueprint(NamedCommon):
             return "defined"
 
         if len([s for s in subtasks if s['state'] == 'finished']) == nr_of_subtasks:
-                return "finished"
+            return "finished"
 
         if any(s for s in subtasks if s['state'] in ('cancelling', 'cancelled')):
             return "cancelled"
@@ -1069,8 +1043,8 @@ class TaskRelationBlueprint(BasicCommon):
 
 
 class TaskSchedulingRelationBlueprint(BasicCommon):
-    first = ForeignKey('TaskBlueprint', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Blueprint to connect.')
-    second = ForeignKey('TaskBlueprint', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Blueprint to connect.')
+    first = ForeignKey('TaskBlueprint', related_name='first_scheduling_relation', on_delete=CASCADE, help_text='First Task Blueprint to connect.')
+    second = ForeignKey('TaskBlueprint', related_name='second_scheduling_relation', on_delete=CASCADE, help_text='Second Task Blueprint to connect.')
     placement = ForeignKey('SchedulingRelationPlacement', null=False, default="after", on_delete=PROTECT, help_text='Task scheduling relation placement.')
     time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')  
   
@@ -1083,8 +1057,8 @@ class TaskSchedulingRelationBlueprint(BasicCommon):
     
 
 class TaskSchedulingRelationDraft(BasicCommon):
-    first = ForeignKey('TaskDraft', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Draft to connect.')
-    second = ForeignKey('TaskDraft', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Draft to connect.')
+    first = ForeignKey('TaskDraft', related_name='first_scheduling_relation', on_delete=CASCADE, help_text='First Task Draft to connect.')
+    second = ForeignKey('TaskDraft', related_name='second_scheduling_relation', on_delete=CASCADE, help_text='Second Task Draft to connect.')
     placement = ForeignKey('SchedulingRelationPlacement', null=False, on_delete=PROTECT, help_text='Task scheduling relation placement.')
     time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')  
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py
index b786248f34773046434364d3ddc887ecd6d59e3a..05ec07e83f2f102caa1f65d1bcadf8ffb3447935 100644
--- a/SAS/TMSS/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/src/tmss/tmssapp/populate.py
@@ -35,12 +35,12 @@ def populate_choices(apps, schema_editor):
     :return: None
     '''
     for choice_class in [Role, Datatype, Dataformat, CopyReason,
-                         SubtaskState, SubtaskType, StationType, Algorithm, ScheduleMethod, SchedulingRelationPlacement,
+                         SubtaskState, SubtaskType, StationType, Algorithm, SchedulingRelationPlacement,
                          Flag, ProjectCategory, PeriodCategory, Quantity, TaskType]:
         choice_class.objects.bulk_create([choice_class(value=x.value) for x in choice_class.Choices])
 
 def populate_settings(apps, schema_editor):
-    Setting.objects.create(name=Flag.objects.get(value='allow_scheduling_observations'), value=True)
+    Setting.objects.create(name=Flag.objects.get(value='dynamic_scheduling_enabled'), value=False)
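+    # note: dynamic scheduling now starts out disabled; the Setting is presumably
+    # flipped at runtime (cf. the DYNAMIC_SCHEDULING_ENABLED Flag) to switch it on.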
 
 def populate_test_data():
     """
@@ -52,7 +52,7 @@ def populate_test_data():
         # only add (with  expensive setup time) example data when developing/testing and we're not unittesting
         if isTestEnvironment() or isDevelopmentEnvironment():
             from lofar.sas.tmss.tmss.exceptions import TMSSException
-            from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data, SchedulingUnitDraft_test_data
+            from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingUnitDraft_test_data
             from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft, create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft
             from lofar.sas.tmss.tmss.tmssapp.subtasks import schedule_subtask
             from lofar.common.json_utils import get_default_json_object_for_schema
@@ -60,48 +60,51 @@ def populate_test_data():
             constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints")
             constraints_spec = get_default_json_object_for_schema(constraints_template.schema)
 
-            strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
-
-            # create a Test Scheduling Set UC1 under project TMSS-Commissioning
-            tmss_project = models.Project.objects.get(name="TMSS-Commissioning")
-            for set_nr in range(2):
-                scheduling_set_data = SchedulingSet_test_data(name="Test Scheduling Set UC1 example %s" % (set_nr,), project=tmss_project)
-                scheduling_set = models.SchedulingSet.objects.create(**scheduling_set_data)
-                scheduling_set.tags = ["TEST", "UC1"]
-                scheduling_set.save()
-
-                logger.info('created test scheduling_set: %s', scheduling_set.name)
-
-                for unit_nr in range(5):
-
-                    # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template
-                    # a user might 'upload' a partial json-data blob, so add all the known defaults
-                    scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema)
-
-                    # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and were ready to use it!
-                    scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 test scheduling unit %s.%s" % (set_nr+1, unit_nr+1),
-                                                                                      scheduling_set=scheduling_set,
-                                                                                      requirements_template=strategy_template.scheduling_unit_template,
-                                                                                      requirements_doc=scheduling_unit_spec,
-                                                                                      observation_strategy_template=strategy_template,
-                                                                                      scheduling_constraints_doc=constraints_spec,
-                                                                                      scheduling_constraints_template=constraints_template)
-                    scheduling_unit_draft.tags = ["TEST", "UC1"]
-                    scheduling_unit_draft.save()
-
-                    logger.info('created test scheduling_unit_draft: %s', scheduling_unit_draft.name)
-
-                    try:
-                        if set_nr==0 and unit_nr==0:
-                            scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-                            scheduled_subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint=scheduling_unit_blueprint, task_blueprint__name='Calibrator Observation 1', specifications_template__type='observation').all()
-                            for subtask in scheduled_subtasks:
-                                schedule_subtask(subtask)
-                        else:
-                            create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-
-                    except TMSSException as e:
-                        logger.exception(e)
+            uc1_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
+            simple_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation")
+
+            projects = models.Project.objects.order_by('-priority_rank').all()
+            for tmss_project in projects:
+                if 'Commissioning' not in tmss_project.tags:
+                    continue
+
+                for scheduling_set in tmss_project.scheduling_sets.all():
+                    for unit_nr in range(2):
+                        for strategy_template in [uc1_strategy_template, simple_strategy_template]:
+                            # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template
+                            # a user might 'upload' a partial json-data blob, so add all the known defaults
+                            scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema)
+
+                            # limit target obs duration for demo data
+                            if strategy_template == uc1_strategy_template:
+                                scheduling_unit_spec['tasks']['Calibrator Observation 1']['specifications_doc']['duration'] = 2*60
+                                scheduling_unit_spec['tasks']['Target Observation']['specifications_doc']['duration'] = 2*3600
+                                scheduling_unit_spec['tasks']['Calibrator Observation 2']['specifications_doc']['duration'] = 2*60
+                            elif strategy_template == simple_strategy_template:
+                                scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = 5*60
+
+                            # set some constraints, so the dynamic scheduler has something to chew on.
+                            # DISABLED for now, because the 'daily' constraint solver is not ready yet.
+                            # constraints_spec['daily']['require_day'] = unit_nr%2==0
+                            # constraints_spec['daily']['require_night'] = unit_nr%2==1
+                            # constraints_spec['daily']['avoid_twilight'] = unit_nr%4>1
+
+                            # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and we're ready to use it!
+                            scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="%s %s %d" % ('UC1' if strategy_template==uc1_strategy_template else 'Obs', tmss_project.name, unit_nr+1),
+                                                                                              scheduling_set=scheduling_set,
+                                                                                              description="Test scheduling unit",
+                                                                                              requirements_template=strategy_template.scheduling_unit_template,
+                                                                                              requirements_doc=scheduling_unit_spec,
+                                                                                              observation_strategy_template=strategy_template,
+                                                                                              scheduling_constraints_doc=constraints_spec,
+                                                                                              scheduling_constraints_template=constraints_template)
+
+                            logger.info('created test scheduling_unit_draft: %s', scheduling_unit_draft.name)
+
+                            try:
+                                create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+                            except TMSSException as e:
+                                logger.exception(e)
     except ImportError:
         pass
 
@@ -136,14 +139,22 @@ def populate_cycles(apps, schema_editor):
 
 
 def populate_projects(apps, schema_editor):
-    tmss_project = models.Project.objects.create(name="TMSS-Commissioning",
-                                             description="Project for all TMSS tests and commissioning",
-                                             priority_rank=1.0,
-                                             can_trigger=False,
-                                             private_data=True,
-                                             expert=True,
-                                             filler=False)
-    tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")])
+    from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data
+
+    for name, rank in (("high", 3), ("normal", 2), ("low", 1)):
+        tmss_project = models.Project.objects.create(name=name,
+                                                 description="Project for all TMSS tests and commissioning (%s priority)" % (name,),
+                                                 priority_rank=rank,
+                                                 can_trigger=False,
+                                                 private_data=True,
+                                                 expert=True,
+                                                 filler=False)
+        tmss_project.tags = ["Commissioning"]
+        tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")])
+        tmss_project.save()
+
+        # for convenience, create a schedulingset for each project
+        models.SchedulingSet.objects.create(**SchedulingSet_test_data(name="Test Scheduling Set", project=tmss_project))
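+
+        # note: these ranks feed the '-priority_rank' ordering in populate_test_data,
+        # so "high" (3) is visited before "normal" (2) and "low" (1).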
 
 
 def populate_resources(apps, schema_editor):
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..d12982a89ed3b79fc306a4a26c2c667f60662e6a
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json
@@ -0,0 +1,33 @@
+{
+  "$id": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "datetime",
+  "description": "This schema defines datetime objects like timestamp and timewindow.",
+  "version": 1,
+  "type": "object",
+  "definitions": {
+    "timestamp": {
+      "description": "A timestamp defined in UTC",
+      "type": "string",
+      "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z?",
+      "format": "date-time"
+    },
+    "timewindow": {
+      "type": "object",
+      "description": "A timewindow interval: [from, to)",
+      "properties": {
+        "from": {
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp"
+        },
+        "to": {
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "from",
+        "to"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json
index 77a916705c8df50c069f5929e11fc03d5586acf7..9caf086d923d583720925e44d47dfbc255f95885 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json
@@ -5,29 +5,6 @@
   "description": "This schema defines the scheduling constraints for a scheduling unit",
   "version": 1,
   "definitions": {
-    "timestamp": {
-      "description": "A timestamp defined in UTC",
-      "type": "string",
-      "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z",
-      "format": "date-time"
-    },
-    "timewindow": {
-      "type": "object",
-      "description": "A timewindow interval: [from, to)",
-      "properties": {
-        "from": {
-          "$ref": "#/definitions/timestamp"
-        },
-        "to": {
-          "$ref": "#/definitions/timestamp"
-        }
-      },
-      "additionalProperties": false,
-      "required": [
-        "from",
-        "to"
-      ]
-    },
     "distance_on_sky": {
       "type": "number",
       "minimum": 0,
@@ -40,38 +17,39 @@
     }
   },
   "type": "object",
+  "default": {},
   "properties": {
     "scheduler": {
       "name": "Scheduler",
-      "description": "Which scheduling system will schedule this",
+      "description": "Schedule manually at the 'time.at' moment, of dynamically taking all time constraints into consideration.",
       "type": "string",
       "enum": [
         "manual",
-        "online"
+        "dynamic"
       ],
-      "default": "online"
+      "default": "dynamic"
     },
     "time": {
       "type": "object",
       "default": {},
       "properties": {
         "at": {
-          "description": "Start at this moment",
-          "$ref": "#/definitions/timestamp"
+          "description": "Start at this moment. Requires 'scheduler' to be set to 'manual'.",
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp"
         },
         "after": {
           "description": "Start after this moment",
-          "$ref": "#/definitions/timestamp"
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp"
         },
         "before": {
           "description": "End before this moment",
-          "$ref": "#/definitions/timestamp"
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp"
         },
         "between": {
           "description": "Run within one of these time windows",
           "type": "array",
           "items": {
-            "$ref": "#/definitions/timewindow"
+            "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timewindow"
           },
           "minItems":0,
           "uniqueItems":true,
@@ -81,7 +59,7 @@
           "description": "Do NOT run within any of these time windows",
           "type": "array",
           "items": {
-            "$ref": "#/definitions/timewindow"
+            "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timewindow"
           },
           "minItems":0,
           "uniqueItems":true,
@@ -129,6 +107,7 @@
         "transit_offset": {
           "description": "Offset window to LST centering",
           "type": "object",
+          "default": {},
           "properties": {
             "from": {
               "type": "number",
@@ -145,6 +124,7 @@
         },
         "min_distance": {
           "type": "object",
+          "default": {},
           "properties": {
             "sun": {
               "$ref": "#/definitions/distance_on_sky",
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
new file mode 100644
index 0000000000000000000000000000000000000000..cfa908a68e642538b03c398a65e6d752f2e81db2
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
@@ -0,0 +1,74 @@
+{
+  "tasks": {
+    "Observation": {
+      "description": "A simple short test observation",
+      "tags": [],
+      "specifications_doc": {
+        "QA": {
+          "plots": {
+            "enabled": true,
+            "autocorrelation": true,
+            "crosscorrelation": true
+          },
+          "file_conversion": {
+            "enabled": true,
+            "nr_of_subbands": -1,
+            "nr_of_timestamps": 256
+          }
+        },
+        "duration": 600,
+        "correlator": {
+          "storage_cluster": "CEP4",
+          "integration_time": 1,
+          "channels_per_subband": 64
+        },
+        "antenna_set": "HBA_DUAL_INNER",
+        "filter": "HBA_110_190",
+        "stations": ["CS001"],
+        "tile_beam": {
+          "direction_type": "J2000",
+          "angle1": 0.42,
+          "angle2": 0.43,
+          "angle3": 0.44
+        },
+        "SAPs": [
+          {
+            "name": "target0",
+            "digital_pointing": {
+              "direction_type": "J2000",
+          "angle1": 0.42,
+          "angle2": 0.43,
+          "angle3": 0.44
+            },
+            "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
+          }
+        ]
+      },
+      "specifications_template": "target observation"
+    }
+  },
+  "task_relations": [
+  ],
+  "task_scheduling_relations": [
+  ],
+  "parameters": [
+    {
+      "refs": [
+        "#/tasks/Observation/specifications_doc/duration"
+      ],
+      "name": "Duration"
+    },
+    {
+      "refs": [
+        "#/tasks/Observation/specifications_doc/SAPs/0/digital_pointing"
+      ],
+      "name": "Target Pointing"
+    },
+    {
+      "refs": [
+        "#/tasks/Observation/specifications_doc/tile_beam"
+      ],
+      "name": "Tile Beam"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
index 27f52ee1913374218f741e80cd33a3ac96a84e06..6e1d2c710101efe1a396935340fcdee899fe3ded 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
@@ -1,4 +1,8 @@
 [
+  {
+    "file_name": "common_schema_template-datetime-1.json",
+    "template": "common_schema_template"
+  },
   {
     "file_name": "common_schema_template-pointing-1.json",
     "template": "common_schema_template"
@@ -96,6 +100,10 @@
     "realtime": true,
     "queue": false
   },
+  {
+    "file_name": "scheduling_constraints_template-constraints-1.json",
+    "template": "scheduling_constraints_template"
+  },
   {
     "file_name": "UC1-scheduling-unit-observation-strategy.json",
     "template": "scheduling_unit_observing_strategy_template",
@@ -106,8 +114,13 @@
     "version": 1
   },
   {
-    "file_name": "scheduling_constraints_template-constraints-1.json",
-    "template": "scheduling_constraints_template"
+    "file_name": "simple-observation-scheduling-unit-observation-strategy.json",
+    "template": "scheduling_unit_observing_strategy_template",
+    "scheduling_unit_template_name": "scheduling unit",
+    "scheduling_unit_template_version": "1",
+    "name": "Simple Observation",
+    "description": "This observation strategy template defines a single simple Target observation.",
+    "version": 1
   },
   {
     "file_name": "sap_template-1.json",
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index 85d7bd21c54ca2ad78badd911131847c11fb3375..e70f7585074cf5c87edce6ae0c8d10f7475d712e 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -40,12 +40,6 @@ class AlgorithmSerializer(RelationalHyperlinkedModelSerializer):
         fields = '__all__'
 
 
-class ScheduleMethodSerializer(RelationalHyperlinkedModelSerializer):
-    class Meta:
-        model = models.ScheduleMethod
-        fields = '__all__'
-
-
 class SubtaskTemplateSerializer(AbstractTemplateSerializer):
     class Meta:
         model = models.SubtaskTemplate
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
index 279d0ae76212c863bdbb69146dc0cebe9c375612..0c215aa57d1915e0660bd31572775bd3992d00d9 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
@@ -259,7 +259,7 @@ class FlagSerializer(RelationalHyperlinkedModelSerializer):
         fields = '__all__'
 
 
-class SettingSerializer(RelationalHyperlinkedModelSerializer):
+class SettingSerializer(serializers.HyperlinkedModelSerializer):
     class Meta:
         model = models.Setting
         fields = '__all__'
@@ -334,7 +334,7 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.TaskDraft
         fields = '__all__'
-        extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', 'relative_start_time', 'relative_stop_time']
+        extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_scheduling_relation', 'second_scheduling_relation', 'duration', 'relative_start_time', 'relative_stop_time']
 
 
 class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer):
@@ -347,7 +347,7 @@ class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.TaskBlueprint
         fields = '__all__'
-        extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration',
+        extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_scheduling_relation', 'second_scheduling_relation', 'duration',
                         'start_time', 'stop_time', 'relative_start_time', 'relative_stop_time', 'status']
 
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
index d200d964073c2f1786dba6e8dadb53bfdc2be3e2..2c0a5a50dbce25b18ba327c0872c7fa31cc7ea56 100644
--- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
@@ -249,8 +249,6 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB
                      "task_blueprint": task_blueprint,
                      "specifications_template": subtask_template,
                      "tags": [],
-                     "priority": 1,
-                     "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
                      "cluster": Cluster.objects.get(name=cluster_name)
                      }
     subtask = Subtask.objects.create(**subtask_data)
@@ -312,8 +310,6 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
                             "task_blueprint": observation_subtask.task_blueprint,
                             "specifications_template": qafile_subtask_template,
                             "specifications_doc": qafile_subtask_spec,
-                            "priority": 1,
-                            "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
                             "cluster": observation_subtask.cluster}
     qafile_subtask = Subtask.objects.create(**qafile_subtask_data)
 
@@ -335,7 +331,6 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
 
 
 def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask:
-
     qafile_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value]
     if qafile_subtasks:
         qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks?
@@ -377,8 +372,6 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
                              "task_blueprint": qafile_subtask.task_blueprint,
                              "specifications_template": qaplots_subtask_template,
                              "specifications_doc": qaplots_subtask_spec_doc,
-                             "priority": 1,
-                             "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
                              "cluster": qafile_subtask.cluster}
     qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data)
 
@@ -424,8 +417,6 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri
                      "task_blueprint": task_blueprint,
                      "specifications_template": subtask_template,
                      "specifications_doc": subtask_specs,
-                     "priority": 1,
-                     "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
                      "cluster": Cluster.objects.get(name=cluster_name) }
     subtask = Subtask.objects.create(**subtask_data)
 
@@ -470,8 +461,6 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) ->
                     "task_blueprint": task_blueprint,
                     "specifications_template": subtask_template,
                     "specifications_doc": subtask_specs,
-                    "priority": 1,
-                    "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
                     "cluster": Cluster.objects.get(name=cluster_name)}
     subtask = Subtask.objects.create(**subtask_data)
 
@@ -519,6 +508,35 @@ def schedule_subtask(subtask: Subtask) -> Subtask:
 
         raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." %
                                          (subtask.pk, subtask.specifications_template.type.value))
+    except Exception as e:
+        try:
+            # set the subtask to state 'ERROR'...
+            subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.ERROR.value)
+            subtask.save()
+        except Exception as e2:
+            logger.error(e2)
+        finally:
+            # ... and re-raise the original exception (wrapped)
+            raise SubtaskSchedulingException("Error while scheduling subtask id=%d: %s" % (subtask.pk, str(e)))
+
+
+def unschedule_subtask(subtask: Subtask) -> Subtask:
+    '''unschedule the given subtask, removing all output dataproducts, and setting its state back to 'defined'.'''
+    if subtask.state.value != SubtaskState.Choices.SCHEDULED.value:
+        raise SubtaskSchedulingException("Cannot unschedule subtask id=%d because it is not SCHEDULED. Current state=%s" % (subtask.pk, subtask.state.value))
+
+    try:
+        subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.UNSCHEDULING.value)
+        subtask.save()
+
+        for output in subtask.outputs.all():
+            output.dataproducts.all().delete()
+        #TODO: delete dataproduct transforms
+
+        _assign_or_unassign_resources(subtask)
+
+        subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
+        subtask.save()
     except Exception as e:
         try:
             # set the subtask to state 'ERROR'...
@@ -530,6 +548,62 @@ def schedule_subtask(subtask: Subtask) -> Subtask:
             # ... and re-raise the original exception
             raise
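+
+# Illustrative flow (hypothetical id): unschedule_subtask(Subtask.objects.get(pk=42))
+# takes a SCHEDULED subtask through UNSCHEDULING back to DEFINED, deleting its output
+# dataproducts and unassigning resources via _assign_or_unassign_resources().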
 
+def unschedule_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint) -> list:
+    '''Convenience method: Unschedule (and return) all scheduled subtasks in the task_blueprint'''
+    scheduled_subtasks = list(task_blueprint.subtasks.filter(state__value=SubtaskState.Choices.SCHEDULED.value).all())
+    for subtask in scheduled_subtasks:
+        unschedule_subtask(subtask)
+    return scheduled_subtasks
+
+
+def schedule_subtask_and_update_successor_start_times(subtask: Subtask) -> Subtask:
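+    '''Convenience method: schedule the subtask, then shift its successors so they start after its stop_time.'''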
+    scheduled_subtask = schedule_subtask(subtask)
+    shift_successors_until_after_stop_time(scheduled_subtask)
+    return scheduled_subtask
+
+
+def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint, start_time: datetime):
+    for task_blueprint in scheduling_unit.task_blueprints.all():
+        defined_independent_subtasks = task_blueprint.subtasks.filter(state__value='defined').filter(inputs=None).all()
+        for subtask in defined_independent_subtasks:
+            update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprint.relative_start_time)
+
+
+def update_start_time_and_shift_successors_until_after_stop_time(subtask: Subtask, start_time: datetime):
+    subtask.start_time = start_time
+    subtask.stop_time = subtask.start_time + subtask.specified_duration
+    subtask.save()
+
+    shift_successors_until_after_stop_time(subtask)
+
+
+def shift_successors_until_after_stop_time(subtask: Subtask):
+    for successor in subtask.successors:
+        # by default, let the successor directly follow this task...
+        successor_start_time = subtask.stop_time
+
+        # ... but adjust it if there is a scheduling_relation with an offset.
+        # so, check if these successive subtasks have different task_blueprint parents
+        if subtask.task_blueprint.id != successor.task_blueprint.id:
+            relations = (TaskSchedulingRelationBlueprint.objects.filter(first=subtask.task_blueprint, second=successor.task_blueprint) |
+                         TaskSchedulingRelationBlueprint.objects.filter(first=successor.task_blueprint, second=subtask.task_blueprint)).all()
+            if relations:
+                # there should be only one scheduling relation between the tasks
+                relation = relations[0]
+                successor_start_time += timedelta(seconds=relation.time_offset)
+
+        # update the starttime and recurse to shift the successor successors as well
+        update_start_time_and_shift_successors_until_after_stop_time(successor, successor_start_time)
+
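+# Worked example (hypothetical times): if subtask A stops at 12:00:00 and its successor B
+# belongs to another task_blueprint connected by a scheduling relation with time_offset=60,
+# B is moved to start at 12:01:00; B's stop_time becomes start_time + specified_duration,
+# and the shift recurses into B's own successors.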
+
+def clear_defined_subtasks_start_stop_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint):
+    '''set start/stop times of all the defined subtasks in the scheduling unit to None'''
+    for task_blueprint in scheduling_unit.task_blueprints.all():
+        defined_subtasks = task_blueprint.subtasks.filter(state__value='defined').all()
+        for subtask in defined_subtasks:
+            subtask.start_time = None
+            subtask.stop_time = None
+            subtask.save()
+
 
 def check_prerequities_for_scheduling(subtask: Subtask) -> bool:
     if subtask.state.value != SubtaskState.Choices.DEFINED.value:
@@ -540,17 +614,11 @@ def check_prerequities_for_scheduling(subtask: Subtask) -> bool:
             raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s in not FINISHED but state=%s"
                                              % (subtask.pk, predecessor.pk, predecessor.state.value))
 
-    # check if settings allow scheduling observations
-    setting = Setting.objects.get(name='allow_scheduling_observations')
-    if not setting.value:
-        raise SubtaskSchedulingException("Cannot schedule subtask id=%d because setting %s=%s does not allow that." %
-                                         (subtask.pk, setting.name, setting.value))
-
     return True
 
-def _assign_resources(subtask: Subtask):
-    if subtask.state.value != SubtaskState.Choices.SCHEDULING.value:
-        raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in SCHEDULING state. "
+def _assign_or_unassign_resources(subtask: Subtask):
+    if subtask.state.value not in [SubtaskState.Choices.SCHEDULING.value, SubtaskState.Choices.UNSCHEDULING.value]:
+        raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in (UN)SCHEDULING state. "
                                          "Current state=%s" % (subtask.pk, subtask.state.value))
 
     def create_ra_specification(_subtask):
@@ -558,7 +626,7 @@ def _assign_resources(subtask: Subtask):
         return { 'tmss_id': _subtask.id,
                  'task_type': _subtask.specifications_template.type.value.lower(),
                  'task_subtype': parset_dict.get("Observation.processSubtype","").lower(),
-                 'status': 'prescheduled',
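+                 # assumption: 'prescheduled' tells the RA to claim resources, while 'approved'
+                 # (used when unscheduling) lets it release/skip the claims for this specification.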
+                 'status': 'prescheduled' if subtask.state.value == SubtaskState.Choices.SCHEDULING.value else 'approved',
                  'starttime': _subtask.start_time,
                  'endtime': _subtask.stop_time,
                  'cluster': _subtask.cluster.name,
@@ -696,7 +764,7 @@ def get_previous_related_task_blueprint_with_time_offset(task_blueprint):
     previous_related_task_blueprint = None
     time_offset = 0
 
-    scheduling_relations = list(task_blueprint.first_to_connect.all()) + list(task_blueprint.second_to_connect.all())
+    scheduling_relations = list(task_blueprint.first_scheduling_relation.all()) + list(task_blueprint.second_scheduling_relation.all())
     for scheduling_relation in scheduling_relations:
         if scheduling_relation.first.id == task_blueprint.id and scheduling_relation.placement.value == "after":
             previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id)
@@ -708,38 +776,6 @@ def get_previous_related_task_blueprint_with_time_offset(task_blueprint):
 
     return previous_related_task_blueprint, time_offset
 
-# todo: maybe this can now be replaced by subtask.relative_start_time
-def calculate_start_time(observation_subtask: Subtask):
-    """
-    Calculate the start time of an observation subtask. It should calculate the starttime in case of 'C-T-C train'
-    The start time of an observation depends on the start_time+duration and offset time of the previous observation
-    and so its scheduling relations should be known.
-    If there is no previous observation the 'default' start time is in two minutes from now
-    For demo purposes, will be changed into dynamic scheduled in the future
-    Note that the method is not robust now when previous start time is unknown. Also parallel observations are
-    not supported yet
-    :param observation_subtask:
-    :return: start_time (utc time)
-    """
-    previous_related_task_blueprint, time_offset = get_previous_related_task_blueprint_with_time_offset(observation_subtask.task_blueprint)
-    if previous_related_task_blueprint is None:
-        # This is the first observation so take start time 2 minutes from now
-        now = datetime.utcnow()
-        next_start_time = now + timedelta(minutes=+2, seconds=-now.second, microseconds=-now.microsecond)
-    else:
-        # Get the duration of last/previous observation
-        duration_in_sec = previous_related_task_blueprint.specifications_doc["duration"]
-        logger.info("Duration of previous observation '%s' (id=%s) is %d seconds",
-                    previous_related_task_blueprint.pk, previous_related_task_blueprint.pk, duration_in_sec)
-        # Get the previous observation subtask, should actually be one
-        lst_previous_subtasks_obs = [st for st in previous_related_task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value]
-        previous_subtask_obs = lst_previous_subtasks_obs[0]
-        logger.info("The previous observation subtask is id=%s", previous_subtask_obs.pk)
-        if previous_subtask_obs.start_time is None:
-            raise SubtaskSchedulingException("Cannot compute start_time for subtask id=%s because the its predecessor id=%s has not start_time" %(observation_subtask.id, previous_subtask_obs.id))
-        next_start_time = previous_subtask_obs.start_time + timedelta(seconds=duration_in_sec+time_offset)
-    return next_start_time
-
 
 def schedule_observation_subtask(observation_subtask: Subtask):
     ''' Schedule the given observation_subtask
@@ -761,17 +797,18 @@ def schedule_observation_subtask(observation_subtask: Subtask):
     observation_subtask.save()
 
     # step 1a: check start/stop times
+    # the start time must be known by now; if not, raise. The user and/or scheduling service should first supply a properly calculated/estimated start_time.
     if observation_subtask.start_time is None:
-        next_start_time = calculate_start_time(observation_subtask)
-        logger.info("observation id=%s has no starttime. assigned default: %s", observation_subtask.pk, formatDatetime(next_start_time))
-        observation_subtask.start_time = next_start_time
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no start_time" % (observation_subtask.pk,
+                                                                                                                 observation_subtask.specifications_template.type))
 
-    if observation_subtask.stop_time is None:
-        duration_in_sec = observation_subtask.task_blueprint.specifications_doc["duration"]
-        logger.info("Duration of observation id=%s is %d seconds", observation_subtask.pk, duration_in_sec)
-        stop_time = observation_subtask.start_time + timedelta(seconds=duration_in_sec)
-        logger.info("observation id=%s has no stop_time. assigned default: %s", observation_subtask.pk, formatDatetime(stop_time))
-        observation_subtask.stop_time = stop_time
+    if observation_subtask.specified_duration < timedelta(seconds=1):
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because its specified duration is too short: %s" % (observation_subtask.pk,
+                                                                                                                                    observation_subtask.specifications_template.type,
+                                                                                                                                    observation_subtask.specified_duration))
+
+    # always update the stop_time according to the spec
+    observation_subtask.stop_time = observation_subtask.start_time + observation_subtask.specified_duration
 
     # step 2: define input dataproducts
     # TODO: are there any observations that take input dataproducts?
@@ -802,23 +839,22 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                                       }
                                                     },
                                  specifications_template=SAPTemplate.objects.get(name="SAP"))
-        sap.save()
-        for sb_nr in pointing['subbands']:
-            dp = Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
-                                       directory=directory,
-                                       dataformat=Dataformat.objects.get(value="MeasurementSet"),
-                                       datatype=Datatype.objects.get(value="visibilities"),  # todo: is this correct?
-                                       producer=subtask_output,
-                                       specifications_doc={"sap": [str(sap_nr)]},  # todo: set correct value. This will be provided by the RA somehow
-                                       specifications_template=dataproduct_specifications_template,
-                                       feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
-                                       feedback_template=dataproduct_feedback_template,
-                                       size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
-                                       expected_size=1024*1024*1024*sb_nr,
-                                       sap=sap)
-            dp.save()
+
+        Dataproduct.objects.bulk_create([Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
+                                                     directory=directory,
+                                                     dataformat=Dataformat.objects.get(value="MeasurementSet"),
+                                                     datatype=Datatype.objects.get(value="visibilities"),
+                                                     producer=subtask_output,
+                                                     specifications_doc={"sap": [str(sap_nr)]},
+                                                     specifications_template=dataproduct_specifications_template,
+                                                     feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                                                     feedback_template=dataproduct_feedback_template,
+                                                     size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
+                                                     expected_size=1024*1024*1024*sb_nr,
+                                                     sap=sap) for sb_nr in pointing['subbands']])
+
     # step 4: resource assigner (if possible)
-    _assign_resources(observation_subtask)
+    _assign_or_unassign_resources(observation_subtask)
 
     # TODO: TMSS-382: evaluate the scheduled stations and see if the requirements given in the subtask.task_blueprint.specifications_doc are met for the station_groups and max_nr_missing.
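# The hunk above replaces a per-subband Dataproduct.objects.create() loop (one
# INSERT plus a save() per row) with a single bulk_create() call. A condensed
# sketch of the pattern, assuming the TMSS Django models and the surrounding
# loop variables (observation_subtask, sap, sap_nr, pointing); note that the
# Dataformat/Datatype lookups could also be hoisted out of the comprehension,
# as the pipeline hunk further below does for Dataformat, so they run once
# instead of once per subband:
measurement_set = Dataformat.objects.get(value="MeasurementSet")  # one query, reused for every row
visibilities = Datatype.objects.get(value="visibilities")
Dataproduct.objects.bulk_create([Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
                                             dataformat=measurement_set,
                                             datatype=visibilities,
                                             sap=sap)
                                 for sb_nr in pointing['subbands']])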
 
@@ -854,10 +890,13 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
         logger.info("pipeline id=%s has no starttime. assigned default: %s", pipeline_subtask.pk, formatDatetime(now))
         pipeline_subtask.start_time = now
 
-    if pipeline_subtask.stop_time is None:
-        stop_time = pipeline_subtask.start_time  + timedelta(hours=+1)
-        logger.info("pipeline id=%s has no stop_time. assigned default: %s", pipeline_subtask.pk, formatDatetime(stop_time))
-        pipeline_subtask.stop_time = stop_time
+    if pipeline_subtask.specified_duration < timedelta(seconds=1):
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because its specified duration is too short: %s" % (pipeline_subtask.pk,
+                                                                                                                                    pipeline_subtask.specifications_template.type,
+                                                                                                                                    pipeline_subtask.specified_duration))
+
+    # always update the stop_time according to the spec
+    pipeline_subtask.stop_time = pipeline_subtask.start_time + pipeline_subtask.specified_duration
 
     # step 2: link input dataproducts
     if pipeline_subtask.inputs.count() == 0:
@@ -881,31 +920,35 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
 
         # step 3: create output dataproducts, and link these to the output
         # TODO: create them from the spec, instead of "copying" the input filename
-        output_dps = []
+        dataformat = Dataformat.objects.get(value="MeasurementSet")
+        input_dps = list(pipeline_subtask_input.dataproducts.all())
+        output_dp_objects = []
-        for input_dp in pipeline_subtask_input.dataproducts.all():
+        for input_dp in input_dps:  # iterate the snapshot taken above, so the zip with output_dps below stays aligned
             if '_' in input_dp.filename and input_dp.filename.startswith('L'):
                 filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename.split('_', 1)[1])
             else:
                 filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename)
 
-            output_dp = Dataproduct.objects.create(filename=filename,
-                                                   directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)),
-                                                   dataformat=Dataformat.objects.get(value="MeasurementSet"),
-                                                   datatype=Datatype.objects.get(value="visibilities"),  # todo: is this correct?
-                                                   producer=pipeline_subtask_output,
-                                                   specifications_doc={},
-                                                   specifications_template=dataproduct_specifications_template,
-                                                   feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
-                                                   feedback_template=dataproduct_feedback_template,
-                                                   sap=input_dp.sap)
-            output_dp.save()
-            DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False)
-            output_dps.append(output_dp)
-
+            output_dp = Dataproduct(filename=filename,
+                                    directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)),
+                                    dataformat=dataformat,
+                                    datatype=Datatype.objects.get(value="visibilities"),  # todo: is this correct?
+                                    producer=pipeline_subtask_output,
+                                    specifications_doc=get_default_json_object_for_schema(dataproduct_specifications_template.schema),
+                                    specifications_template=dataproduct_specifications_template,
+                                    feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                                    feedback_template=dataproduct_feedback_template,
+                                    sap=input_dp.sap)
+            output_dp_objects.append(output_dp)
+
+        output_dps = Dataproduct.objects.bulk_create(output_dp_objects)
         pipeline_subtask_output.dataproducts.set(output_dps)
 
+        transforms = [DataproductTransform(input=input_dp, output=output_dp, identity=False) for input_dp,output_dp in zip(input_dps, output_dps)]
+        DataproductTransform.objects.bulk_create(transforms)
+
         # step 4: resource assigner (if possible)
-        _assign_resources(pipeline_subtask)
+        _assign_or_unassign_resources(pipeline_subtask)
 
         # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
         pipeline_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
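# Like the observation hunk, the pipeline rewrite above batches the INSERTs: one
# bulk_create() for the output Dataproducts and one for the DataproductTransforms
# linking each input to its output. The zip() pairing relies on two assumptions:
# bulk_create() returns the created objects with their primary keys set (PostgreSQL
# supports this), and input_dps and output_dp_objects were built in the same order.
# The pairing pattern, condensed:
output_dps = Dataproduct.objects.bulk_create(output_dp_objects)
transforms = [DataproductTransform(input=in_dp, output=out_dp, identity=False)
              for in_dp, out_dp in zip(input_dps, output_dps)]
DataproductTransform.objects.bulk_create(transforms)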
@@ -973,14 +1016,17 @@ def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBluepri
     create_subtasks_from_task_blueprint(task_blueprint)
     return schedule_independent_subtasks_in_task_blueprint(task_blueprint)
 
-def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint) -> [Subtask]:
-    '''Convenience method: Schedule the subtasks in the task_blueprint that are not dependend on predecessors'''
-    subtasks = list(task_blueprint.subtasks.all())
 
-    for subtask in subtasks:
-        if len(subtask.predecessors.all()) == len(subtask.predecessors.filter(state__value='finished').all()):
-            schedule_subtask(subtask)
-    return subtasks
+def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint, start_time: datetime=None) -> [Subtask]:
+    '''Convenience method: Schedule (and return) the subtasks in the task_blueprint that are not dependent on any predecessors'''
+    independent_subtasks = list(Subtask.independent_subtasks().filter(task_blueprint_id=task_blueprint.id, state__value=SubtaskState.Choices.DEFINED.value).all())
+
+    for subtask in independent_subtasks:
+        if start_time is not None:
+            subtask.start_time = start_time
+        schedule_subtask_and_update_successor_start_times(subtask)
+
+    return independent_subtasks
 
 
 def _generate_subtask_specs_from_preprocessing_task_specs(preprocessing_task_specs, default_subtask_specs):
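# A hypothetical usage sketch of the reworked convenience method above, in a
# Django/TMSS context ('task_blueprint' is illustrative): only subtasks without
# predecessors and still in state DEFINED are scheduled, optionally at a common
# start_time, and successors get their start_times updated along the way.
from datetime import datetime, timedelta

scheduled = schedule_independent_subtasks_in_task_blueprint(task_blueprint,
                                                            start_time=datetime.utcnow() + timedelta(minutes=5))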
diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py
index 7f7ad51bd577466945b176d334aaeda4feffa880..987f89153e14aa5f91c90993cb00ee70b780dd79 100644
--- a/SAS/TMSS/src/tmss/tmssapp/tasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py
@@ -1,12 +1,14 @@
 from lofar.sas.tmss.tmss.exceptions import *
 from lofar.sas.tmss.tmss.tmssapp import models
-from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint
-from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, SchedulingUnitDraft, TaskDraft, SchedulingRelationPlacement
+from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtasks_in_task_blueprint
+from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint
 from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint
 from functools import cmp_to_key
+import os
 from copy import deepcopy
 from lofar.common.json_utils import add_defaults_to_json_object_for_schema
 import logging
+from datetime import datetime
 
 logger = logging.getLogger(__name__)
 
@@ -18,8 +20,8 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_
     logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s')", scheduling_unit_draft.pk, scheduling_unit_draft.name)
 
     scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create(
-        name="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.name,),
-        description="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.description or "<no description>",),
+        name=scheduling_unit_draft.name,
+        description=scheduling_unit_draft.description,
         requirements_doc=scheduling_unit_draft.requirements_doc,
         do_cancel=False,
         draft=scheduling_unit_draft,
@@ -90,7 +92,7 @@ def create_scheduling_unit_draft_from_scheduling_unit_blueprint(scheduling_unit_
 
 
 def copy_task_draft(task_draft: models.TaskDraft, copy_reason: str) -> models.TaskDraft:
-    
+
     task_template_name = task_draft.specifications_template
     task_template = models.TaskTemplate.objects.get(name=task_template_name)
 
@@ -114,7 +116,7 @@ def copy_task_blueprint_to_task_draft(task_blueprint:models.TaskBlueprint ) -> m
     :raises Exception if instantiate fails.
     """
     logger.debug("Create Task Draft  from Task Blueprint (id=%s)", task_blueprint.pk)
-    
+
     original_task_draft = task_blueprint.draft
     task_template_name = original_task_draft.specifications_template
     task_template = models.TaskTemplate.objects.get(name=task_template_name)
@@ -276,7 +278,7 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model
                                 task_relation_blueprint.pk, producing_task_blueprint.pk, consuming_task_blueprint.pk,)
 
     # Do the same 'trick' for Task Scheduling Relation Draft to Blueprint
-    task_draft_scheduling_relations = list(task_draft.first_to_connect.all()) + list(task_draft.second_to_connect.all())
+    task_draft_scheduling_relations = list(task_draft.first_scheduling_relation.all()) + list(task_draft.second_scheduling_relation.all())
     for task_scheduling_relation_draft in task_draft_scheduling_relations:
         for first_task_blueprint in task_scheduling_relation_draft.first.task_blueprints.all():
             for second_task_blueprint in task_scheduling_relation_draft.second.task_blueprints.all():
@@ -360,21 +362,35 @@ def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(schedulin
 
     # refresh so all related fields are updated.
     scheduling_unit_blueprint.refresh_from_db()
+
     return scheduling_unit_blueprint
 
 
 def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint:
     '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependent on predecessors'''
     scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint)
+    scheduling_unit_blueprint = schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint)
+    return scheduling_unit_blueprint
+
+
+def schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint, start_time: datetime=None) -> models.SchedulingUnitBlueprint:
+    '''Convenience method: Schedule the subtasks in the scheduling_unit_blueprint that are not dependent on predecessors'''
     task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all())
 
-    # sort task_blueprint(s) in 'data-flow'-order,
-    # because successors can depend on predecessors, so the first tbp's need to be subtask'd first.
-    task_blueprints.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0))
+    for task_blueprint in task_blueprints:
+        schedule_independent_subtasks_in_task_blueprint(task_blueprint, start_time=None if start_time is None else start_time + task_blueprint.relative_start_time)
+
+    scheduling_unit_blueprint.refresh_from_db()
+    return scheduling_unit_blueprint
+
+
+def unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint:
+    '''Convenience method: Unschedule all scheduled subtasks in the scheduling_unit_blueprint'''
+    task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all())
 
     for task_blueprint in task_blueprints:
-        schedule_independent_subtasks_in_task_blueprint(task_blueprint)
+        unschedule_subtasks_in_task_blueprint(task_blueprint)
 
-    # refresh so all related fields are updated.
     scheduling_unit_blueprint.refresh_from_db()
     return scheduling_unit_blueprint
+
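# How the start_time offset above composes (a sketch, assuming each TaskBlueprint
# exposes relative_start_time as a timedelta within its scheduling unit): the unit
# gets one anchor start_time and each task is scheduled at anchor + its offset.
from datetime import datetime, timedelta

anchor = datetime(2020, 1, 1, 12, 0, 0)
offsets = [timedelta(0), timedelta(minutes=10)]      # illustrative relative_start_times
task_start_times = [anchor + offset for offset in offsets]
# -> [datetime(2020, 1, 1, 12, 0), datetime(2020, 1, 1, 12, 10)]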
diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py
index 851a625197765c401e1cc54db50c4b33d986b2e7..9e9b71ae635b4ffb0a8995b38138986ffaa72573 100644
--- a/SAS/TMSS/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/src/tmss/tmssapp/views.py
@@ -35,7 +35,7 @@ def subtask_parset(request, subtask_pk:int):
 
 
 def index(request):
-    return render(request, os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/index.html'))
+    return render(request, os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp/build/index.html'))
     #return render(request, "../../../frontend/frontend_poc/build/index.html")
 
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
index 2bc7b1814e5c667bcdd9fae7bea322e7696cdf82..bcd3eaf22671451c5d005e36c178c56f66b1c0f3 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
@@ -35,8 +35,6 @@ from lofar.sas.tmss.tmss.tmssapp.renderers import PlainTextRenderer
 from rest_framework.views import APIView
 from rest_framework.decorators import api_view, renderer_classes
 
-from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct
-from lofar.lta.sip import visualizer
 
 class TextPlainAutoSchema(SwaggerAutoSchema):
     def get_produces(self):
@@ -79,10 +77,6 @@ class AlgorithmViewSet(LOFARViewSet):
     queryset = models.Algorithm.objects.all()
     serializer_class = serializers.AlgorithmSerializer
 
-class ScheduleMethodViewSet(LOFARViewSet):
-    queryset = models.ScheduleMethod.objects.all()
-    serializer_class = serializers.ScheduleMethodSerializer
-
 class SubtaskTemplateFilter(filters.FilterSet):
     class Meta:
         model = models.SubtaskTemplate
@@ -145,7 +139,8 @@ class SubtaskViewSet(LOFARViewSet):
     filter_class = SubTaskFilter
     ordering = ('start_time',)
 
-    queryset = queryset.prefetch_related('state')
+    # performance boost: select the related models in a single db call.
+    queryset = queryset.select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user')
 
     @swagger_auto_schema(auto_schema=TextPlainAutoSchema,
                          responses={200: 'A LOFAR parset for this subtask (as plain text)',
@@ -292,6 +287,9 @@ class SubtaskNestedViewSet(LOFARNestedViewSet):
     filter_class = SubTaskFilter
     ordering = ('start_time',)
 
+    # performance boost: select the related models in a single db call.
+    queryset = queryset.select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user')
+
     def get_queryset(self):
         if 'task_blueprint_id' in self.kwargs:
             task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id'])
@@ -318,6 +316,7 @@ class DataproductViewSet(LOFARViewSet):
     @action(methods=['get'], detail=True, url_name="sip")
     def sip(self, request, pk=None):
         dataproduct = get_object_or_404(models.Dataproduct, pk=pk)
+        from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct
         return HttpResponse(generate_sip_for_dataproduct(dataproduct).get_prettyxml(), content_type='application/xml')
 
     @swagger_auto_schema(responses={200: 'The SIP graph for this dataproduct',
@@ -326,6 +325,8 @@ class DataproductViewSet(LOFARViewSet):
     @action(methods=['get'], detail=True, url_name="sip_graph")
     def sip_graph(self, request, pk=None):
         dataproduct = get_object_or_404(models.Dataproduct, pk=pk)
+        from lofar.lta.sip import visualizer
+        from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct
         return HttpResponse(visualizer.visualize_sip(generate_sip_for_dataproduct(dataproduct)), content_type='image/svg+xml')
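# The sip/visualizer imports above moved from module level into the view methods.
# A minimal sketch of this deferred-import pattern (stdlib module used for
# illustration): the import is only resolved on the first call, so its cost is
# not paid at module load time and circular-import problems between the viewsets
# and the sip adapter are sidestepped.
def render_document():
    from xml.dom import minidom   # resolved here, on first call, not at import time
    return minidom.Document().toxml()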
 
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py
index 6e292b61afa714df6356cf528da69ebc18a555f3..f4f1e95ddbe38152855429597c6360be6448e4dc 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py
@@ -318,6 +318,10 @@ class SchedulingUnitDraftViewSet(LOFARViewSet):
                        .select_related('copy_reason') \
                        .select_related('scheduling_set')
 
+    # use select_related for forward related references
+    queryset = queryset.select_related('copy_reason', 'scheduling_set', 'requirements_template', 'observation_strategy_template', 'scheduling_constraints_template')
+
+
     @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header',
                                     403: 'forbidden'},
                          operation_description="Carve SchedulingUnitDraft in stone, and make an (uneditable) blueprint out of it.")
@@ -594,6 +598,9 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet):
     # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
     queryset = queryset.prefetch_related('task_blueprints')
 
+    # use select_related for forward related references
+    queryset = queryset.select_related('requirements_template', 'draft')
+
     @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints and (scheduled) Subtasks.",
                                     403: 'forbidden'},
                          operation_description="Create TaskBlueprint(s) for this scheduling unit, create subtasks, and schedule the ones that are not dependend on predecessors.")
@@ -662,16 +669,16 @@ class TaskDraftViewSet(LOFARViewSet):
     serializer_class = serializers.TaskDraftSerializer
 
     # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
-    queryset = queryset.prefetch_related('first_to_connect') \
-                       .prefetch_related('second_to_connect')\
+    queryset = queryset.prefetch_related('first_scheduling_relation') \
+                       .prefetch_related('second_scheduling_relation')\
                        .prefetch_related('produced_by')\
                        .prefetch_related('consumed_by')\
                        .prefetch_related('task_blueprints')\
                        .prefetch_related('copied_from')
 
     # prefetch nested references in reverse models to avoid duplicate lookup queries
-    queryset = queryset.prefetch_related('first_to_connect__placement') \
-                       .prefetch_related('second_to_connect__placement')
+    queryset = queryset.prefetch_related('first_scheduling_relation__placement') \
+                       .prefetch_related('second_scheduling_relation__placement')
 
     # select all references to other models to avoid even more duplicate queries
     queryset = queryset.select_related('copies') \
@@ -771,15 +778,19 @@ class TaskBlueprintViewSet(LOFARViewSet):
     serializer_class = serializers.TaskBlueprintSerializer
 
     # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
-    queryset = queryset.prefetch_related('first_to_connect')\
-                       .prefetch_related('second_to_connect')\
+    queryset = queryset.prefetch_related('first_scheduling_relation')\
+                       .prefetch_related('second_scheduling_relation')\
                        .prefetch_related('produced_by')\
                        .prefetch_related('consumed_by')\
                        .prefetch_related('subtasks')
 
     # prefetch nested references in reverse models to avoid duplicate lookup queries
-    queryset = queryset.prefetch_related('first_to_connect__placement') \
-                       .prefetch_related('second_to_connect__placement')
+    queryset = queryset.prefetch_related('first_scheduling_relation__placement') \
+                       .prefetch_related('second_scheduling_relation__placement') \
+                       .prefetch_related('subtasks__specifications_template')
+
+    # use select_related for forward related references
+    queryset = queryset.select_related('draft', 'specifications_template', 'specifications_template__type', 'scheduling_unit_blueprint')
 
     @swagger_auto_schema(responses={201: "This TaskBlueprint, with its created subtasks",
                                     403: 'forbidden'},
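# The queryset tuning in this file follows one rule of thumb: select_related()
# for forward foreign keys (a SQL JOIN, so one query) and prefetch_related() for
# reverse or many-to-many relations (one extra query whose results are joined in
# Python). A minimal sketch using the TMSS models touched above:
blueprints = (TaskBlueprint.objects
              .select_related('draft', 'specifications_template')   # FK -> JOIN, same query
              .prefetch_related('subtasks'))                        # reverse FK -> one extra query
for bp in blueprints:
    bp.draft                  # no extra query, already joined
    list(bp.subtasks.all())   # served from the prefetch cache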
diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py
index 4da12093c1a60461d210a3441bca7c86b8409b9b..1327d5b5a41ba2e80d100c254ef60c7ddc91aa0b 100644
--- a/SAS/TMSS/src/tmss/urls.py
+++ b/SAS/TMSS/src/tmss/urls.py
@@ -32,9 +32,6 @@ from datetime import datetime
 import os
 from material.frontend import urls as frontend_urls
 
-from viewflow.flow.viewset import FlowViewSet
-from .workflowapp import viewsets as workflow_viewsets
-
 
 #
 # Django style patterns
@@ -177,7 +174,6 @@ router.register(r'subtask_state', viewsets.SubtaskStateViewSet)
 router.register(r'subtask_type', viewsets.SubtaskTypeViewSet)
 router.register(r'station_type', viewsets.StationTypeViewSet)
 router.register(r'algorithm', viewsets.AlgorithmViewSet)
-router.register(r'schedule_method', viewsets.ScheduleMethodViewSet)
 router.register(r'scheduling_relation_placement', viewsets.SchedulingRelationPlacement)
 
 # templates
@@ -204,21 +200,7 @@ router.register(r'user', viewsets.UserViewSet)
 router.register(r'sap', viewsets.SAPViewSet)
 router.register(r'sip_identifier', viewsets.SIPidentifierViewSet)
 
-# --- 
-# QA Workflow steps
-viewflow_urlpatterns = []
-
-viewflow_router = OptionalSlashRouter()
-viewflow_router.APIRootView = TMSSAPIRootView
-
-viewflow_router.register('scheduling_unit_flow/su', workflow_viewsets.SchedulingUnitFlowViewSet, basename='su')
-viewflow_router.register('scheduling_unit_flow/qa_reporting_to', workflow_viewsets.QAReportingTOViewSet, basename='qa_reporting_to')
-viewflow_router.register('scheduling_unit_flow/qa_reporting_sos', workflow_viewsets.QAReportingSOSViewSet, basename='qa_reporting_sos')
-viewflow_router.register('scheduling_unit_flow/qa_pi_verification', workflow_viewsets.PIVerificationViewSet, basename='qa_pi_verification')
-viewflow_router.register('scheduling_unit_flow/qa_decide_acceptance', workflow_viewsets.DecideAcceptanceViewSet, basename='qa_decide_acceptance')
-viewflow_router.register('scheduling_unit_flow/qa_scheduling_unit_process', workflow_viewsets.SchedulingUnitProcessViewSet, basename='qa_scheduling_unit_process')
-
-viewflow_urlpatterns.extend(viewflow_router.urls)
+# ---
 
 urlpatterns.extend(router.urls)
 
@@ -226,12 +208,31 @@ frontend_urlpatterns = [
     path("", views.index, name="index")
 ]
 
-urlpatterns = [ url(r'^api$', RedirectView.as_view(url='/api/')),
-                url(r'^api/', include(urlpatterns)),
-                url(r'^oidc$', RedirectView.as_view(url='/oidc/')),
+
+urlpatterns = [url(r'^api$', RedirectView.as_view(url='/api/')),
+                url(r'^api/', include(urlpatterns)),
+                url(r'^oidc$', RedirectView.as_view(url='/oidc/')),
                 url(r'^oidc/', include('mozilla_django_oidc.urls')), 
-                url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)),
-                url(r'^workflow_api/',  include(viewflow_urlpatterns)),
                 url(r'', include(frontend_urls)),
                 url(r'^.*', include(frontend_urlpatterns)),
 ]
+
+
+
+# ---
+# QA Workflow steps
+if bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)):
+    from .workflowapp import viewsets as workflow_viewsets
+
+    viewflow_router = OptionalSlashRouter()
+    viewflow_router.APIRootView = TMSSAPIRootView
+
+    viewflow_router.register('scheduling_unit_flow/su', workflow_viewsets.SchedulingUnitFlowViewSet, basename='su')
+    viewflow_router.register('scheduling_unit_flow/qa_reporting_to', workflow_viewsets.QAReportingTOViewSet, basename='qa_reporting_to')
+    viewflow_router.register('scheduling_unit_flow/qa_reporting_sos', workflow_viewsets.QAReportingSOSViewSet, basename='qa_reporting_sos')
+    viewflow_router.register('scheduling_unit_flow/qa_pi_verification', workflow_viewsets.PIVerificationViewSet, basename='qa_pi_verification')
+    viewflow_router.register('scheduling_unit_flow/qa_decide_acceptance', workflow_viewsets.DecideAcceptanceViewSet, basename='qa_decide_acceptance')
+    viewflow_router.register('scheduling_unit_flow/qa_scheduling_unit_process', workflow_viewsets.SchedulingUnitProcessViewSet, basename='qa_scheduling_unit_process')
+
+    urlpatterns.extend([url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)),
+                        url(r'^workflow_api/', include(viewflow_router.urls))])
\ No newline at end of file
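# A caveat on the TMSS_ENABLE_VIEWFLOW guard above: bool() of any non-empty string
# is True, so TMSS_ENABLE_VIEWFLOW=False in the environment would still enable the
# workflow URLs. A stricter parse (a sketch, not what this patch does) would be:
import os

def env_flag(name: str, default: bool=False) -> bool:
    value = os.environ.get(name)
    if value is None:
        return default
    return value.strip().lower() in ('true', '1', 'yes', 'on')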
diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py
index ec9de7be402f90143072687153d34c53c6bac89a..ef00fc0a9956c05a7ce6425db34220e3777165ff 100755
--- a/SAS/TMSS/test/t_scheduling.py
+++ b/SAS/TMSS/test/t_scheduling.py
@@ -34,25 +34,21 @@ if skip_integration_tests():
 # before we import any django modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set.
 # import and start an isolated RATestEnvironment and TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports)
 # this automagically sets the required  DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars.
-from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment
 from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
 
-ra_test_env = RATestEnvironment()
-tmss_test_env = TMSSTestEnvironment()
+tmss_test_env = TMSSTestEnvironment(populate_schemas=True, populate_test_data=False, start_ra_test_environment=True,
+                                    start_postgres_listener=False, start_subtask_scheduler=False, start_dynamic_scheduler=False,
+                                    enable_viewflow=False)
 
 try:
-    ra_test_env.start()
     tmss_test_env.start()
-    tmss_test_env.populate_schemas()
 except:
-    ra_test_env.stop()
     tmss_test_env.stop()
     exit(1)
 
 # tell unittest to stop (and automagically cleanup) the test database once all testing is done.
 def tearDownModule():
     tmss_test_env.stop()
-    ra_test_env.stop()
 
 
 from lofar.sas.tmss.test.tmss_test_data_django_models import *
@@ -73,20 +69,23 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
     as string (no object)
     For these testcases 'pipeline control' and 'observation control' are relevant
     """
+    task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(specifications_template=models.TaskTemplate.objects.get(name='target observation' if subtask_type_value=='observation' else 'preprocessing pipeline')))
     subtask_template_obj = models.SubtaskTemplate.objects.get(name="%s control" % subtask_type_value)
     subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value)
-    subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj)
+    subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj, task_blueprint=task_blueprint)
     return models.Subtask.objects.create(**subtask_data)
 
 
 class SchedulingTest(unittest.TestCase):
     def setUp(self):
         # clean all specs/tasks/claims in RADB (cascading delete)
-        for spec in ra_test_env.radb.getSpecifications():
-            ra_test_env.radb.deleteSpecification(spec['id'])
+        for spec in tmss_test_env.ra_test_environment.radb.getSpecifications():
+            tmss_test_env.ra_test_environment.radb.deleteSpecification(spec['id'])
 
     def test_schedule_observation_subtask_with_enough_resources_available(self):
         with tmss_test_env.create_tmss_client() as client:
+            task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url'])
+            task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/')
             subtask_template = client.get_subtask_template("observation control")
             spec = get_default_json_object_for_schema(subtask_template['schema'])
             spec['stations']['digital_pointings'][0]['subbands'] = [0]
@@ -96,7 +95,7 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow()+timedelta(minutes=5),
-                                                     stop_time=datetime.utcnow()+timedelta(minutes=15))
+                                                     task_blueprint_url=task_blueprint['url'])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
@@ -105,7 +104,7 @@ class SchedulingTest(unittest.TestCase):
             subtask = client.schedule_subtask(subtask_id)
 
             self.assertEqual('scheduled', subtask['state_value'])
-            self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask_id)['status'])
+            self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status'])
 
     def test_schedule_observation_subtask_with_blocking_reservations(self):
 
@@ -126,6 +125,9 @@ class SchedulingTest(unittest.TestCase):
             self.assertTrue(assigned)
 
         with tmss_test_env.create_tmss_client() as client:
+            task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url'])
+            task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/')
+
             subtask_template = client.get_subtask_template("observation control")
             spec = get_default_json_object_for_schema(subtask_template['schema'])
             spec['stations']['digital_pointings'][0]['subbands'] = [0]
@@ -134,7 +136,7 @@ class SchedulingTest(unittest.TestCase):
             subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'],
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
-                                                     task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'))
+                                                     task_blueprint_url=task_blueprint['url'])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
@@ -146,13 +148,17 @@ class SchedulingTest(unittest.TestCase):
 
             subtask = client.get_subtask(subtask_id)
             self.assertEqual('error', subtask['state_value'])
-            self.assertEqual('conflict', ra_test_env.radb.getTask(tmss_id=subtask_id)['status'])
+            ra_task = tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)
+            self.assertIsNotNone(ra_task)
+            self.assertEqual('conflict', ra_task['status'])
 
     def test_schedule_pipeline_subtask_with_enough_resources_available(self):
         with tmss_test_env.create_tmss_client() as client:
             cluster_url = client.get_path_as_json_object('/cluster/1')['url']
 
             # setup: first create an observation, so the pipeline can have input.
+            obs_task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url'])
+            obs_task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(obs_task_blueprint_data, '/task_blueprint/')
             obs_subtask_template = client.get_subtask_template("observation control")
             obs_spec = get_default_json_object_for_schema(obs_subtask_template['schema'])
             obs_spec['stations']['digital_pointings'][0]['subbands'] = [0]
@@ -160,19 +166,22 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
                                                          specifications_doc=obs_spec,
                                                          cluster_url=cluster_url,
-                                                         task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'))
+                                                         task_blueprint_url=obs_task_blueprint['url'])
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
             obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
                                                                                   subtask_output_url=obs_subtask_output_url), '/dataproduct/')
 
             # now create the pipeline...
+            pipe_task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="preprocessing pipeline")['url'])
+            pipe_task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(pipe_task_blueprint_data, '/task_blueprint/')
+
             pipe_subtask_template = client.get_subtask_template("pipeline control")
             pipe_spec = get_default_json_object_for_schema(pipe_subtask_template['schema'])
 
             pipe_subtask_data = test_data_creator.Subtask(specifications_template_url=pipe_subtask_template['url'],
                                                           specifications_doc=pipe_spec,
-                                                          task_blueprint_url=obs_subtask['task_blueprint'],
+                                                          task_blueprint_url=pipe_task_blueprint['url'],
                                                           cluster_url=cluster_url)
             pipe_subtask = test_data_creator.post_data_and_get_response_as_json_object(pipe_subtask_data, '/subtask/')
 
@@ -187,7 +196,7 @@ class SchedulingTest(unittest.TestCase):
             subtask = client.schedule_subtask(pipe_subtask['id'])
 
             self.assertEqual('scheduled', subtask['state_value'])
-            self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=pipe_subtask['id'])['status'])
+            self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=pipe_subtask['id'])['status'])
 
     def test_schedule_ingest_subtask(self):
         with tmss_test_env.create_tmss_client() as client:
@@ -279,11 +288,13 @@ class SchedulingTest(unittest.TestCase):
                 self.assertEqual(1, len(task_blueprint['subtasks']))
                 subtask = client.get_url_as_json_object(task_blueprint['subtasks'][0])
 
+                client.session.patch(subtask['url'], {'start_time': datetime.utcnow() + timedelta(minutes=5)})
                 client.set_subtask_status(subtask['id'], 'defined')
+
                 subtask = client.schedule_subtask(subtask['id'])
 
                 self.assertEqual('scheduled', subtask['state_value'])
-                self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask['id'])['status'])
+                self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask['id'])['status'])
 
                 client.set_subtask_status(subtask['id'], 'finished')
 
@@ -297,11 +308,11 @@ class SubtaskInputOutputTest(unittest.TestCase):
 
     def setUp(self) -> None:
         # make sure we're allowed to schedule
-        setting = Setting.objects.get(name='allow_scheduling_observations')
+        setting = Setting.objects.get(name='dynamic_scheduling_enabled')
         setting.value = True
         setting.save()
 
-    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_resources")
+    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources")
     def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self, assign_resources_mock):
         # setup:
         #   create observation subtask and outputs and dataproducts
@@ -341,12 +352,14 @@ class SAPTest(unittest.TestCase):
 
     def setUp(self) -> None:
         # make sure we're allowed to schedule
-        setting = Setting.objects.get(name='allow_scheduling_observations')
+        setting = Setting.objects.get(name='dynamic_scheduling_enabled')
         setting.value = True
         setting.save()
 
     def test_schedule_observation_subtask_creates_sap_with_correct_pointing(self):
         with tmss_test_env.create_tmss_client() as client:
+            task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url'])
+            task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/')
             subtask_template = client.get_subtask_template("observation control")
             spec = get_default_json_object_for_schema(subtask_template['schema'])
             spec['stations']['digital_pointings'][0]['subbands'] = [0]
@@ -357,6 +370,7 @@ class SAPTest(unittest.TestCase):
             subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'],
                                                      specifications_doc=spec,
                                                      cluster_url = cluster_url,
+                                                     task_blueprint_url=task_blueprint['url'],
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
                                                      stop_time=datetime.utcnow() + timedelta(minutes=15))
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
@@ -364,15 +378,17 @@ class SAPTest(unittest.TestCase):
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']),
                                                     '/subtask_output/')
 
-            sap_count_before_scheduling = models.SAP.objects.count()
+            subtask_model = models.Subtask.objects.get(id=subtask_id)
+            self.assertEqual(0, subtask_model.output_dataproducts.values('sap').count())
+
             client.set_subtask_status(subtask_id, 'defined')
             subtask = client.schedule_subtask(subtask_id)
 
-            self.assertGreater(models.SAP.objects.count(), sap_count_before_scheduling)
+            self.assertEqual(1, subtask_model.output_dataproducts.values('sap').count())
             self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle1'], pointing['angle1'])
             self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle2'], pointing['angle2'])
 
-    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_resources")
+    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources")
     def test_schedule_pipeline_subtask_copies_sap_from_input_to_output(self, assign_resources_mock):
         # setup:
         #   create observation subtask and outputs and dataproducts
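# The tests above patch the renamed hook _assign_or_unassign_resources so that
# scheduling can be exercised without contacting the ResourceAssigner/RADB. A
# minimal sketch of the decorator pattern, with the target string naming the
# module where the hook lives:
import unittest
from unittest import mock

class ExampleSchedulingTest(unittest.TestCase):
    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources")
    def test_schedule_without_ra(self, assign_resources_mock):
        # the code under test would call the scheduling functions here; the patched
        # hook is a MagicMock, so no resources are actually (un)assigned
        self.assertFalse(assign_resources_mock.called)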
diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py
index b9021a86f94d25f5fcccd620daf7705c07c8d88e..0cdb95de14d749d73d32ff03728e0daacb5ce79f 100755
--- a/SAS/TMSS/test/t_subtasks.py
+++ b/SAS/TMSS/test/t_subtasks.py
@@ -301,7 +301,7 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase):
 
     def setUp(self) -> None:
         # make sure we're allowed to schedule
-        setting = Setting.objects.get(name='allow_scheduling_observations')
+        setting = Setting.objects.get(name='dynamic_scheduling_enabled')
         setting.value = True
         setting.save()
 
@@ -371,7 +371,7 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase):
 class SettingTest(unittest.TestCase):
 
     def test_schedule_observation_subtask_raises_when_flag_is_false(self):
-        setting = Setting.objects.get(name='allow_scheduling_observations')
+        setting = Setting.objects.get(name='dynamic_scheduling_enabled')
         setting.value = False
         setting.save()
         obs_st = create_subtask_object_for_testing('observation', 'defined')
diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py
index 7d559bb9800d4ad3112d49df59d3aa3094fec86a..1029deb3474ce830e83f3d8d0a26f07c9bf3620f 100644
--- a/SAS/TMSS/test/test_utils.py
+++ b/SAS/TMSS/test/test_utils.py
@@ -270,7 +270,9 @@ class TMSSTestEnvironment:
     def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000, public_host: str=None,
                  exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER),
                  populate_schemas:bool=False, populate_test_data:bool=False,
-                 start_postgres_listener: bool=True):
+                 start_ra_test_environment: bool=False, start_postgres_listener: bool=False,
+                 start_subtask_scheduler: bool=False, start_dynamic_scheduler: bool=False,
+                 start_pipeline_control: bool=False, enable_viewflow: bool=False):
         self._exchange = exchange
         self._broker = broker
         self._populate_schemas = populate_schemas
@@ -284,9 +286,25 @@ class TMSSTestEnvironment:
                                                       public_host=public_host)
         self.client_credentials = TemporaryCredentials(user=self.ldap_server.dbcreds.user,
                                                        password=self.ldap_server.dbcreds.password)
+
+        self._start_ra_test_environment = start_ra_test_environment
+        self.ra_test_environment = None
+
         self._start_postgres_listener = start_postgres_listener
         self.postgres_listener = None
 
+        self._start_subtask_scheduler = start_subtask_scheduler
+        self.subtask_scheduler = None
+
+        self._start_dynamic_scheduler = start_dynamic_scheduler
+        self.dynamic_scheduler = None
+
+        self._start_pipeline_control = start_pipeline_control
+        self.pipeline_control = None
+
+        if enable_viewflow:
+            os.environ['TMSS_ENABLE_VIEWFLOW'] = 'True'
+
         # Check for correct Django version, should be at least 3.0
         if django.VERSION[0] < 3:
             print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" %
@@ -318,12 +336,36 @@ class TMSSTestEnvironment:
         user.is_superuser = True
         user.save()
 
+        if self._start_ra_test_environment:
+            self.ra_test_environment = RATestEnvironment(exchange=self._exchange, broker=self._broker)
+            self.ra_test_environment.start()
+
         if self._start_postgres_listener:
             # start the TMSSPGListener, so the changes in the database are posted as EventMessages on the bus
             from lofar.sas.tmss.services.tmss_postgres_listener import TMSSPGListener
             self.postgres_listener = TMSSPGListener(exchange=self._exchange, broker=self._broker, dbcreds=self.database.dbcreds)
             self.postgres_listener.start()
 
+        if self._start_subtask_scheduler:
+            from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service
+            self.subtask_scheduler = create_subtask_scheduling_service(exchange=self._exchange, broker=self._broker)
+            self.subtask_scheduler.start_listening()
+
+        if self._start_dynamic_scheduler:
+            from lofar.sas.tmss.services.scheduling.dynamic_scheduling import create_dynamic_scheduling_service, models
+            # By default, dynamic scheduling is disabled in TMSS.
+            # In this test environment we do want it enabled; why else would we want to start this service?
+            setting = models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value)
+            setting.value = True
+            setting.save()
+            self.dynamic_scheduler = create_dynamic_scheduling_service(exchange=self._exchange, broker=self._broker)
+            self.dynamic_scheduler.start_listening()
+
+        if self._start_pipeline_control:
+            from lofar.mac.PipelineControl import PipelineControlTMSS
+            self.pipeline_control = PipelineControlTMSS(exchange=self._exchange, broker=self._broker)
+            self.pipeline_control.start_listening()
+
         if self._populate_schemas or self._populate_test_data:
             self.populate_schemas()
 
@@ -336,6 +378,22 @@ class TMSSTestEnvironment:
             self.postgres_listener.stop()
             self.postgres_listener = None
 
+        if self.subtask_scheduler is not None:
+            self.subtask_scheduler.stop_listening()
+            self.subtask_scheduler = None
+
+        if self.dynamic_scheduler is not None:
+            self.dynamic_scheduler.stop_listening()
+            self.dynamic_scheduler = None
+
+        if self.pipeline_control is not None:
+            self.pipeline_control.stop_listening()
+            self.pipeline_control = None
+
+        if self.ra_test_environment is not None:
+            self.ra_test_environment.stop()
+            self.ra_test_environment = None
+
         self.django_server.stop()
         self.ldap_server.stop()
         self.database.destroy()
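# Each optional service added to TMSSTestEnvironment above follows the same
# lifecycle shape: a _start_* flag, a lazily-imported service started in start(),
# and a stop() that tears it down and resets the attribute to None so stop() is
# idempotent. A minimal, runnable sketch of that shape (hypothetical Service):
class Service:
    def start_listening(self): print("started")
    def stop_listening(self): print("stopped")

class Environment:
    def __init__(self, start_service: bool=False):
        self._start_service = start_service
        self.service = None

    def start(self):
        if self._start_service:
            self.service = Service()        # import/construct only when requested
            self.service.start_listening()

    def stop(self):
        if self.service is not None:
            self.service.stop_listening()
            self.service = None             # a second stop() is then a no-op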
@@ -406,9 +464,15 @@ def main_test_environment():
     group.add_option("-P", "--public_host", dest="public_host", type="string", default='127.0.0.1',
                       help="expose the TMSS Django REST API via this host. [default=%default]")
 
-    group = OptionGroup(parser, 'Example/Test data')
+    group = OptionGroup(parser, 'Example/Test data, schemas and services',
+                        description='Options to enable/create example/test data, schemas and services. ' \
+                                    'Without these options you get a lean and mean TMSS test environment, but then you need to run the background services and create the test data yourself. ' \
+                                    'For standalone commissioning/testing/playing around you need all these options.')
     parser.add_option_group(group)
     group.add_option('-d', '--data', dest='data', action='store_true', help='populate the test-database with test/example data')
+    group.add_option('-s', '--schemas', dest='schemas', action='store_true', help='populate the test-database with the TMSS JSON schemas')
+    group.add_option('-S', '--services', dest='services', action='store_true', help='start the TMSS background services.')
+    group.add_option('-v', '--viewflow', dest='viewflow', action='store_true', help='Enable the viewflow app for workflows on top of TMSS')
 
     group = OptionGroup(parser, 'Messaging options')
     parser.add_option_group(group)
@@ -419,10 +483,12 @@ def main_test_environment():
 
     logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO)
 
-    with RATestEnvironment(exchange=options.exchange, broker=options.broker):
-        with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, public_host=options.public_host,
-                                 exchange=options.exchange, broker=options.broker,
-                                 populate_schemas=True, populate_test_data=options.data) as instance:
+    with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, public_host=options.public_host,
+                             exchange=options.exchange, broker=options.broker,
+                             populate_schemas=options.schemas, populate_test_data=options.data,
+                             start_ra_test_environment=options.services, start_postgres_listener=options.services,
+                             start_subtask_scheduler=options.services, start_dynamic_scheduler=options.services,
+                             start_pipeline_control=options.services, enable_viewflow=options.viewflow) as tmss_test_env:
 
             # print some nice info for the user to use the test servers...
             # use print instead of log for clean lines.
@@ -433,19 +499,20 @@ def main_test_environment():
             print("*****************************************************")
             print("Test-TMSS database, LDAP and Django up and running...")
             print("*****************************************************")
-            print("DB Credentials ID: %s" % (instance.database.dbcreds_id, ))
-            print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, ))
-            print("TMSS Client Credentials ID: %s" % (instance.client_credentials.dbcreds_id, ))
-            print("Django URL: %s" % (instance.django_server.url))
+            print("DB Credentials ID: %s" % (tmss_test_env.database.dbcreds_id, ))
+            print("LDAP Credentials ID: %s" % (tmss_test_env.django_server.ldap_dbcreds_id, ))
+            print("TMSS Client Credentials ID: %s" % (tmss_test_env.client_credentials.dbcreds_id, ))
+            print("Django URL: %s" % (tmss_test_env.django_server.url))
             print()
             print("Example cmdlines to run tmss or tmss_manage_django:")
-            print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id))
-            print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id))
+            print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (tmss_test_env.database.dbcreds_id, tmss_test_env.django_server.ldap_dbcreds_id))
+            print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (tmss_test_env.database.dbcreds_id, tmss_test_env.django_server.ldap_dbcreds_id))
             print()
             print("Example cmdline to run tmss client call:")
-            print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (instance.client_credentials.dbcreds_id, ))
+            print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (tmss_test_env.client_credentials.dbcreds_id, ))
             print()
             print("Press Ctrl-C to exit (and remove the test database and django server automatically)")
+
             waitForInterrupt()
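# With the new options, a fully-loaded standalone environment (schemas, test data,
# all background services, viewflow) could be started like this, assuming
# main_test_environment() is installed as a console script (the script name is an
# assumption; the long option names come from the OptionGroup above):
#
#   tmss_test_environment --schemas --services --data --viewflow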
 
 
diff --git a/SAS/TMSS/test/testdata/subtasks.json b/SAS/TMSS/test/testdata/subtasks.json
index 70f8b97d95e9c2c830bcc42092bcf0144a506f9e..2596021102cda14054c339f651d9b7c0c0eb7a55 100644
--- a/SAS/TMSS/test/testdata/subtasks.json
+++ b/SAS/TMSS/test/testdata/subtasks.json
@@ -31,11 +31,9 @@
             "stop_time": "2020-01-02T12:00:00",
             "specifications_doc": 1,
             "do_cancel": null,
-            "priority": 1,
             "state": "defined",
             "task_blueprint": null,
             "specifications_template": 1,
-            "schedule_method": "manual",
             "cluster": 2,
             "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
             "created_at": "2020-02-24T13:19:57",
@@ -50,11 +48,9 @@
             "stop_time": "2020-01-03T12:00:00",
             "specifications_doc": 1,
             "do_cancel": null,
-            "priority": 1,
             "state": "defined",
             "task_blueprint": null,
             "specifications_template": 1,
-            "schedule_method": "manual",
             "cluster": 3,
             "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
             "created_at": "2020-02-24T13:19:57",
@@ -69,11 +65,9 @@
             "stop_time": "2020-01-04T12:00:00",
             "specifications_doc": 1,
             "do_cancel": null,
-            "priority": 1,
             "state": "defined",
             "task_blueprint": null,
             "specifications_template": 1,
-            "schedule_method": "manual",
             "cluster": 1,
             "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
             "created_at": "2020-02-24T13:19:57",
diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py
index f58583962a1887ddc6e3e6e136351ede386ba255..5edc2d0b9a87be9a108937fb0467fdff1476860d 100644
--- a/SAS/TMSS/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/test/tmss_test_data_django_models.py
@@ -118,12 +118,15 @@ def Cycle_test_data() -> dict:
             "start": datetime.utcnow().isoformat(),
             "stop": datetime.utcnow().isoformat()}
 
-def Project_test_data(archive_subdirectory="my_project/") -> dict:
+def Project_test_data(name: str=None, priority_rank: int = 1, archive_subdirectory="my_project/") -> dict:
+    if name is None:
+        name = 'my_project_' + str(uuid.uuid4())
+
     return  { #"cycles": [models.Cycle.objects.create(**Cycle_test_data())], # ManyToMany, use set()
-              "name": 'my_project_' + str(uuid.uuid4()),
+              "name": name,
                "description": 'my description ' + str(uuid.uuid4()),
                "tags": [],
-               "priority_rank": 1.0,
+               "priority_rank": priority_rank,
                "trigger_priority": 1000,
                "can_trigger": False,
                "private_data": True,
@@ -234,20 +237,26 @@ def SchedulingUnitBlueprint_test_data(name='my_scheduling_unit_blueprint', requi
             "do_cancel": False,
             "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) }
 
-def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None) -> dict:
+def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None, specifications_template: models.TaskTemplate=None, specifications_doc: dict=None) -> dict:
     if task_draft is None:
         task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data())
 
+    if specifications_template is None:
+        specifications_template = task_draft.specifications_template
+
+    if specifications_doc is None:
+        specifications_doc = get_default_json_object_for_schema(specifications_template.schema)
+
     if scheduling_unit_blueprint is None:
         scheduling_unit_blueprint = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data())
 
     return {"name": name,
             "description": "",
             "tags": [],
-            "specifications_doc": task_draft.specifications_doc,
+            "specifications_doc": specifications_doc,
             "do_cancel": False,
             "draft": task_draft,
-            "specifications_template": task_draft.specifications_template,
+            "specifications_template": specifications_template,
             "scheduling_unit_blueprint": scheduling_unit_blueprint}
 
 def TaskRelationBlueprint_test_data(producer: models.TaskBlueprint = None, consumer: models.TaskBlueprint = None) -> dict:
@@ -375,8 +384,6 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat
              "specifications_template": subtask_template,
              "tags": ["TMSS", "TESTING"],
              "do_cancel": datetime.utcnow(),
-             "priority": 1,
-             "schedule_method": models.ScheduleMethod.objects.get(value='manual'),
              "cluster": cluster,
              "raw_feedback": raw_feedback}
 
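The extended factories now accept explicit values where tests previously only got generated defaults. A short usage sketch, assuming the same 'models' module imported by this file; TaskTemplate_test_data is assumed to be a sibling factory here:

    # pin the project name and priority_rank instead of the random defaults
    project = models.Project.objects.create(**Project_test_data(name='my_named_project', priority_rank=10))

    # pass an explicit template; specifications_doc then falls back to that template's schema defaults
    template = models.TaskTemplate.objects.create(**TaskTemplate_test_data())
    blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(specifications_template=template))
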
diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py
index 82f35cf01ae41d98230365c02cc85fbdc0ec8908..1a16d480f10c74cd783b3ea88d39fd363b1c2cfc 100644
--- a/SAS/TMSS/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/test/tmss_test_data_rest.py
@@ -298,8 +298,8 @@ class TMSSRESTTestDataCreator():
                 'task_blueprints': [],
                 'produced_by': [],
                 'consumed_by': [],
-                'first_to_connect': [],
-                'second_to_connect': []}
+                'first_scheduling_relation': [],
+                'second_scheduling_relation': []}
 
 
     def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_role_url=None, output_role_url=None, selection_doc=None):
@@ -380,8 +380,8 @@ class TMSSRESTTestDataCreator():
                 "subtasks": [],
                 "produced_by": [],
                 "consumed_by": [],
-                'first_to_connect': [],
-                'second_to_connect': []}
+                'first_scheduling_relation': [],
+                'second_scheduling_relation': []}
 
     def TaskRelationBlueprint(self, draft_url=None, template_url=None, input_role_url=None, output_role_url=None, consumer_url=None, producer_url=None, selection_doc=None):
         if draft_url is None:
@@ -517,8 +517,6 @@ class TMSSRESTTestDataCreator():
                 "specifications_template": specifications_template_url,
                 "tags": ["TMSS", "TESTING"],
                 "do_cancel": datetime.utcnow().isoformat(),
-                "priority": 1,
-                "schedule_method": self.django_api_url + '/schedule_method/manual',
                 "cluster": cluster_url,
                 "raw_feedback": raw_feedack}
     
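The key renames above keep the REST test payloads in step with the renamed scheduling-relation fields. A hypothetical sanity check, assuming a TMSSRESTTestDataCreator instance named test_data_creator and that the first renamed hunk sits in its TaskDraft factory:

    task_draft = test_data_creator.TaskDraft()
    assert 'first_scheduling_relation' in task_draft
    assert 'first_to_connect' not in task_draft
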
diff --git a/SubSystems/RAServices/CMakeLists.txt b/SubSystems/RAServices/CMakeLists.txt
index fba2f3ff4837f061ce7251daaeae624ee7bddee6..43896bd6785b73d1aa7f65bb64aa004ad5f6abb8 100644
--- a/SubSystems/RAServices/CMakeLists.txt
+++ b/SubSystems/RAServices/CMakeLists.txt
@@ -27,7 +27,7 @@ lofar_package(RAServices
                         ltastorageoverview
                         QA_Service
                         MessageLogger
-                        TMSSSubtaskSchedulingService)
+                        TMSSSchedulingService)
 
 # supervisord config files
 lofar_add_sysconf_files(RAServices.ini
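
Note that the old package name no longer resolves after this rename: any out-of-tree build script that selects the service explicitly (e.g. via the usual LOFAR package-selection option, cmake -DBUILD_PACKAGES=TMSSSubtaskSchedulingService, if that is how it invokes the build) needs to switch to TMSSSchedulingService as well.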